Example #1
def get_forces_and_stress(totalarray):
    """Separates the forces and stress in two different arrays"""
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=totalarray.get_array('forces'))
    stress = orm.ArrayData()
    stress.set_array(name='stress', array=totalarray.get_array('stress'))
    return {'forces': forces, 'stress': stress}
Example #2
def extract_from_trajectory(trajectory):
    """Return the forces and stress arrays from the given trajectory data."""
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=trajectory.get_array('forces')[-1])

    stress = orm.ArrayData()
    stress.set_array(name='stress', array=trajectory.get_array('stress')[-1])

    return {'forces': forces, 'stress': stress}
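Extractors like the two above are usually wrapped in AiiDA's @calcfunction decorator so that the derived ArrayData nodes stay linked to their source in the provenance graph. A minimal sketch under that assumption (the node `traj` is a hypothetical stored input, not taken from the source projects):

from aiida import orm
from aiida.engine import calcfunction

@calcfunction
def extract_from_trajectory(trajectory):
    """Return the last forces and stress frames as separate ArrayData nodes."""
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=trajectory.get_array('forces')[-1])
    stress = orm.ArrayData()
    stress.set_array(name='stress', array=trajectory.get_array('stress')[-1])
    return {'forces': forces, 'stress': stress}

# results = extract_from_trajectory(traj)  # `traj`: hypothetical stored node with 'forces'/'stress'
# results['forces'] and results['stress'] are stored ArrayData nodes linked to `traj`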
Example #3
def test_pw_get_scf_accuracy(aiida_profile, fixture_localhost,
                             generate_calc_job_node):
    """Test the `PwCalculationTools.get_scf_accuracy` method."""
    entry_point_name = 'quantumespresso.pw'

    # Missing `output_trajectory` node
    node = generate_calc_job_node(entry_point_name, fixture_localhost)
    with pytest.raises(ValueError):
        node.tools.get_scf_accuracy()

    # Missing `scf_accuracy` array
    node = generate_calc_job_node(entry_point_name, fixture_localhost)
    trajectory = orm.ArrayData()
    trajectory.add_incoming(node,
                            link_type=LinkType.CREATE,
                            link_label='output_trajectory')
    trajectory.store()

    with pytest.raises(ValueError):
        node.tools.get_scf_accuracy()

    # Missing `scf_iterations` array
    node = generate_calc_job_node(entry_point_name, fixture_localhost)
    trajectory = orm.ArrayData()
    trajectory.set_array('scf_accuracy', np.array([1, 1, 1, 2, 2, 2, 2, 2]))
    trajectory.add_incoming(node,
                            link_type=LinkType.CREATE,
                            link_label='output_trajectory')
    trajectory.store()

    with pytest.raises(ValueError):
        node.tools.get_scf_accuracy()

    node = generate_calc_job_node(entry_point_name, fixture_localhost)
    trajectory = orm.ArrayData()
    trajectory.set_array('scf_accuracy', np.array([1, 1, 1, 2, 2, 2, 2, 2]))
    trajectory.set_array('scf_iterations', np.array([3, 5]))
    trajectory.add_incoming(node,
                            link_type=LinkType.CREATE,
                            link_label='output_trajectory')
    trajectory.store()

    # Invalid indices, there are only two frames
    with pytest.raises(IndexError):
        node.tools.get_scf_accuracy(index=2)
    with pytest.raises(IndexError):
        node.tools.get_scf_accuracy(index=-3)

    assert np.array_equal(node.tools.get_scf_accuracy(index=0),
                          np.array([1, 1, 1]))
    assert np.array_equal(node.tools.get_scf_accuracy(index=1),
                          np.array([2, 2, 2, 2, 2]))
    assert np.array_equal(node.tools.get_scf_accuracy(index=-1),
                          np.array([2, 2, 2, 2, 2]))
    assert np.array_equal(node.tools.get_scf_accuracy(index=-2),
                          np.array([1, 1, 1]))
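For reference, the behaviour this test pins down can be sketched by slicing the flat `scf_accuracy` array at the frame boundaries implied by `scf_iterations`. This is an illustrative reimplementation, not the actual `PwCalculationTools` code:

import numpy as np

def get_scf_accuracy_sketch(trajectory, index=-1):
    """Return the scf accuracy values of a single frame (illustrative sketch)."""
    accuracies = trajectory.get_array('scf_accuracy')
    iterations = trajectory.get_array('scf_iterations')
    bounds = np.cumsum(np.concatenate(([0], iterations)))  # cumulative frame boundaries
    frames = [accuracies[bounds[i]:bounds[i + 1]] for i in range(len(iterations))]
    return frames[index]  # plain list indexing raises IndexError out of range, as the test expects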
Example #4
    def parse(self, **kwargs):
        """Parse the contents of the output files stored in the `retrieved` output node."""

        try:
            with self.retrieved.open(self.node.get_option("output_filename"),
                                     "r") as handle:
                result = handle.read().strip()
        except OSError:
            return self.exit_codes.ERROR_READING_OUTPUT_FILE

        try:
            with self.retrieved.open(self.node.get_option("payload_filename"),
                                     "r") as handle:
                pass
        except OSError:
            return self.exit_codes.ERROR_READING_PAYLOAD_FILE

        self.out("result", orm.Bool(result == "success"))
        self.out(
            "out_dict",
            orm.Dict(
                dict={
                    f"output_key_{i}": f"value_{i}"
                    for i in range(self.node.get_option("output_dict_size"))
                }),
        )
        array = orm.ArrayData()
        array.set_array("example",
                        np.ones(self.node.get_option("output_array_size")))
        self.out("out_array", array)

        if result != "success":
            return self.exit_codes.ERROR_FAILED_OUTPUT
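The exit codes referenced in `parse` must be declared on the process spec of the corresponding CalcJob. A minimal sketch of that declaration, as it might appear in the CalcJob's `define` classmethod (the status numbers and messages are hypothetical):

@classmethod
def define(cls, spec):
    super().define(spec)
    # Hypothetical exit codes matching the labels used in `parse` above:
    spec.exit_code(310, 'ERROR_READING_OUTPUT_FILE', message='The output file could not be read.')
    spec.exit_code(311, 'ERROR_READING_PAYLOAD_FILE', message='The payload file could not be read.')
    spec.exit_code(320, 'ERROR_FAILED_OUTPUT', message='The output file reported failure.')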
Example #5
def merge_crossing_results(**kwargs):
    """Merge the results of multiple call of `get_crossing_and_lowgap_points`."""
    structure = kwargs.pop('structure')
    cell = structure.cell
    recipr = recipr_base(cell)

    merge = np.empty((0, 3))
    for array in kwargs.values():
        found = array.get_array('found')
        merge = np.vstack((merge, found))

    new = []
    if len(merge):
        merge = np.unique(merge, axis=0)

        if len(merge) > 1:
            merge_cart = np.dot(merge, recipr)
            aggl = AgglomerativeClustering(n_clusters=None,
                                           distance_threshold=0.005,
                                           linkage='average')
            res = aggl.fit(merge_cart)

            for n in np.unique(res.labels_):
                w = np.where(res.labels_ == n)[0]
                new.append(np.average(merge[w], axis=0))
        else:
            new = merge

    new = np.array(new)

    res = orm.ArrayData()
    res.set_array('crossings', new)

    return res
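This calcfunction assumes `AgglomerativeClustering` imported from scikit-learn (`from sklearn.cluster import AgglomerativeClustering`) and a helper `recipr_base` that builds the reciprocal lattice from the cell. A plausible sketch of the helper, assuming lattice vectors stored as rows and a 2*pi convention (both assumptions, not confirmed by the source):

import numpy as np

def recipr_base(cell):
    """Return the reciprocal lattice vectors (rows) of a 3x3 cell matrix (rows)."""
    return 2 * np.pi * np.linalg.inv(np.asarray(cell)).T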
Example #6
def get_forces(parameters):
    """Return the forces array [eV/ang] from the output parameters node."""
    # The cclib parser stores forces in atomic units (Hartree/Bohr).
    forces_au = np.array(parameters['grads'][-1])
    forces_arr = orm.ArrayData()
    forces_arr.set_array(name='forces', array=forces_au * ANG_TO_BOHR / EV_TO_EH)
    return forces_arr
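`ANG_TO_BOHR` and `EV_TO_EH` are assumed module-level constants. With the values below, `forces_au * ANG_TO_BOHR / EV_TO_EH` converts Hartree/Bohr to eV/Angstrom, since (Ha/Bohr) * (Bohr/Ang) / (Ha/eV) = eV/Ang:

# Assumed module-level conversion constants (CODATA values, rounded):
ANG_TO_BOHR = 1.8897259886        # Bohr per Angstrom
EV_TO_EH = 1.0 / 27.211386245988  # Hartree per eV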
Example #7
def get_forces_output_folder(folder, structure):
    """Return the forces array from the retrieved output files."""
    natoms = len(structure.sites)
    # Open files and extract the lines with forces.
    try:
        content = folder.get_object_content('aiida-frc-1.xyz')
        lines = content.splitlines()[-natoms:]
        forces_position = 1
    except FileNotFoundError:
        try:
            content = folder.get_object_content(
                'aiida-requested-forces-1_0.xyz')
            lines = re.search('Atom   Kind   Element(.*?)SUM OF ATOMIC FORCES',
                              content,
                              flags=re.S).group(1).splitlines()[1:-1]
            forces_position = 3
        except FileNotFoundError:
            return None

    # Extract forces.
    forces_array = np.empty((natoms, 3))
    for i, line in enumerate(lines):
        forces_array[i] = [
            float(s) for s in line.split()[forces_position:forces_position + 3]
        ]
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=forces_array * HA_BOHR_TO_EV_A)
    return forces
Example #8
def finilize_cross_results(cross_data, gap_threshold):
    """Analyze the final result of kpt-cross calculation, and return valid crossings."""
    if not isinstance(cross_data, orm.ArrayData):
        raise InputValidationError(
            'Invalid type {} for parameter `cross_data`'.format(
                type(cross_data)))
    if not isinstance(gap_threshold, orm.Float):
        raise InputValidationError(
            'Invalid type {} for parameter `gap_threshold`'.format(
                type(gap_threshold)))

    gap_thr = gap_threshold.value
    kpts = cross_data.get_array('kpoints')
    gaps = cross_data.get_array('gaps')

    w1 = np.where(gaps <= gap_thr)[0]
    w2 = np.where(gaps > gap_thr)[0]

    crossings = kpts[w1, :]
    low_gap = kpts[w2, :]

    res = orm.ArrayData()
    res.set_array('crossings', crossings)
    res.set_array('cr_gaps', gaps[w1])
    res.set_array('low_gap', low_gap)
    res.set_array('cr_lg', gaps[w2])

    return res
Example #9
def get_stress(parameters):
    """Return the stress array from the given parameters node."""
    stress = orm.ArrayData()
    stress.set_array(name='stress',
                     array=np.array(
                         parameters.get_attribute('cart_stress_tensor')))
    return stress
Example #10
    def parse_gnuplot2D(self, data_file_str):
        """Parse 2D GNUPlot formatted output.

        :param data_file_str: the data file read in as a single string
        """
        data_lines = data_file_str.splitlines()

        coords = []
        data = []

        for line in data_lines:
            stripped = line.strip()
            if not stripped:
                continue
            split_line = stripped.split()
            coords.append([float(split_line[0]), float(split_line[1])])
            data.append(float(split_line[2]))

        coords_units = 'bohr'
        data_units = self.units_dict[self.output_parameters['plot_num']]
        arraydata = orm.ArrayData()
        arraydata.set_array('xy_coordinates', np.array(coords))
        arraydata.set_array('data', np.array(data))
        arraydata.set_array('xy_coordinates_units', np.array(coords_units))
        arraydata.set_array('data_units', np.array(data_units))

        return arraydata
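Note that the unit labels are stored as zero-dimensional string arrays, so a consumer of this node has to unwrap them, e.g.:

# Reading the parsed node back (illustrative):
coords = arraydata.get_array('xy_coordinates')    # shape (npoints, 2), in bohr
units = arraydata.get_array('data_units').item()  # unwraps the 0-d string array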
Example #11
def get_forces_from_trajectory(trajectory):  # pylint: disable=unused-argument
    """Calcfunction to get forces from trajectory"""
    forces = orm.ArrayData()
    # currently the fleur relax workchain does not output trajectory data,
    # but it will be adapted to do so
    # largest forces are found in workchain output nodes
    # forces.set_array(name='forces', array=trajectory.get_array('forces')[-1])
    return forces
Example #12
def get_stress_output_folder(folder):
    """Return the stress array from the retrieved output files."""
    string = folder.get_object_content('aiida-1.stress')
    stress = orm.ArrayData()
    stress_array = np.array(string.splitlines()[-1].split()[2:],
                            dtype=float) / EV_A3_TO_BAR
    stress.set_array(name='stress', array=stress_array.reshape(3, 3))
    return stress
Example #13
def get_forces_output_folder(folder):
    """Return the forces array from the retrieved output files."""
    string_content = folder.get_object_content('aiida-frc-1.xyz')
    lines = string_content.splitlines()
    natoms = int(lines[0])
    forces_array = np.empty((natoms, 3))
    for i, line in enumerate(lines[-natoms:]):
        forces_array[i] = [float(s) for s in line.split()[1:]]
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=forces_array * HA_BOHR_TO_EV_A)
    return forces
Example #14
def analyze_kpt_cross(bands_data, old_data, gap_threshold):
    """Analyze the result of kpt-cross calculation, returning the list of lowst gap and skippable points."""
    if not isinstance(bands_data, orm.BandsData):
        raise InputValidationError(
            'Invalid type {} for parameter `bands_data`'.format(
                type(bands_data)))
    if not isinstance(old_data, orm.ArrayData):
        raise InputValidationError(
            'Invalid type {} for parameter `old_data`'.format(type(old_data)))
    if not isinstance(gap_threshold, orm.Float):
        raise InputValidationError(
            'Invalid type {} for parameter `gap_threshold`'.format(
                type(gap_threshold)))

    gap_thr = gap_threshold.value
    calculation = bands_data.creator
    gaps = get_gap_array_from_PwCalc(calculation)
    kpt_cryst = bands_data.get_kpoints()

    res = orm.ArrayData()

    gaps = np.array(gaps).reshape(-1, 7)

    min_pos = np.argmin(gaps, axis=1)
    min_gap = np.min(gaps, axis=1)

    app = np.where((min_pos == 3) | (min_gap < gap_thr))[0]
    new_skips = np.zeros(min_pos.shape)
    new_skips[app] = 1
    app_kpt = kpt_cryst.reshape(-1, 7, 3)
    new_kpt = app_kpt[list(range(len(min_pos))), min_pos, :]

    try:
        kpt = old_data.get_array('kpoints')
        gaps = old_data.get_array('gaps')
        skips = old_data.get_array('skips')
    except KeyError:  # `old_data` does not yet contain these arrays
        kpt = new_kpt
        gaps = min_gap
        skips = new_skips
    else:
        w = np.where(skips == 0)

        kpt[w] = new_kpt
        gaps[w] = min_gap
        skips[w] = new_skips

    res.set_array('skips', skips)
    res.set_array('kpoints', kpt)
    res.set_array('gaps', gaps)

    return res
Example #15
def get_forces_from_trajectory(trajectory):
    """Return the forces array from the given trajectory data."""
    forces = orm.ArrayData()
    arraynames = trajectory.get_arraynames()
    # Raw forces take precedence here
    # Forces are already in eV/Angstrom
    if 'forces' in arraynames:
        array_ = trajectory.get_array('forces')
    else:
        array_ = trajectory.get_array('cons_forces')

    forces.set_array(name='forces', array=array_[-1])
    return forces
Example #16
    def parse_gaussian(self, data_file_str):
        """Parse Gaussian Cube formatted output.

        :param data_file_str: the data file read in as a single string
        """
        lines = data_file_str.splitlines()

        atoms_line = lines[2].split()
        natoms = int(atoms_line[0])  # The number of atoms listed in the file
        origin = np.array(atoms_line[1:], dtype=float)

        # Header of the file: comments, the voxel, and the number of atoms and datapoints
        header = lines[:6 + natoms]
        # The actual data: atoms and volumetric data
        data_lines = lines[6 + natoms:]

        # Parse the declared dimensions of the volumetric data
        x_line = header[3].split()
        xdim = int(x_line[0])
        y_line = header[4].split()
        ydim = int(y_line[0])
        z_line = header[5].split()
        zdim = int(z_line[0])

        # Get the vectors describing the basis voxel
        voxel_array = np.array([[x_line[1], x_line[2], x_line[3]],
                                [y_line[1], y_line[2], y_line[3]],
                                [z_line[1], z_line[2], z_line[3]]],
                               dtype=np.float64)

        # Get the volumetric data
        data_array = np.empty(xdim * ydim * zdim, dtype=float)
        cursor = 0
        for line in data_lines:
            ls = line.split()
            data_array[cursor:cursor + len(ls)] = ls
            cursor += len(ls)
        data_array = data_array.reshape((xdim, ydim, zdim))

        coordinates_units = 'bohr'
        data_units = self.units_dict[self.output_parameters['plot_num']]

        arraydata = orm.ArrayData()
        arraydata.set_array('voxel', voxel_array)
        arraydata.set_array('data', data_array)
        arraydata.set_array('data_units', np.array(data_units))
        arraydata.set_array('coordinates_units', np.array(coordinates_units))

        return arraydata
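As a quick consistency check on the parsed cube data (an illustrative post-processing step, not part of the parser), the integral of the data over the cell follows from the voxel volume:

# Illustrative: integrate the volumetric data over the cell.
voxel_volume = abs(np.linalg.det(voxel_array))  # bohr^3 per voxel
integral = data_array.sum() * voxel_volume      # e.g. the total charge for a density plot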
Example #17
    def parse_gnuplot1D(self, data_file_str):
        """Parse 1D GNUPlot formatted output.

        :param data_file_str: the data file read in as a single string
        """
        data_lines = data_file_str.splitlines()

        n_col = len(data_lines[0].split())

        # 1D case
        if n_col == 2:
            coords = []
            data = []
            data_integral = []
            for line in data_lines:
                split_line = line.split()
                coords.append(float(split_line[0]))
                data.append(float(split_line[1]))
            y_data = [data]
            y_names = ['data']
            y_units = [self.units_dict[self.output_parameters['plot_num']]]

        # 1D case with spherical averaging
        elif n_col == 3:
            coords = []
            data = []
            data_integral = []
            for line in data_lines:
                split_line = line.split()
                coords.append(float(split_line[0]))
                data.append(float(split_line[1]))
                data_integral.append(float(split_line[2]))
            y_data = [data, data_integral]
            y_names = ['data', 'integrated_data']
            unit = self.units_dict[self.output_parameters['plot_num']]
            y_units = [unit, unit.replace('bohr^3', 'bohr')]

        else:
            raise ValueError('Unexpected number of columns in 1D data: {}'.format(n_col))

        x_units = 'bohr'
        arraydata = orm.ArrayData()
        arraydata.set_array('x_coordinates', np.array(coords))
        arraydata.set_array('x_coordinates_units', np.array(x_units))
        for name, data, units in zip(y_names, y_data, y_units):
            arraydata.set_array(name, np.array(data))
            arraydata.set_array(name + '_units', np.array(units))

        return arraydata
Example #18
    def parse_gnuplot_polar(self, data_file_str):
        """Parse 2D Polar GNUPlot formatted, single column output.

        :param data_file_str: the data file read in as a single string
        """
        data_lines = data_file_str.splitlines()
        data_lines.pop(0)  # First line is a header

        data = []
        for line in data_lines:
            data.append(float(line))
        data_units = [self.units_dict[self.output_parameters['plot_num']]]

        arraydata = orm.ArrayData()
        arraydata.set_array('data', np.array(data))
        arraydata.set_array('data_units', np.array(data_units))

        return arraydata
Example #19
def make_supercell(structure, supercell):
    """Return the supercell of `structure`, memoizing results in the module-level `save` cache."""
    if not isinstance(supercell, orm.ArrayData):
        app = orm.ArrayData()
        app.set_array('data', np.array(supercell))
        supercell = app
    pk = structure.pk
    sc = tuple(supercell.get_array('data'))
    if pk in save:
        if sc in save[pk]:
            return save[pk][sc]

    res = _make_supercell(structure, supercell)

    if pk not in save:
        save[pk] = {}
    save[pk][sc] = res

    return res
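`save` and `_make_supercell` are assumed module-level names: a memoization cache and the uncached worker function, respectively. Note that `tuple(supercell.get_array('data'))` only yields a hashable cache key if the array is one-dimensional (e.g. three diagonal repetitions). A minimal sketch of the cache:

# Assumed module-level cache: {structure_pk: {supercell_tuple: result_structure}}
save = {}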
Example #20
def get_stress_from_trajectory(trajectory):
    """Return the stress array from the given trajectory data."""

    # Taken from http://greif.geo.berkeley.edu/~driver/conversions.html
    # 1 eV/Angstrom3 = 160.21766208 GPa
    ev_to_gpa = 160.21766208

    stress = orm.ArrayData()

    arraynames = trajectory.get_arraynames()
    # Raw stress takes the precedence here
    if 'stress' in arraynames:
        array_ = trajectory.get_array('stress')
    else:
        array_ = trajectory.get_array('symm_stress')
    # Convert stress back to eV/Angstrom3; CASTEP outputs it in GPa
    stress.set_array(name='stress', array=array_[-1] / ev_to_gpa)
    return stress
Example #21
def test_outputs(aiida_profile, generate_workchain_stm, generate_wc_job_node,
                 generate_calc_job_node, fixture_localhost):
    """Test `SiestaSTMWorkChain`."""

    process = generate_workchain_stm()
    process.checks()

    name = 'default'
    entry_point_calc_job = 'siesta.stm'
    inputs = AttributeDict({'spin_option': orm.Str("q")})
    attributes = AttributeDict({
        'input_filename': 'stm.in',
        'output_filename': 'stm.out'
    })
    stm_node = generate_calc_job_node(entry_point_calc_job, fixture_localhost,
                                      name, inputs, attributes)
    stm_node.set_process_state(ProcessState.FINISHED)
    stm_node.set_exit_status(ExitCode(0).status)
    stm_array = orm.ArrayData()
    stm_array.add_incoming(stm_node,
                           link_type=LinkType.CREATE,
                           link_label='stm_array')
    stm_array.store()
    process.ctx.stm_calc = stm_node

    first_basewc = generate_wc_job_node("siesta.base", fixture_localhost)
    out_par = orm.Dict(dict={"variable_geometry": False})
    out_par.store()
    out_par.add_incoming(first_basewc,
                         link_type=LinkType.RETURN,
                         link_label='output_parameters')
    process.ctx.workchain_base = first_basewc

    result = process.run_results()

    assert result == ExitCode(0)
    assert isinstance(process.outputs["stm_array"], orm.ArrayData)
Example #22
    def parse_eps_files(self):
        """Parse the eps*.dat files produced by pw2gw.x and store them in the `eps` node."""
        retrieved = self.retrieved
        retrieved_names = retrieved.list_object_names()

        files = Pw2gwCalculation._internal_retrieve_list
        if any(_ not in retrieved_names for _ in files):
            self.exit_code_eps = self.exit_codes.ERROR_OUTPUT_FILES
            return

        energy = None
        eps = orm.ArrayData()
        for name in Pw2gwCalculation._internal_retrieve_list:
            content = retrieved.get_object_content(name)
            base = name.split('.')[0]

            try:
                data = np.loadtxt(StringIO(content))
            except ValueError:
                self.exit_code_eps = self.exit_codes.ERROR_OUTPUT_FILES
                return
            if data.ndim != 2 or data.shape[0] == 0 or data.shape[1] != 2:
                self.exit_code_eps = self.exit_codes.ERROR_OUTPUT_FILES_INVALID_FORMAT
                return

            x, y = data.T
            if energy is None:
                energy = x
                eps.set_array('energy', x)
            elif not np.allclose(x, energy):
                self.exit_code_eps = self.exit_codes.ERROR_OUTPUT_FILES_ENERGY_MISMATCH
                return

            eps.set_array(base, y)

        return eps
Example #23
    def prepare_for_submission(self, folder):  # pylint: disable=too-many-statements,too-many-branches
        """Prepare the calculation job for submission by transforming input nodes into input files.

        In addition to the input files being written to the sandbox folder, a `CalcInfo` instance will be returned that
        contains lists of files that need to be copied to the remote machine before job submission, as well as file
        lists that are to be retrieved after job completion.

        :param folder: a sandbox folder to temporarily write files on disk.
        :return: :py:class:`~aiida.common.datastructures.CalcInfo` instance.
        """

        def test_offset(offset):
            """Check if the grid has an offset."""
            if any(i != 0. for i in offset):
                raise NotImplementedError(
                    'Computation of electron-phonon on a mesh with non zero offset is not implemented, '
                    'at the level of epw.x')

        # pylint: disable=too-many-statements,too-many-branches
        local_copy_list = []
        remote_copy_list = []
        remote_symlink_list = []

        if 'settings' in self.inputs:
            settings = _uppercase_dict(self.inputs.settings.get_dict(), dict_name='settings')
        else:
            settings = {}

        # Copy nscf folder
        parent_folder_nscf = self.inputs.parent_folder_nscf
        parent_calc_nscf = parent_folder_nscf.creator

        if parent_calc_nscf is None:
            raise exceptions.NotExistent('parent_folder<{}> has no parent calculation'.format(parent_folder_nscf.pk))

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_nscf.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_nscf.computer.get_name()))

        # By default, the parent output folder is ./out
        parent_calc_out_subfolder_nscf = parent_calc_nscf.process_class._OUTPUT_SUBFOLDER # pylint: disable=protected-access

        # Now phonon folder
        parent_folder_ph = self.inputs.parent_folder_ph
        parent_calc_ph = parent_folder_ph.creator

        # Also, the parent calculation must be on the same computer
        if not self.node.computer.uuid == parent_calc_ph.computer.uuid:
            raise exceptions.InputValidationError(
                'Calculation has to be launched on the same computer as that of the parent: {}'.format(
                    parent_calc_ph.computer.get_name()))

        # I put the first-level keys as uppercase (i.e., namelist and card names) and the second-level keys as lowercase
        parameters = _uppercase_dict(self.inputs.parameters.get_dict(), dict_name='parameters')
        parameters = {k: _lowercase_dict(v, dict_name=k) for k, v in parameters.items()}

        if 'INPUTEPW' not in parameters:
            raise exceptions.InputValidationError('required namelist INPUTEPW not specified')

        parameters['INPUTEPW']['outdir'] = self._OUTPUT_SUBFOLDER
        parameters['INPUTEPW']['iverbosity'] = 1
        parameters['INPUTEPW']['prefix'] = self._PREFIX

        try:
            mesh, offset = self.inputs.qpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nq1'] = mesh[0]
            parameters['INPUTEPW']['nq2'] = mesh[1]
            parameters['INPUTEPW']['nq3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse q-point grid') from exception

        try:
            mesh, offset = self.inputs.kpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nk1'] = mesh[0]
            parameters['INPUTEPW']['nk2'] = mesh[1]
            parameters['INPUTEPW']['nk3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the coarse k-point grid') from exception

        try:
            mesh, offset = self.inputs.qfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nqf1'] = mesh[0]
            parameters['INPUTEPW']['nqf2'] = mesh[1]
            parameters['INPUTEPW']['nqf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine q-point grid') from exception

        try:
            mesh, offset = self.inputs.kfpoints.get_kpoints_mesh()
            test_offset(offset)
            parameters['INPUTEPW']['nkf1'] = mesh[0]
            parameters['INPUTEPW']['nkf2'] = mesh[1]
            parameters['INPUTEPW']['nkf3'] = mesh[2]
            postpend_text = None
        except NotImplementedError as exception:
            raise exceptions.InputValidationError('Cannot get the fine k-point grid') from exception


        # customized namelists, otherwise not present in the distributed epw code
        try:
            namelists_toprint = settings.pop('NAMELISTS')
            if not isinstance(namelists_toprint, list):
                raise exceptions.InputValidationError(
                    "The 'NAMELISTS' value, if specified in the settings input "
                    'node, must be a list of strings')
        except KeyError:  # list of namelists not specified in the settings; do automatic detection
            namelists_toprint = self._compulsory_namelists


        # create the save folder with dvscf and dyn files.
        folder.get_subfolder(self._FOLDER_SAVE, create=True)

        # List of IBZ q-points to be appended to the EPW input. To be removed once EPW no longer requires it.
        qibz_ar = []
        for key, value in sorted(parent_folder_ph.creator.outputs.output_parameters.get_dict().items()):
            if key.startswith('dynamical_matrix_'):
                qibz_ar.append(value['q_point'])

        qibz_node = orm.ArrayData()
        qibz_node.set_array('qibz', np.array(qibz_ar))

        list_of_points = qibz_node.get_array('qibz')
        # Number of q-points in the irreducible Brillouin Zone.
        nqpt = len(list_of_points)

        # add here the list of point coordinates
        if len(list_of_points) > 1:
            postpend_text = '{} cartesian\n'.format(len(list_of_points))
            for points in list_of_points:
                postpend_text += '{0:18.10f} {1:18.10f} {2:18.10f} \n'.format(*points)

        with folder.open(self.metadata.options.input_filename, 'w') as infile:
            for namelist_name in namelists_toprint:
                infile.write('&{0}\n'.format(namelist_name))
                # namelist content; set to {} if not present, so that we leave an empty namelist
                namelist = parameters.pop(namelist_name, {})
                for key, value in sorted(namelist.items()):
                    infile.write(convert_input_to_namelist_entry(key, value))
                infile.write('/\n')

            # add list of qpoints if required
            if postpend_text is not None:
                infile.write(postpend_text)

        if parameters:
            raise exceptions.InputValidationError(
                'The following namelists are specified in parameters, but are '
                'not valid namelists for the current type of calculation: '
                '{}'.format(','.join(list(parameters.keys()))))

        # copy the parent scratch
        symlink = settings.pop('PARENT_FOLDER_SYMLINK', self._default_symlink_usage)  # a boolean
        if symlink:
            # I create a symlink to each file/folder in the parent ./out
            folder.get_subfolder(self._OUTPUT_SUBFOLDER, create=True)

            remote_symlink_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf, '*'),
                self._OUTPUT_SUBFOLDER
            ))

        else:
            # here I copy the whole folder ./out
            remote_copy_list.append((
                parent_folder_nscf.computer.uuid,
                os.path.join(parent_folder_nscf.get_remote_path(), parent_calc_out_subfolder_nscf),
                self._OUTPUT_SUBFOLDER
            ))

        prefix = self._PREFIX

        for iqpt in range(1, nqpt+1):
            label = str(iqpt)
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-0')
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q0'))
            tmp_path = os.path.join(self._FOLDER_DYNAMICAL_MATRIX, 'dynamical-matrix-'+label)
            remote_copy_list.append((
                parent_folder_ph.computer.uuid,
                os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                'save/'+prefix+'.dyn_q'+label))

            if iqpt == 1:
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.phsave')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'))
            else:
                tmp_path = os.path.join(self._OUTPUT_SUBFOLDER, '_ph0/'+prefix+'.q_'+label+'/'+prefix+'.dvscf*')
                remote_copy_list.append((
                    parent_folder_ph.computer.uuid,
                    os.path.join(parent_folder_ph.get_remote_path(), tmp_path),
                    'save/'+prefix+'.dvscf_q'+label))

        codeinfo = datastructures.CodeInfo()
        codeinfo.cmdline_params = (list(settings.pop('CMDLINE', [])) + ['-in', self.metadata.options.input_filename])
        codeinfo.stdout_name = self.metadata.options.output_filename
        codeinfo.code_uuid = self.inputs.code.uuid

        calcinfo = datastructures.CalcInfo()
        calcinfo.codes_info = [codeinfo]
        calcinfo.local_copy_list = local_copy_list
        calcinfo.remote_copy_list = remote_copy_list
        calcinfo.remote_symlink_list = remote_symlink_list

        # Retrieve by default the output file
        calcinfo.retrieve_list = []
        calcinfo.retrieve_list.append(self.metadata.options.output_filename)
        calcinfo.retrieve_list += settings.pop('ADDITIONAL_RETRIEVE_LIST', [])

        if settings:
            unknown_keys = ', '.join(list(settings.keys()))
            raise exceptions.InputValidationError('`settings` contained unexpected keys: {}'.format(unknown_keys))

        return calcinfo
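`_uppercase_dict` and `_lowercase_dict` are assumed helpers that re-key a dictionary; a plausible sketch of one of them (the exact error handling is an assumption):

def _uppercase_dict(dictionary, dict_name='dictionary'):
    """Return a copy of `dictionary` with all keys uppercased (sketch of the assumed helper)."""
    new = {key.upper(): value for key, value in dictionary.items()}
    if len(new) != len(dictionary):
        raise ValueError('Inconsistent dictionary `{}`: keys differ only by case'.format(dict_name))
    return new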
Example #24
def get_stress(stress):
    """Return the final stress array."""
    stress_data = orm.ArrayData()
    stress_data.set_array(name='stress', array=stress.get_array('final'))

    return stress_data
Example #25
def get_stress_from_trajectory(trajectory):
    """Return the stress array from the given trajectory data."""
    stress = orm.ArrayData()
    stress.set_array(name='stress', array=trajectory.get_array('stress')[-1])
    return stress
Example #26
    def test_reexport(self, temp_dir):
        """
        Export something, import and reexport and check if everything is valid.
        The export is rather easy::

            ___       ___          ___
           |   | INP |   | CREATE |   |
           | p | --> | c | -----> | a |
           |___|     |___|        |___|

        """
        import numpy as np
        import string
        import random
        from datetime import datetime

        from aiida.common.hashing import make_hash

        def get_hash_from_db_content(grouplabel):
            """Helper function to get hash"""
            builder = orm.QueryBuilder()
            builder.append(orm.Dict, tag='param', project='*')
            builder.append(orm.CalculationNode,
                           tag='calc',
                           project='*',
                           edge_tag='p2c',
                           edge_project=('label', 'type'))
            builder.append(orm.ArrayData,
                           tag='array',
                           project='*',
                           edge_tag='c2a',
                           edge_project=('label', 'type'))
            builder.append(orm.Group,
                           filters={'label': grouplabel},
                           project='*',
                           tag='group',
                           with_node='array')
            # I want the query to contain something!
            self.assertTrue(builder.count() > 0)
            # The hash is given from the preservable entries in an export-import cycle,
            # uuids, attributes, labels, descriptions, arrays, link-labels, link-types:
            hash_ = make_hash([(
                item['param']['*'].attributes,
                item['param']['*'].uuid,
                item['param']['*'].label,
                item['param']['*'].description,
                item['calc']['*'].uuid,
                item['calc']['*'].attributes,
                item['array']['*'].attributes,
                [
                    item['array']['*'].get_array(name).tolist()
                    for name in item['array']['*'].get_arraynames()
                ],
                item['array']['*'].uuid,
                item['group']['*'].uuid,
                item['group']['*'].label,
                item['p2c']['label'],
                item['p2c']['type'],
                item['c2a']['label'],
                item['c2a']['type'],
                item['group']['*'].label,
            ) for item in builder.dict()])
            return hash_

        # Parameters for the randomized test data
        chars = string.ascii_uppercase + string.digits
        size = 10
        grouplabel = 'test-group'

        nparr = np.random.random((4, 3, 2))  # pylint: disable=no-member
        trial_dict = {}
        # give some integers:
        trial_dict.update({str(k): np.random.randint(100) for k in range(10)})
        # give some floats:
        trial_dict.update({str(k): np.random.random() for k in range(10, 20)})  # pylint: disable=no-member
        # give some booleans:
        trial_dict.update(
            {str(k): bool(np.random.randint(2))
             for k in range(20, 30)})
        # give some text (keys 30-39, so the booleans above are not overwritten):
        trial_dict.update({
            str(k): ''.join(random.choice(chars) for _ in range(size))
            for k in range(30, 40)
        })

        param = orm.Dict(dict=trial_dict)
        param.label = str(datetime.now())
        param.description = 'd_' + str(datetime.now())
        param.store()
        calc = orm.CalculationNode()
        # setting also the trial dict as attributes, but randomizing the keys
        for key, value in trial_dict.items():
            calc.set_attribute(str(int(key) + np.random.randint(10)), value)
        array = orm.ArrayData()
        array.set_array('array', nparr)
        array.store()
        # LINKS
        # the calculation has input the parameters-instance
        calc.add_incoming(param,
                          link_type=LinkType.INPUT_CALC,
                          link_label='input_parameters')
        calc.store()
        # I want the array to be an output of the calculation
        array.add_incoming(calc,
                           link_type=LinkType.CREATE,
                           link_label='output_array')
        group = orm.Group(label='test-group')
        group.store()
        group.add_nodes(array)

        calc.seal()

        hash_from_dbcontent = get_hash_from_db_content(grouplabel)

        # I export and reimport 3 times in a row:
        for i in range(3):
            # Always new filename:
            filename = os.path.join(temp_dir, 'export-{}.aiida'.format(i))
            # Loading the group from the string
            group = orm.Group.get(label=grouplabel)
            # exporting based on all members of the group
            # this also checks if group memberships are preserved!
            export([group] + list(group.nodes), filename=filename, silent=True)
            # cleaning the DB!
            self.clean_db()
            self.create_user()
            # reimporting the data from the file
            import_data(filename, silent=True, ignore_unknown_nodes=True)
            # creating the hash from db content
            new_hash = get_hash_from_db_content(grouplabel)
            # I check for equality against the first hash created, which implies that hashes
            # are equal in all iterations of this process
            self.assertEqual(hash_from_dbcontent, new_hash)
Example #27
def get_forces(forces):
    """Return the final forces array.."""
    forces_data = orm.ArrayData()
    forces_data.set_array(name='forces', array=forces.get_array('final'))

    return forces_data
Example #28
def get_forces_from_trajectory(trajectory):
    """Return the forces array from the given trajectory data."""
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=trajectory.get_array('forces')[-1])
    return forces
Example #29
def get_crossing_and_lowgap_points(bands_data, gap_threshold):
    """Extract the low-gap points and crossings from the output of a `bands` calculation."""
    if not isinstance(bands_data, orm.BandsData):
        raise InputValidationError(
            'Invalid type {} for parameter `bands_data`'.format(
                type(bands_data)))
    if not isinstance(gap_threshold, orm.Float):
        raise InputValidationError(
            'Invalid type {} for parameter `gap_threshold`'.format(
                type(gap_threshold)))

    calculation = bands_data.creator
    gaps = get_gap_array_from_PwCalc(calculation)
    kpt_cryst = bands_data.get_kpoints()
    kpt_cart = bands_data.get_kpoints(cartesian=True)
    gap_thr = gap_threshold.value

    try:
        kki = calculation.inputs.kpoints.creator.inputs
        last_pinned = kki.centers.get_array('pinned')
        dist = kki.distance.value
    except (AttributeError, KeyError):  # first iteration: no previous pinned centers available
        dist = 200
        last_pinned = np.array([[0., 0., 0.]])

    centers = KDTree(last_pinned)
    kpt_tree = KDTree(kpt_cart)
    query = centers.query_ball_tree(kpt_tree, r=dist * 1.74 / 2)  #~sqrt(3) / 2

    # Limiting fermi velocity to ~ v_f[graphene] * 3
    # GAP ~< dK * 10 / (#PT - 1)
    pinned_thr = dist * 4.00

    # Limiting number of new points per lowgap center based on distance between points
    lim = max(-5 // np.log10(dist), 1) if dist < 1 else 200
    if dist < 0.01:
        lim = 1
    where_pinned = []
    where_found = []
    for n, q in enumerate(query):
        q = np.array(q, dtype=int)  # np.int is removed in modern NumPy

        if len(q) == 0:
            continue

        min_gap = gaps[q].min()

        # Skipping points where the gap didn't move much between iterations
        # _, i = kpt_tree.query(last_pinned[n])
        # prev_min_gap = gaps[i]
        # if min_gap / prev_min_gap > 0.95 and dist < 0.005:
        #     continue

        app = None
        scale = 2.5 if lim > 1 else 1.001
        if dist == 200:
            scale = 0.25 / min_gap
        while app is None or len(app) > lim:
            app = np.where(gaps[q] < min_gap * scale)[0]
            scale *= 0.98
            if scale < 1.0001:
                app = np.where(gaps[q] < min_gap * 1.0001)[0]
                break
        where_found.extend([q[i] for i in app if gaps[q[i]] <= gap_thr])
        where_pinned.extend(
            [q[i] for i in app if gap_thr < gaps[q[i]] < pinned_thr])

    # Remove duplicates and avoid an exception for an empty list
    where_pinned = np.array(where_pinned, dtype=int)
    where_pinned = np.unique(where_pinned)
    where_found = np.array(where_found, dtype=int)
    where_found = np.unique(where_found)

    res = orm.ArrayData()
    res.set_array('pinned', kpt_cart[where_pinned])
    res.set_array('found', kpt_cryst[where_found])

    return res
Example #30
def get_forces(parameters):
    """Return the forces array from the given parameters node."""
    forces = orm.ArrayData()
    forces.set_array(name='forces', array=np.array(parameters.get_attribute('forces')))
    return forces