Example #1
# Imports assumed by this snippet (not shown in the original listing):
from ase import Atoms

from aiida.orm import Str, load_code
from aiida.plugins import DataFactory, WorkflowFactory
from aiida_quantumespresso.utils.pseudopotential import validate_and_prepare_pseudos_inputs

PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.base')


def main(options):

    ###### setting the lattice structure ######

    alat = 2.4955987320  # Angstrom
    the_cell = [[1.000000 * alat, 0.000000, 0.000000],
                [-0.500000 * alat, 0.866025 * alat, 0.000000],
                [0.000000, 0.000000, 6.4436359260]]

    atoms = Atoms('BNNB', [(1.2477994910, 0.7204172280, 0.0000000000),
                           (-0.0000001250, 1.4408346720, 0.0000000000),
                           (1.2477994910, 0.7204172280, 3.2218179630),
                           (-0.0000001250, 1.4408346720, 3.2218179630)],
                  cell=[1, 1, 1])
    atoms.set_cell(the_cell, scale_atoms=False)
    atoms.set_pbc([True, True, True])

    StructureData = DataFactory('structure')
    structure = StructureData(ase=atoms)

    ###### setting the kpoints mesh ######

    KpointsData = DataFactory('array.kpoints')
    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 2])

    ###### setting the scf parameters ######

    Dict = DataFactory('dict')
    params_scf = {
        'CONTROL': {
            'calculation': 'scf',
            'verbosity': 'high',
            'wf_collect': True
        },
        'SYSTEM': {
            'ecutwfc': 130.,
            'force_symmorphic': True,
            'nbnd': 20
        },
        'ELECTRONS': {
            'mixing_mode': 'plain',
            'mixing_beta': 0.7,
            'conv_thr': 1.e-8,
            'diago_thr_init': 5.0e-6,
            'diago_full_acc': True
        },
    }

    parameter_scf = Dict(dict=params_scf)

    ###### creation of the workchain ######

    builder = PwBaseWorkChain.get_builder()
    builder.pw.structure = structure
    builder.pw.parameters = parameter_scf
    builder.kpoints = kpoints
    builder.pw.metadata.options.max_wallclock_seconds = \
            options['max_wallclock_seconds']
    builder.pw.metadata.options.resources = options['resources']

    if 'queue_name' in options:
        builder.pw.metadata.options.queue_name = options['queue_name']

    if 'qos' in options:
        builder.pw.metadata.options.qos = options['qos']

    if 'account' in options:
        builder.pw.metadata.options.account = options['account']

    builder.pw.metadata.options.prepend_text = options['prepend_text']

    builder.pw.code = load_code(options['code_id'])
    builder.pw.pseudos = validate_and_prepare_pseudos_inputs(
        builder.pw.structure, pseudo_family=Str(options['pseudo_family']))

    return builder
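
# --- Hedged usage sketch (not part of the original example): how the builder
# --- returned by main() might be launched. The option keys mirror the ones
# --- main() reads; the code label and pseudo family name are placeholders.
if __name__ == '__main__':
    from aiida import load_profile
    from aiida.engine import submit

    load_profile()
    options = {
        'code_id': 'pw@localhost',               # placeholder code label
        'pseudo_family': 'sssp-pbe-efficiency',  # placeholder family label
        'max_wallclock_seconds': 3600,
        'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
        'prepend_text': '',
    }
    node = submit(main(options))
    print('Submitted PwBaseWorkChain<{}>'.format(node.pk))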
Example #2
"""
Calculations provided by aiida_ce.

Register calculations via the "aiida.calculations" entry point in setup.json.
"""
from __future__ import absolute_import

import six

from aiida.common import datastructures
from aiida.engine import CalcJob
from aiida.orm import SinglefileData
from aiida.plugins import DataFactory

DiffParameters = DataFactory('ce')


class DiffCalculation(CalcJob):
    """
    AiiDA calculation plugin wrapping the diff executable.

    Simple AiiDA plugin wrapper for 'diffing' two files.
    """
    @classmethod
    def define(cls, spec):
        """Define inputs and outputs of the calculation."""
        # yapf: disable
        super(DiffCalculation, cls).define(spec)
        spec.input('metadata.options.resources', valid_type=dict, default={'num_machines': 1, 'num_mpiprocs_per_machine': 1})
        spec.input('metadata.options.parser_name', valid_type=six.string_types, default='ce')
        spec.input('metadata.options.output_filename', valid_type=six.string_types, default='patch.diff')
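        # The snippet breaks off here. A plausible continuation, sketched from
        # the generic aiida-diff plugin template this class is based on (the
        # port names below are assumptions, not necessarily this plugin's spec):
        spec.input('parameters', valid_type=DiffParameters, help='Command line parameters for diff.')
        spec.input('file1', valid_type=SinglefileData, help='First file to be compared.')
        spec.input('file2', valid_type=SinglefileData, help='Second file to be compared.')
        spec.output('ce', valid_type=SinglefileData, help='The diff output file.')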
Example #3
from aiida import load_profile

from aiida.orm import Code
from aiida.plugins import DataFactory
from aiida.engine import run, submit
from aiida.orm.nodes.data.upf import get_pseudos_from_structure

load_profile()

StructureData = DataFactory('structure')
Dict = DataFactory('dict')
KpointsData = DataFactory('array.kpoints')

###############################
# Set your values here
codename = 'Quantum ESPRESSO@mbxp'
pseudo_family = 'pbe-spn-kjpaw_psl'
# These require setting up beforehand
###############################

code = Code.get_from_string(codename)
builder = code.get_builder()

# BaTiO3 cubic structure
alat = 4.  # angstrom
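# The snippet breaks off here. In the standard AiiDA Quantum ESPRESSO tutorial
# this example follows, the cubic BaTiO3 structure is typically completed along
# these lines (a sketch, not the original file's code):
cell = [[alat, 0., 0.], [0., alat, 0.], [0., 0., alat]]
structure = StructureData(cell=cell)
structure.append_atom(position=(0., 0., 0.), symbols='Ba')
structure.append_atom(position=(alat / 2., alat / 2., alat / 2.), symbols='Ti')
structure.append_atom(position=(alat / 2., alat / 2., 0.), symbols='O')
structure.append_atom(position=(alat / 2., 0., alat / 2.), symbols='O')
structure.append_atom(position=(0., alat / 2., alat / 2.), symbols='O')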
Example #4
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv()

from aiida.engine import WorkChain, ToContext
from aiida.engine import workfunction

from aiida.orm import Code, load_node
from aiida.plugins import CalculationFactory, DataFactory, WorkflowFactory
from aiida.engine import run, submit

from aiida.orm import Str, Float, Bool, Int

import numpy as np

ForceConstantsData = DataFactory('phonopy.force_constants')
ForceSetsData = DataFactory('phonopy.force_sets')
PhononDosData = DataFactory('phonopy.phonon_dos')
NacData = DataFactory('phonopy.nac')

Dict = DataFactory('dict')
ArrayData = DataFactory('array')
StructureData = DataFactory('structure')

__testing__ = False


def generate_phono3py_params(structure,
                             parameters,
                             force_sets,
                             nac_data=None,
Example #5
def test_run_nio_afm_fullopt(db_test_app, get_structure,
                             upload_basis_set_family, data_regression):
    # type: (AiidaTestApp) -> None
    """Test running a calculation."""
    code = db_test_app.get_or_create_code("crystal17.main")

    # Prepare input parameters
    params = {
        "title": "NiO Bulk with AFM spin",
        "geometry.optimise.type": "FULLOPTG",
        "scf.single": "UHF",
        "scf.k_points": (8, 8),
        "scf.spinlock.SPINLOCK": (0, 15),
        "scf.numerical.FMIXING": 30,
        "scf.post_scf": ["PPAN"],
    }

    instruct = get_structure("NiO_afm")

    kind_data = KindData(
        data={
            "kind_names": ["Ni1", "Ni2", "O"],
            "spin_alpha": [True, False, False],
            "spin_beta": [False, True, False],
        })

    sym_calc = run_get_node(
        WorkflowFactory("crystal17.sym3d"),
        structure=instruct,
        settings=DataFactory("dict")(dict={
            "symprec": 0.01,
            "compute_primitive": True
        }),
    ).node
    instruct = sym_calc.get_outgoing().get_node_by_label("structure")
    symmetry = sym_calc.get_outgoing().get_node_by_label("symmetry")

    upload_basis_set_family()

    # set up calculation
    process_class = code.get_builder().process_class
    builder = process_class.create_builder(
        params,
        instruct,
        "sto3g",
        symmetry=symmetry,
        kinds=kind_data,
        code=code,
        metadata=db_test_app.get_default_metadata(),
        unflatten=True,
    )

    output = run_get_node(builder)
    calc_node = output.node

    db_test_app.check_calculation(calc_node, ["results", "structure"])

    results_attributes = calc_node.outputs.results.attributes
    results_attributes.pop("execution_time_seconds")
    results_attributes.pop("parser_version")
    results_attributes = recursive_round(results_attributes, 9)

    data_regression.check(results_attributes)

    data_regression.check(
        recursive_round(calc_node.outputs.structure.attributes, 9),
        "test_run_nio_afm_fullopt_struct",
    )
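
# recursive_round is used above but not defined in the snippet; a minimal
# sketch of what such a helper presumably does (an assumption; the real
# helper in the test suite may differ):
def recursive_round(data, ndigits):
    """Round all floats in a nested dict/list structure to `ndigits` digits."""
    if isinstance(data, float):
        return round(data, ndigits)
    if isinstance(data, dict):
        return {key: recursive_round(value, ndigits) for key, value in data.items()}
    if isinstance(data, (list, tuple)):
        return [recursive_round(value, ndigits) for value in data]
    return data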
Example #6
# -*- coding: utf-8 -*-
"""Base class for data types representing pseudo potentials."""
import typing

from aiida.common.constants import elements
from aiida.common.exceptions import StoringNotAllowed
from aiida.common.files import md5_from_filelike
from aiida.plugins import DataFactory

SingleFileData = DataFactory('singlefile')  # pylint: disable=invalid-name

__all__ = ('PseudoPotentialData', )


class PseudoPotentialData(SingleFileData):
    """Base class for data types representing pseudo potentials."""

    _key_element = 'element'
    _key_md5 = 'md5'

    @classmethod
    def validate_element(cls, element: str):
        """Validate the given element symbol.

        :param element: the symbol of the element following the IUPAC naming standard.
        :raises ValueError: if the element symbol is invalid.
        """
        if element not in [values['symbol'] for values in elements.values()]:
            raise ValueError(f'`{element}` is not a valid element.')

    def validate_md5(self, md5: str):
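        # The method body is cut off in the snippet. A plausible completion,
        # sketched from the md5_from_filelike import at the top of the file
        # (an assumption, not necessarily the package's exact code):
        with self.open(mode='rb') as handle:
            md5_file = md5_from_filelike(handle)
        if md5 != md5_file:
            raise ValueError('md5 does not match that of the stored file: {} != {}'.format(md5, md5_file))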
Example #7
# -*- coding: utf-8 -*-
"""Run RASPA calculation with blocked pockets."""
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
import click

from aiida.common import NotExistent
from aiida.engine import run_get_pk, run
from aiida.orm import Code, Dict, SinglefileData
from aiida.plugins import DataFactory

# data objects
CifData = DataFactory('cif')  # pylint: disable=invalid-name


def example_block_pockets(raspa_code, submit=True):
    """Prepare and submit RASPA calculation with blocked pockets."""

    # parameters
    parameters = Dict(
        dict={
            "GeneralSettings": {
                "SimulationType": "MonteCarlo",
                "NumberOfCycles": 400,
                "NumberOfInitializationCycles": 200,
                "PrintEvery": 200,
                "Forcefield": "GenericMOFs",
                "EwaldPrecision": 1e-6,
                "CutOff": 12.0,
Example #8
def main(options):

    ###### setting the gw parameters ######

    Dict = DataFactory('dict')

    params_gw = {
            'HF_and_locXC': True,
            'dipoles': True,
            'ppa': True,
            'gw0': True,
            'em1d': True,
            'Chimod': 'hartree',
            #'EXXRLvcs': 40,
            #'EXXRLvcs_units': 'Ry',
            'BndsRnXp': [1, 10],
            'NGsBlkXp': 2,
            'NGsBlkXp_units': 'Ry',
            'GbndRnge': [1, 10],
            'DysSolver': "n",
            'QPkrange': [[1, 1, 8, 9]],
            'DIP_CPU': "1 1 1",
            'DIP_ROLEs': "k c v",
            'X_CPU': "1 1 1 1",
            'X_ROLEs': "q k c v",
            'SE_CPU': "1 1 1",
            'SE_ROLEs': "q qp b",
        }
    params_gw = Dict(dict=params_gw)

    ###### creation of the YamboRestart ######

    builder = YamboRestart.get_builder()
    builder.yambo.metadata.options.max_wallclock_seconds = \
            options['max_wallclock_seconds']
    builder.yambo.metadata.options.resources = options['resources']

    if 'queue_name' in options:
        builder.yambo.metadata.options.queue_name = options['queue_name']

    if 'qos' in options:
        builder.yambo.metadata.options.qos = options['qos']

    if 'account' in options:
        builder.yambo.metadata.options.account = options['account']

    builder.yambo.metadata.options.prepend_text = options['prepend_text']

    builder.yambo.parameters = params_gw

    builder.yambo.precode_parameters = Dict(dict={})
    builder.yambo.settings = Dict(dict={'INITIALISE': False, 'COPY_DBS': False})

    builder.yambo.code = load_code(options['yambocode_id'])
    builder.yambo.preprocessing_code = load_code(options['yamboprecode_id'])

    builder.parent_folder = load_node(options['parent_pk']).outputs.remote_folder

    builder.max_iterations = Int(options['max_iterations'])

    return builder
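
# --- Hedged usage sketch (not in the original example; assumes YamboRestart,
# --- load_code, load_node and Int are imported from aiida_yambo / aiida.orm at
# --- module level). All option values below are placeholders.
if __name__ == '__main__':
    from aiida import load_profile
    from aiida.engine import submit

    load_profile()
    options = {
        'yambocode_id': 'yambo@localhost',   # placeholder code labels
        'yamboprecode_id': 'p2y@localhost',
        'parent_pk': 1234,                   # placeholder pk of a finished nscf run
        'max_wallclock_seconds': 3600,
        'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
        'prepend_text': '',
        'max_iterations': 3,
    }
    submit(main(options))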
Example #9
def test_submit(network_code, submit=True):
    """Example of how to submit a zeo++ calculation.

    Simply copy the contents of this function into a script.
    """

    if not network_code:
        network_code = tests.get_code(entry_point='zeopp.network')

    # For allowed keys, print(NetworkParameters.schema)
    parameters = DataFactory('zeopp.parameters')(
        dict={
            'ha': 'LOW',  # just for speed; use 'DEF' for production!
            'cssr': True,  # convert to cssr
            'res': True,
            'sa': [1.86, 1.86, 1000],  # compute surface area
            'vol': [0.0, 0.0, 1000],  # compute geometric pore volume
            #'block': [2.0, 100]  #compute blocking spheres for a big molecule
        })

    this_dir = os.path.dirname(os.path.realpath(__file__))
    structure = DataFactory('cif')(file=os.path.join(this_dir, 'HKUST-1.cif'))

    # set up calculation
    inputs = {
        'code': network_code,
        'parameters': parameters,
        'structure': structure,
        'metadata': {
            'options': {
                'max_wallclock_seconds': 1 * 60,
            },
            'label':
            'aiida_zeopp example calculation',
            'description':
            'Converts .cif to .cssr format, computes surface area, and pore volume',
        },
    }

    NetworkCalculation = CalculationFactory('zeopp.network')  # pylint: disable=invalid-name
    print('Running NetworkCalculation: please wait...')
    if submit:
        engine.submit(NetworkCalculation, **inputs)
    else:
        result, node = engine.run_get_node(
            NetworkCalculation, **inputs)  # or use aiida.engine.submit

        print('NetworkCalculation<{}> terminated.'.format(node.pk))

        print('\nComputed output_parameters {}\n'.format(
            str(result['output_parameters'])))
        outputs = result['output_parameters'].get_dict()

        print('Density ({}): {:.3f}'.format(outputs['Density_unit'],
                                            outputs['Density']))

        print('Largest free sphere ({}): {:.3f}'.format(
            outputs['Largest_free_sphere_unit'],
            outputs['Largest_free_sphere']))

        print('Largest included sphere ({}): {:.3f}'.format(
            outputs['Largest_included_sphere_unit'],
            outputs['Largest_included_sphere']))

        print('Nitrogen accessible surface area ({}): {:.3f}'.format(
            outputs['ASA_m^2/g_unit'], outputs['ASA_m^2/g']))

        print('Geometric pore volume ({}): {:.3f}'.format(
            outputs['AV_cm^3/g_unit'], outputs['AV_cm^3/g']))

        # print('Number of blocking spheres needed for probe radius of {:.2f}A: {}'.
        #       format(
        #           outputs['Input_block'][0],
        #           outputs['Number_of_blocking_spheres']))
        # print('Blocking spheres file: SinglefileData<{}>'.format(
        #     node.outputs.block.pk))

        print('CSSR structure: SinglefileData<{}>'.format(
            node.outputs.structure_cssr.pk))
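
# Hedged sketch of a possible entry point (the code label is a placeholder;
# the original script presumably obtains the code elsewhere):
if __name__ == '__main__':
    from aiida import load_profile
    from aiida.orm import load_code

    load_profile()
    test_submit(load_code('network@localhost'), submit=False)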
Example #10
#!/usr/bin/env python  # pylint: disable=invalid-name
# -*- coding: utf-8 -*-
"""Run example isotherm calculation with HKUST1 framework."""

import os
import click

from aiida.engine import run
from aiida.plugins import DataFactory, WorkflowFactory
from aiida.orm import Code, Dict, Str

# Workchain objects
IsothermWorkChain = WorkflowFactory('lsmo.isotherm')  # pylint: disable=invalid-name

# Data objects
CifData = DataFactory('cif')  # pylint: disable=invalid-name
NetworkParameters = DataFactory('zeopp.parameters')  # pylint: disable=invalid-name


@click.command('cli')
@click.argument('raspa_code_label')
@click.argument('zeopp_code_label')
def main(raspa_code_label, zeopp_code_label):
    """Prepare inputs and submit the Isotherm workchain.
    Usage: verdi run run_isotherm_hkust1.py raspa@localhost network@localhost"""

    builder = IsothermWorkChain.get_builder()

    builder.metadata.label = "test"

    builder.raspa_base.raspa.code = Code.get_from_string(raspa_code_label)
Example #11
    def test_fleur_fleurinpdata_entry_point(self):
        from aiida.plugins import DataFactory
        from aiida_fleur.data.fleurinp import FleurinpData

        fleurinp = DataFactory('fleur.fleurinp')
        assert fleurinp == FleurinpData
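
    # The same factory round-trip works for any registered entry point; an
    # illustrative companion check with a core data class (not part of the
    # original test file):
    def test_core_dict_entry_point(self):
        from aiida.orm import Dict
        from aiida.plugins import DataFactory

        assert DataFactory('dict') is Dict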
Example #12
def launch_cif_clean(cif_filter, cif_select, group_cif_raw, group_cif_clean, group_structure, group_workchain, node,
    max_entries, skip_check, parse_engine, daemon):
    """Run the `CifCleanWorkChain` on the entries in a group with raw imported CifData nodes.

    It will use the `cif_filter` and `cif_select` scripts of `cod-tools` to clean the input cif file. Additionally, if
    the `group-structure` option is passed, the workchain will also attempt to use the given parse engine to parse the
    cleaned `CifData` to obtain the structure and then use SeeKpath to find the primitive structure, which, if
    successful, will be added to the `group-structure` group.
    """
    # pylint: disable=too-many-arguments,too-many-locals,too-many-statements,too-many-branches
    import inspect
    from datetime import datetime

    from aiida import orm
    from aiida.engine import launch
    from aiida.plugins import DataFactory, WorkflowFactory
    from aiida_codtools.cli.utils.display import echo_utc
    from aiida_codtools.common.resources import get_default_options
    from aiida_codtools.common.utils import get_input_node

    CifData = DataFactory('cif')  # pylint: disable=invalid-name
    CifCleanWorkChain = WorkflowFactory('codtools.cif_clean')  # pylint: disable=invalid-name

    # Collect the dictionary of not None parameters passed to the launch script and print to screen
    local_vars = locals()
    launch_parameters = {}
    for arg in inspect.getargspec(launch_cif_clean.callback).args:  # pylint: disable=deprecated-method
        if arg in local_vars and local_vars[arg]:
            launch_parameters[arg] = local_vars[arg]

    click.echo('=' * 80)
    click.echo('Starting on {}'.format(datetime.utcnow().isoformat()))
    click.echo('Launch parameters: {}'.format(launch_parameters))
    click.echo('-' * 80)

    if group_cif_raw is not None:

        # Get CifData nodes that should actually be submitted according to the input filters
        builder = orm.QueryBuilder()
        builder.append(orm.Group, filters={'id': {'==': group_cif_raw.pk}}, tag='group')

        if skip_check:
            builder.append(CifData, with_group='group', project=['*'])
        else:
            # Get CifData nodes that already have an associated workchain node in the `group_workchain` group.
            submitted = orm.QueryBuilder()
            submitted.append(orm.WorkChainNode, tag='workchain')
            submitted.append(orm.Group, filters={'id': {'==': group_workchain.pk}}, with_node='workchain')
            submitted.append(orm.CifData, with_outgoing='workchain', tag='data', project=['id'])
            submitted_nodes = set(pk for entry in submitted.all() for pk in entry)

            if submitted_nodes:
                filters = {'id': {'!in': submitted_nodes}}
            else:
                filters = {}

            # Get all CifData nodes that are not included in the submitted node list
            builder.append(CifData, with_group='group', filters=filters, project=['*'])

        if max_entries is not None:
            builder.limit(int(max_entries))

        nodes = [entry[0] for entry in builder.all()]

    elif node is not None:

        nodes = [node]

    else:
        raise click.BadParameter('you have to specify either --group-cif-raw or --node')

    counter = 0

    node_cif_filter_parameters = get_input_node(orm.Dict, {
        'fix-syntax-errors': True,
        'use-c-parser': True,
        'use-datablocks-without-coordinates': True,
    })

    node_cif_select_parameters = get_input_node(orm.Dict, {
        'canonicalize-tag-names': True,
        'dont-treat-dots-as-underscores': True,
        'invert': True,
        'tags': '_publ_author_name,_citation_journal_abbrev',
        'use-c-parser': True,
    })

    node_parse_engine = get_input_node(orm.Str, parse_engine)
    node_site_tolerance = get_input_node(orm.Float, 5E-4)
    node_symprec = get_input_node(orm.Float, 5E-3)

    for cif in nodes:

        inputs = {
            'cif': cif,
            'cif_filter': {
                'code': cif_filter,
                'parameters': node_cif_filter_parameters,
                'metadata': {
                    'options': get_default_options()
                }
            },
            'cif_select': {
                'code': cif_select,
                'parameters': node_cif_select_parameters,
                'metadata': {
                    'options': get_default_options()
                }
            },
            'parse_engine': node_parse_engine,
            'site_tolerance': node_site_tolerance,
            'symprec': node_symprec,
        }

        if group_cif_clean is not None:
            inputs['group_cif'] = group_cif_clean

        if group_structure is not None:
            inputs['group_structure'] = group_structure

        if daemon:
            workchain = launch.submit(CifCleanWorkChain, **inputs)
            echo_utc('CifData<{}> submitting: {}<{}>'.format(cif.pk, CifCleanWorkChain.__name__, workchain.pk))
        else:
            echo_utc('CifData<{}> running: {}'.format(cif.pk, CifCleanWorkChain.__name__))
            _, workchain = launch.run_get_node(CifCleanWorkChain, **inputs)

        if group_workchain is not None:
            group_workchain.add_nodes([workchain])

        counter += 1

        if max_entries is not None and counter >= max_entries:
            break

    click.echo('-' * 80)
    click.echo('Submitted {} new workchains'.format(counter))
    click.echo('Stopping on {}'.format(datetime.utcnow().isoformat()))
    click.echo('=' * 80)
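
# get_default_options, imported above from aiida_codtools.common.resources, is
# not shown in the snippet; a plausible stand-in returning typical scheduler
# options (an assumption about its shape, not the package's actual code):
def _default_options_sketch(num_machines=1, max_wallclock_seconds=1800):
    return {
        'resources': {'num_machines': num_machines, 'num_mpiprocs_per_machine': 1},
        'max_wallclock_seconds': max_wallclock_seconds,
    }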
Example #13
from aiida.common.utils import classproperty
from aiida.common.exceptions import (InputValidationError, ValidationError)
from aiida.common.datastructures import (CalcInfo, CodeInfo)
from aiida.engine import CalcJob
from aiida.plugins import DataFactory
from aiida_kkr.tools.common_workfunctions import generate_inputcard_from_structure, check_2Dinput_consistency, vca_check
from aiida.common.exceptions import UniquenessError, NotExistent
import os
import six

__copyright__ = (u"Copyright (c), 2017, Forschungszentrum Jülich GmbH, "
                 "IAS-1/PGI-1, Germany. All rights reserved.")
__license__ = "MIT license, see LICENSE.txt file"
__version__ = "0.5.1"
__contributors__ = ("Jens Broeder", "Philipp Rüßmann")

Dict = DataFactory('dict')
StructureData = DataFactory('structure')
RemoteData = DataFactory('remote')
SingleFileData = DataFactory('singlefile')


class VoronoiCalculation(CalcJob):
    """
    AiiDA calculation plugin for a voronoi calculation (creation of starting potential and shapefun).
    """

    ####################
    # File names etc.
    ####################
    # calculation plugin version
Example #14
    def test_dos_startpot_wc(self):
        """
        simple Cu noSOC, FP, lmax2 full example using scf workflow for impurity host-in-host
        """
        from aiida.orm import Code, load_node
        from aiida.plugins import DataFactory
        from aiida.orm.querybuilder import QueryBuilder
        from masci_tools.io.kkr_params import kkrparams
        from aiida_kkr.workflows.kkr_imp_dos import kkr_imp_dos_wc
        from numpy import array

        Dict = DataFactory('dict')
        StructureData = DataFactory('structure')

        # prepare computer and code (needed so that the workflow can find and run the codes)
        prepare_code(kkrimp_codename, codelocation, computername, workdir)
        prepare_code(kkr_codename, codelocation, computername, workdir)

        wfd = kkr_imp_dos_wc.get_wf_defaults()

        options = {
            'queue_name': queuename,
            'resources': {
                "num_machines": 1
            },
            'max_wallclock_seconds': 5 * 60,
            'use_mpi': False,
            'custom_scheduler_commands': ''
        }
        options = Dict(dict=options)

        # The scf-workflow also needs the voronoi and KKR codes to be able to run the calculations
        KKRimpCode = Code.get_from_string(kkrimp_codename + '@' + computername)
        KKRCode = Code.get_from_string(kkr_codename + '@' + computername)

        # import previous GF writeout
        from aiida.tools.importexport import import_data
        import_data('files/db_dump_kkrflex_create.tar.gz')
        GF_host_calc = load_node('baabef05-f418-4475-bba5-ef0ee3fd5ca6')

        # now create a SingleFileData node containing the impurity starting potential
        from aiida_kkr.tools.common_workfunctions import neworder_potential_wf
        from numpy import loadtxt
        neworder_pot1 = [
            int(i)
            for i in loadtxt(GF_host_calc.outputs.retrieved.open('scoef'),
                             skiprows=1)[:, 3] - 1
        ]
        settings_dict = {
            'pot1': 'out_potential',
            'out_pot': 'potential_imp',
            'neworder': neworder_pot1
        }
        settings = Dict(dict=settings_dict)
        startpot_imp_sfd = neworder_potential_wf(
            settings_node=settings,
            parent_calc_folder=GF_host_calc.outputs.remote_folder)

        label = 'kkrimp_dos Cu host_in_host'
        descr = 'kkrimp_dos workflow for Cu bulk'

        imp_info = GF_host_calc.inputs.impurity_info.get_dict()
        imp_info['Rcut'] = 2.5533
        print(imp_info)

        # create process builder to set parameters
        builder = kkr_imp_dos_wc.get_builder()
        builder.metadata.description = descr
        builder.metadata.label = label
        builder.options = options
        builder.kkr = KKRCode
        builder.kkrimp = KKRimpCode
        builder.imp_pot_sfd = startpot_imp_sfd
        builder.wf_parameters = Dict(dict=wfd)
        builder.impurity_info = Dict(dict=imp_info)
        builder.host_remote = GF_host_calc.outputs.remote_folder

        # now run calculation
        from aiida.engine import run
        print(builder)
        out = run(builder)
        print(out)

        assert 'last_calc_info' in out.keys()
        assert 'last_calc_output_parameters' in out.keys()
        assert 'workflow_info' in out.keys()
        assert 'dos_data' in out.keys()
        assert 'dos_data_interpol' in out.keys()
        assert len(out['dos_data_interpol'].get_y()) == 5
        assert len(out['dos_data_interpol'].get_y()[0]) == 3
        assert len(out['dos_data_interpol'].get_y()[0][0]) == 20
Example #15
def main(options):

    ###### setting the lattice structure ######

    alat = 2.4955987320  # Angstrom
    the_cell = [[1.000000 * alat, 0.000000, 0.000000],
                [-0.500000 * alat, 0.866025 * alat, 0.000000],
                [0.000000, 0.000000, 6.4436359260]]

    atoms = Atoms('BNNB', [(1.2477994910, 0.7204172280, 0.0000000000),
                           (-0.0000001250, 1.4408346720, 0.0000000000),
                           (1.2477994910, 0.7204172280, 3.2218179630),
                           (-0.0000001250, 1.4408346720, 3.2218179630)],
                  cell=[1, 1, 1])
    atoms.set_cell(the_cell, scale_atoms=False)
    atoms.set_pbc([True, True, True])

    StructureData = DataFactory('structure')
    structure = StructureData(ase=atoms)

    ###### setting the kpoints mesh ######

    KpointsData = DataFactory('array.kpoints')
    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 2])

    ###### setting the scf parameters ######

    Dict = DataFactory('dict')
    params_scf = {
        'CONTROL': {
            'calculation': 'scf',
            'verbosity': 'high',
            'wf_collect': True
        },
        'SYSTEM': {
            'ecutwfc': 130.,
            'force_symmorphic': True,
            'nbnd': 20
        },
        'ELECTRONS': {
            'mixing_mode': 'plain',
            'mixing_beta': 0.7,
            'conv_thr': 1.e-8,
            'diago_thr_init': 5.0e-6,
            'diago_full_acc': True
        },
    }

    parameter_scf = Dict(dict=params_scf)

    params_nscf = {
        'CONTROL': {
            'calculation': 'nscf',
            'verbosity': 'high',
            'wf_collect': True
        },
        'SYSTEM': {
            'ecutwfc': 130.,
            'force_symmorphic': True,
            'nbnd': 500
        },
        'ELECTRONS': {
            'mixing_mode': 'plain',
            'mixing_beta': 0.6,
            'conv_thr': 1.e-8,
            'diagonalization': 'david',
            'diago_thr_init': 5.0e-6,
            'diago_full_acc': True
        },
    }

    parameter_nscf = Dict(dict=params_nscf)

    KpointsData = DataFactory('array.kpoints')
    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 2])

    alat = 2.4955987320  # Angstrom
    the_cell = [[1.000000 * alat, 0.000000, 0.000000],
                [-0.500000 * alat, 0.866025 * alat, 0.000000],
                [0.000000, 0.000000, 6.4436359260]]

    atoms = Atoms('BNNB', [(1.2477994910, 0.7204172280, 0.0000000000),
                           (-0.0000001250, 1.4408346720, 0.0000000000),
                           (1.2477994910, 0.7204172280, 3.2218179630),
                           (-0.0000001250, 1.4408346720, 3.2218179630)],
                  cell=[1, 1, 1])
    atoms.set_cell(the_cell, scale_atoms=False)
    atoms.set_pbc([True, True, True])

    StructureData = DataFactory('structure')
    structure = StructureData(ase=atoms)

    params_gw = {
        'HF_and_locXC': True,
        'dipoles': True,
        'ppa': True,
        'gw0': True,
        'em1d': True,
        'Chimod': 'hartree',
        #'EXXRLvcs': 40,
        #'EXXRLvcs_units': 'Ry',
        'BndsRnXp': [1, 10],
        'NGsBlkXp': 2,
        'NGsBlkXp_units': 'Ry',
        'GbndRnge': [1, 10],
        'DysSolver': "n",
        'QPkrange': [[1, 1, 8, 9]],
        'DIP_CPU': "1 1 1",
        'DIP_ROLEs': "k c v",
        'X_CPU': "1 1 1 1",
        'X_ROLEs': "q k c v",
        'SE_CPU': "1 1 1",
        'SE_ROLEs': "q qp b",
    }
    params_gw = Dict(dict=params_gw)

    builder = YamboConvergence.get_builder()

    ##################scf+nscf part of the builder
    builder.ywfl.scf.pw.structure = structure
    builder.ywfl.scf.pw.parameters = parameter_scf
    builder.kpoints = kpoints
    builder.ywfl.scf.pw.metadata.options.max_wallclock_seconds = \
            options['max_wallclock_seconds']
    builder.ywfl.scf.pw.metadata.options.resources = options['resources']

    if 'queue_name' in options:
        builder.ywfl.scf.pw.metadata.options.queue_name = options['queue_name']

    if 'qos' in options:
        builder.ywfl.scf.pw.metadata.options.qos = options['qos']

    if 'account' in options:
        builder.ywfl.scf.pw.metadata.options.account = options['account']

    builder.ywfl.scf.pw.metadata.options.prepend_text = options['prepend_text']

    builder.ywfl.nscf.pw.structure = builder.ywfl.scf.pw.structure
    builder.ywfl.nscf.pw.parameters = parameter_nscf
    builder.ywfl.nscf.pw.metadata = builder.ywfl.scf.pw.metadata

    builder.ywfl.scf.pw.code = load_code(options['pwcode_id'])
    builder.ywfl.nscf.pw.code = load_code(options['pwcode_id'])
    builder.ywfl.scf.pw.pseudos = validate_and_prepare_pseudos_inputs(
        builder.ywfl.scf.pw.structure,
        pseudo_family=Str(options['pseudo_family']))
    builder.ywfl.nscf.pw.pseudos = builder.ywfl.scf.pw.pseudos

    ##################yambo part of the builder
    builder.ywfl.yres.yambo.metadata.options.max_wallclock_seconds = \
            options['max_wallclock_seconds']
    builder.ywfl.yres.yambo.metadata.options.resources = options['resources']

    if 'queue_name' in options:
        builder.ywfl.yres.yambo.metadata.options.queue_name = options[
            'queue_name']

    if 'qos' in options:
        builder.ywfl.yres.yambo.metadata.options.qos = options['qos']

    if 'account' in options:
        builder.ywfl.yres.yambo.metadata.options.account = options['account']

    builder.ywfl.yres.yambo.parameters = params_gw
    builder.ywfl.yres.yambo.precode_parameters = Dict(dict={})
    builder.ywfl.yres.yambo.settings = Dict(dict={
        'INITIALISE': False,
        'COPY_DBS': False
    })
    builder.ywfl.yres.max_iterations = Int(5)

    builder.ywfl.yres.yambo.preprocessing_code = load_code(
        options['yamboprecode_id'])
    builder.ywfl.yres.yambo.code = load_code(options['yambocode_id'])
    try:
        builder.parent_folder = load_node(
            options['parent_pk']).outputs.remote_folder
    except Exception:
        pass

    builder.p2y = builder.ywfl
    builder.precalc = builder.ywfl  # for simplicity, to specify if PRE_CALC is True

    builder.workflow_settings = Dict(
        dict={
            'type': '1D_convergence',
            'what': 'gap',
            'where': [(1, 8, 1, 9)],
            'PRE_CALC': False,
        })

    #'what': 'single-levels','where':[(1,8),(1,9)]
    var_to_conv = [
        {'var': ['BndsRnXp', 'GbndRnge', 'NGsBlkXp'],
         'delta': [[0, 100], [0, 100], 2], 'steps': 3,
         'max_iterations': 2, 'conv_thr': 0.05},
        {'var': ['BndsRnXp', 'GbndRnge'],
         'delta': [[0, 100], [0, 100]], 'steps': 3,
         'max_iterations': 2, 'conv_thr': 0.02},
        {'var': 'NGsBlkXp', 'delta': 2, 'steps': 3,
         'max_iterations': 2, 'conv_thr': 0.02},
        {'var': ['BndsRnXp', 'GbndRnge', 'NGsBlkXp'],
         'delta': [[0, 150], [0, 150], 2], 'steps': 3,
         'max_iterations': 2, 'conv_thr': 0.02},
        {'var': 'kpoint_mesh', 'delta': [2, 2, 0],
         'max_iterations': 2, 'conv_thr': 0.1},
    ]

    for i, var in enumerate(var_to_conv):
        print('{}-th variable will be {}'.format(i + 1, var['var']))

    builder.parameters_space = List(list=var_to_conv)

    return builder
Example #16
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This test runs the Fleur band workflow
"""
from __future__ import absolute_import
from aiida import load_dbenv, is_dbenv_loaded
if not is_dbenv_loaded():
    load_dbenv(profile='aiida_test')
from aiida.orm import Code, Dict, load_node
from aiida.plugins import DataFactory
#from aiida.work.run import run
from aiida_fleur.workflows.band import FleurBandWorkChain

StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')
KpointsData = DataFactory('array.kpoints')
FleurinpData = DataFactory('fleur.fleurinp')

###############################
# Set your values here
codename2 = 'fleur_iff@local_iff'  #'fleur_iff003_v0_27@iff003'
codename2 = 'fleur_iff003_v0_27@iff003'
###############################

code2 = Code.get_from_string(codename2)

fleurinp = load_node(1684)
fleur_calc = load_node(1693)
remote = fleur_calc.out.remote_folder
wf_para = Dict(dict={'queue': 'th123_node'})
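
# The snippet ends here. A hedged sketch of how these inputs might be handed
# to the workchain; the port names (fleur, fleurinp, remote, wf_parameters)
# are assumptions, not taken from FleurBandWorkChain's actual spec:
from aiida.engine import submit

band_wc = submit(FleurBandWorkChain, fleur=code2, fleurinp=fleurinp,
                 remote=remote, wf_parameters=wf_para)
print('Submitted FleurBandWorkChain<{}>'.format(band_wc.pk))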
Example #17
# -*- coding: utf-8 -*-
"""Group to represent a pseudo potential family with pseudos in UPF format."""
from typing import Union

from aiida.common.lang import type_check
from aiida.plugins import DataFactory

from .pseudo import PseudoPotentialFamily

__all__ = ('UpfFamily', )

StructureData = DataFactory('structure')  # pylint: disable=invalid-name
UpfData = DataFactory('pseudo.upf')  # pylint: disable=invalid-name


class UpfFamily(PseudoPotentialFamily):
    """Group to represent a pseudo potential family with pseudos in UPF format."""

    _pseudo_type = UpfData
    _key_cutoffs = '_cutoffs'

    @classmethod
    def validate_cutoffs(cls, elements: set, cutoffs: dict) -> None:
        """Validate a cutoff dictionary for a given set of elements.

        :param elements: set of elements for which to validate the cutoffs dictionary.
        :param cutoffs: dictionary with recommended cutoffs. Format: keys are the element symbols and the values are
            dictionaries themselves, each with two keys, ``cutoff_wfc`` and ``cutoff_rho``, containing a float value
            with the recommended cutoff to be used for the wave functions and charge density, respectively.
        :raises ValueError: if the set of elements and those defined in the cutoffs do not match exactly, or if the
            cutoffs dictionary has an invalid format.
Example #18
def test_process(logger_code):
    """
    Test running a calculation.

    Also checks its outputs.
    """
    import os

    import numpy as np

    from aiida.plugins import DataFactory, CalculationFactory
    from aiida.engine import run
    from aiida.common.extendeddicts import AttributeDict

    from aiida_logger.tests import TEST_DIR  # pylint: disable=wrong-import-position

    # Prepare input parameters
    parameters = AttributeDict()
    parameters.type = 'gc'
    parameters.comment_line = 0
    parameters.data_start_line = 2
    parameters.data_layout = [
        [{'time': '%m/%d/%y %H:%M:%S'}, {'id': 'int'},
         {'He concentration': 'float'}, {'H2 concentration': 'float'},
         {'O2 concentration': 'float'}, {'N2 concentration': 'float'},
         {'CH4 concentration': 'float'}, {'CO concentration': 'float'},
         {'ignore': 'float'},
         {'He area': 'float'}, {'H2 area': 'float'}, {'O2 area': 'float'},
         {'N2 area': 'float'}, {'CH4 area': 'float'}, {'CO area': 'float'}],
        [{'time': '%m/%d/%y %H:%M:%S'}, {'id': 'int'},
         {'CO2 concentration': 'float'}, {'H2O concentration': 'float'},
         {'ignore': 'float'},
         {'CO2 area': 'float'}, {'H2O area': 'float'}]
    ]
    parameters.separator = '\t'

    # Define input files to use
    SinglefileData = DataFactory('singlefile')
    datafile = SinglefileData(
        file=os.path.join(TEST_DIR, 'input_files', 'gc_example.txt'))

    # Set up calculation
    inputs = {
        'code': logger_code,
        'parameters': DataFactory('dict')(dict=parameters),
        'datafiles': {
            'datafile': datafile
        },
        'metadata': {
            'options': {
                'resources': {
                    'num_machines': 1,
                    'num_mpiprocs_per_machine': 1
                },
                'parser_name': 'logger',
                'withmpi': False,
                'output_filename': 'logger.out'
            },
            'description': 'Test job submission with the aiida_logger plugin'
        },
    }
    result = run(CalculationFactory('logger'), **inputs)

    assert 'data' in result
    assert 'metadata' in result

    data = result['data']
    metadata = result['metadata']
    metadata = metadata.get_dict()
    assert 'labels' in metadata
    assert 'comments' in metadata
    assert metadata['labels'] == [[
        'He concentration', 'H2 concentration', 'O2 concentration',
        'N2 concentration', 'CH4 concentration', 'CO concentration', 'He area',
        'H2 area', 'O2 area', 'N2 area', 'CH4 area', 'CO area'
    ], ['CO2 concentration', 'H2O concentration', 'CO2 area', 'H2O area']]
    assert metadata['comments'] is None
    test_array = np.array(
        [[
            1.67940000e+00, 5.30499200e+02, 1.01360000e+00, 5.01270000e+00,
            1.79290000e+00, 1.31270000e+00, 4.81157000e+05, 5.63774800e+06,
            5.34600000e+03, 6.81400000e+03, 6.41225000e+05, 2.86470000e+05
        ],
         [
             2.03480000e+00, 4.68857000e+01, 6.76100000e-01, 2.60000000e+00,
             3.03790000e+00, 3.00000000e+00, 4.47750000e+04, 4.32223080e+07,
             1.56494000e+05, 5.00000000e+00, 8.11070000e+04, 2.00000000e+00
         ],
         [
             3.95690000e+00, 4.61128900e+02, 6.00000000e+00, 4.00000000e+00,
             4.64040000e+00, 3.09260000e+00, 7.12950000e+05, 4.08505819e+08,
             3.00000000e+00, 4.00000000e+00, 9.12864000e+05, 4.66342600e+06
         ],
         [
             4.04320000e+00, 4.72441000e+01, 6.84600000e-01, 4.00000000e+00,
             2.05930000e+00, 5.00250000e+00, 5.78310000e+04, 3.85849600e+06,
             2.37953000e+05, 3.00000000e+00, 4.30150000e+04, 5.74000000e+02
         ],
         [
             5.98640000e+00, 6.61407300e+02, 5.10400000e+00, 6.00000000e+00,
             6.85040000e+00, 4.88030000e+00, 6.50328000e+05, 4.90000150e+07,
             1.79270000e+04, 1.00000000e+00, 2.02969900e+06, 8.19799000e+05
         ],
         [
             6.04560000e+00, 6.72249000e+01, 0.00000000e+00, 6.00000000e+00,
             8.06700000e+00, 3.03250000e+00, 7.78050000e+04, 3.28244070e+07,
             1.00000000e+00, 1.00000000e+00, 3.72850000e+04, 1.67500000e+03
         ],
         [
             7.98970000e+00, 6.61008500e+02, 7.00000000e+00, 9.00000000e+00,
             1.31780000e+00, 5.71760000e+00, 5.54532000e+05, 8.90829209e+09,
             2.00000000e+00, 6.00000000e+00, 2.28981500e+06, 8.86354000e+05
         ]])
    np.testing.assert_allclose(data.get_array('channel_1'), test_array)
    test_array = np.array(
        [[6.763590e+01, 6.672400e+00, 3.569060e+05, 5.135330e+05],
         [1.054000e+00, 8.536100e+00, 4.834000e+04, 5.545181e+06],
         [2.647100e+00, 8.790090e+01, 5.776000e+03, 6.738000e+03],
         [3.061300e+00, 4.528800e+00, 5.216600e+04, 8.658100e+04],
         [6.312000e-01, 8.868000e-01, 8.564457e+06, 7.721000e+03],
         [2.064600e+00, 5.518600e+00, 1.390600e+04, 6.180700e+04],
         [6.746800e+00, 3.891000e+00, 3.151360e+05, 5.453800e+04]])
    np.testing.assert_allclose(data.get_array('channel_2'), test_array)
Example #19
    def get_structure(self):
        """Returns AiiDA StructureData."""
        from aiida.plugins import DataFactory
        return DataFactory('structure')(ase=self.get_ase())
Example #20
def plot_fleur_mn(nodelist, save=False, show=True, backend='bokeh'):
    """
    This methods takes any amount of AiiDA node as a list and starts
    the standard visualisation for it, if it finds one.

    Some nodes types it tries to display together if it knows how to.
    and if they are given as a list.

    param: save showed the plots be saved automatically

    """
    ###
    # Things to plot together
    all_nodes = {}
    ###
    ParameterData = DataFactory('dict')

    if not isinstance(nodelist, list):
        print('The nodelist provided: {}, type {} is not a list. I abort.'.format(nodelist, type(nodelist)))
        return None

    node_labels = []
    for node in nodelist:
        # first find out what we have then how to visualize
        if isinstance(node, int):  #pk
            node = load_node(node)
        if isinstance(node, (str, six.text_type)):  #uuid
            node = load_node(node)  #try

        if isinstance(node, Node):
            node_labels.append(node.label)
            if isinstance(node, WorkChainNode):
                output_list = node.get_outgoing().all()
                for out_link in output_list:
                    if 'output_' in out_link.link_label:
                        if 'wc' in out_link.link_label or 'wf' in out_link.link_label:
                            if 'para' in out_link.link_label:  # we are just looking for parameter
                                # nodes; structures, bands, dos and so on are treated differently
                                node = out_link.node  # we only visualize the last output node
            if isinstance(node, ParameterData):
                p_dict = node.get_dict()
                workflow_name = p_dict.get('workflow_name', None)
                cur_list = all_nodes.get(workflow_name, [])
                cur_list.append(node)
                all_nodes[workflow_name] = cur_list
            else:
                print(('I do not know how to visualize this node: {}, '
                       'type {} from the nodelist length {}'.format(node, type(node), len(nodelist))))
        else:
            print(('The node provided: {} of type {} in the nodelist length {}'
                   ' is not an AiiDA object'.format(node, type(node), len(nodelist))))

    #print(all_nodes)
    all_plot_res = []
    for node_key, nodelist1 in six.iteritems(all_nodes):
        try:
            plotf = FUNCTIONS_DICT[node_key]
        except KeyError:
            print(('Sorry, I do not know how to visualize'
                   ' these nodes (multiplot): {} {}'.format(node_key, nodelist1)))
            continue
        plot_res = plotf(nodelist1, labels=node_labels, save=save, show=show, backend=backend)
        all_plot_res.append(plot_res)
    return all_plot_res
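
# FUNCTIONS_DICT is referenced above but not defined in the snippet; it
# presumably maps a workflow name (as stored in the parameter node) to a
# plotting callable with the signature used above. An illustrative, clearly
# hypothetical stand-in:
FUNCTIONS_DICT = {
    # 'fleur_scf_wc': plot_fleur_scf,  # hypothetical plotting functions
    # 'fleur_eos_wc': plot_fleur_eos,
}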
Example #21
import numpy as np
import phonopy
from phonopy.harmonic.displacement import get_displacements_and_forces
from aiida.engine import WorkChain
from aiida.plugins import WorkflowFactory, DataFactory
from aiida.orm import Float, Int, QueryBuilder, Group, load_node
from aiida.engine import while_, if_, calcfunction
from aiida_phonopy.common.utils import phonopy_atoms_from_structure

Dict = DataFactory('dict')
PhonopyWorkChain = WorkflowFactory('phonopy.phonopy')


@calcfunction
def get_random_displacements(structure, number_of_snapshots, temperature,
                             **data):
    displacements = []
    forces = []
    energies = []

    for i in range(len(data) // 2):
        forces.append(data['forces_%d' % (i + 1)].get_array('force_sets'))
        if 'energies' in data['forces_%d' % (i + 1)].get_arraynames():
            energies.append(data['forces_%d' % (i + 1)].get_array('energies'))
        phonon_setting_info = data['ph_info_%d' % (i + 1)]
        dataset = phonon_setting_info['displacement_dataset']
        disps, _ = get_displacements_and_forces(dataset)
        displacements.append(disps)
    d = np.concatenate(displacements, axis=0)
    f = np.concatenate(forces, axis=0)
Example #22
    def test_kkrimp_full_wc(self):
        """
        simple Cu noSOC, FP, lmax2 full example using scf workflow for impurity host-in-host
        """
        from aiida.orm import Code, load_node
        from aiida.plugins import DataFactory
        from aiida.orm.querybuilder import QueryBuilder
        from masci_tools.io.kkr_params import kkrparams
        from aiida_kkr.workflows.kkr_imp import kkr_imp_wc
        from numpy import array

        Dict = DataFactory('dict')
        StructureData = DataFactory('structure')

        # prepare computer and code (needed so that the workflow can find and run the codes)
        prepare_code(voro_codename, codelocation, computername, workdir)
        prepare_code(kkr_codename, codelocation, computername, workdir)
        prepare_code(kkrimp_codename, codelocation, computername, workdir)

        options, wfd, voro_aux_settings = kkr_imp_wc.get_wf_defaults()

        wfd['nsteps'] = 20
        wfd['strmix'] = 0.05
        options = {
            'queue_name': queuename,
            'resources': {
                "num_machines": 1
            },
            'max_wallclock_seconds': 5 * 60,
            'use_mpi': False,
            'custom_scheduler_commands': ''
        }
        options = Dict(dict=options)
        voro_aux_settings['check_dos'] = False
        voro_aux_settings['dos_params']['kmesh'] = [10, 10, 10]
        voro_aux_settings['dos_params']['nepts'] = 10
        voro_aux_settings['natom_in_cls_min'] = 50
        voro_aux_settings['rclustz'] = 1.5

        voro_aux_settings = Dict(dict=voro_aux_settings)
        wf_inputs = Dict(dict=wfd)

        # The scf-workflow also needs the voronoi and KKR codes to be able to run the calculations
        KKRhostCode = Code.get_from_string(kkr_codename + '@' + computername)
        KKRimpCode = Code.get_from_string(kkrimp_codename + '@' + computername)
        VoroCode = Code.get_from_string(voro_codename + '@' + computername)

        imp_info = Dict(dict={
            'Rcut': 2.5533,
            'ilayer_center': 0,
            'Zimp': [30.]
        })

        from aiida.tools.importexport import import_data
        import_data('files/db_dump_kkrcalc.tar.gz')
        kkr_calc_remote = load_node(
            '3058bd6c-de0b-400e-aff5-2331a5f5d566').outputs.remote_folder

        label = 'kkrimp_scf full Cu host_in_host'
        descr = 'kkrimp_scf full workflow for Cu bulk including GF writeout and vorostart for starting potential'

        # create process builder to set parameters
        builder = kkr_imp_wc.get_builder()
        builder.metadata.description = descr
        builder.metadata.label = label
        builder.kkrimp = KKRimpCode
        builder.voronoi = VoroCode
        builder.kkr = KKRhostCode
        builder.options = options
        builder.voro_aux_parameters = voro_aux_settings
        builder.wf_parameters = wf_inputs
        builder.impurity_info = imp_info
        builder.remote_data_host = kkr_calc_remote

        # now run calculation
        from aiida.engine import run
        print(builder)
        out = run(builder)

        # check outcome
        n = out['workflow_info']
        n = n.get_dict()
        for sub in 'auxiliary_voronoi gf_writeout kkr_imp_sub'.split():
            assert sub in list(n.get('used_subworkflows').keys())

        kkrimp_sub = load_node(n['used_subworkflows']['kkr_imp_sub'])
        assert kkrimp_sub.outputs.workflow_info.get_dict().get('successful')
Example #23
def test_restart_wf_submit(
    db_test_app,
    get_structure,
    upload_basis_set_family,
    file_regression,
    data_regression,
):
    """Test restarting from a previous fort.9 file."""
    code = db_test_app.get_or_create_code("crystal17.main")

    # Prepare input parameters
    params = {
        "title": "NiO Bulk with AFM spin",
        "scf.single": "UHF",
        "scf.k_points": (8, 8),
        "scf.spinlock.SPINLOCK": (0, 15),
        "scf.numerical.FMIXING": 30,
        "scf.post_scf": ["PPAN"],
    }

    instruct = get_structure("NiO_afm")

    kind_data = KindData(
        data={
            "kind_names": ["Ni1", "Ni2", "O"],
            "spin_alpha": [True, False, False],
            "spin_beta": [False, True, False],
        })

    sym_calc = run_get_node(
        WorkflowFactory("crystal17.sym3d"),
        structure=instruct,
        settings=DataFactory("dict")(dict={
            "symprec": 0.01,
            "compute_primitive": True
        }),
    ).node
    instruct = sym_calc.get_outgoing().get_node_by_label("structure")
    symmetry = sym_calc.get_outgoing().get_node_by_label("symmetry")

    upload_basis_set_family()

    # set up calculation
    process_class = code.get_builder().process_class
    builder = process_class.create_builder(
        params,
        instruct,
        "sto3g",
        symmetry=symmetry,
        kinds=kind_data,
        code=code,
        metadata=db_test_app.get_default_metadata(with_mpi=True),
        unflatten=True,
    )

    with resource_context("crystal", "nio_sto3g_afm_scf_maxcyc") as path:
        builder.wf_folder = orm.RemoteData(computer=code.computer,
                                           remote_path=str(path))

        process_options = builder.process_class(
            inputs=builder).metadata.options

        with db_test_app.sandbox_folder() as folder:
            calc_info = db_test_app.generate_calcinfo("crystal17.main", folder,
                                                      builder)
            with folder.open(process_options.input_file_name) as f:
                input_content = f.read()

    file_regression.check(input_content)
    data_regression.check(sanitize_calc_info(calc_info))
Example #24
"""Run simple Band Structure calculation"""

import os
import sys
import click

from ase.atoms import Atoms
import numpy as np

from aiida.common import NotExistent
from aiida.engine import run
from aiida.orm import (Code, Dict, SinglefileData)
from aiida.plugins import DataFactory

StructureData = DataFactory('structure')  # pylint: disable=invalid-name
KpointsData = DataFactory('array.kpoints')  # pylint: disable=invalid-name


def example_bands(cp2k_code):
    """Run simple Band Structure calculation"""

    print("Computing Band Structure of Si...")

    thisdir = os.path.dirname(os.path.realpath(__file__))

    # structure
    positions = [
        [0.0000000000, 0.0000000000, 2.6954627656],
        [4.0431941484, 4.0431941484, 4.0431941484],
    ]
Example #25
def test_run_nio_afm_failed_opt(
    db_test_app,
    get_structure,
    upload_basis_set_family,
    sanitise_calc_attr,
    data_regression,
):
    # type: (AiidaTestApp) -> None
    """Test running a calculation where the optimisation fails, due to reaching walltime."""
    code = db_test_app.get_or_create_code("crystal17.main")

    # Prepare input parameters
    params = {
        "title": "NiO Bulk with AFM spin",
        "geometry.optimise.type": "FULLOPTG",
        "scf.single": "UHF",
        "scf.k_points": (8, 8),
        "scf.spinlock.SPINLOCK": (0, 15),
        "scf.numerical.FMIXING": 50,
        "scf.post_scf": ["PPAN"],
    }

    instruct = get_structure("NiO_afm")

    kind_data = KindData(
        data={
            "kind_names": ["Ni1", "Ni2", "O"],
            "spin_alpha": [True, False, False],
            "spin_beta": [False, True, False],
        })

    sym_calc = run_get_node(
        WorkflowFactory("crystal17.sym3d"),
        structure=instruct,
        settings=DataFactory("dict")(dict={
            "symprec": 0.01,
            "compute_primitive": True
        }),
    ).node
    instruct = sym_calc.get_outgoing().get_node_by_label("structure")
    symmetry = sym_calc.get_outgoing().get_node_by_label("symmetry")

    upload_basis_set_family()

    # set up calculation
    process_class = code.get_builder().process_class
    builder = process_class.create_builder(
        params,
        instruct,
        "sto3g",
        symmetry=symmetry,
        kinds=kind_data,
        code=code,
        metadata=db_test_app.get_default_metadata(),
        unflatten=True,
    )

    outputs, calc_node = run_get_node(builder)
    # print(get_calcjob_report(calc_node))
    assert "optimisation" in outputs
    assert "results" in outputs

    calc_attributes = sanitise_calc_attr(calc_node.attributes)
    calc_attributes["retrieve_list"] = sorted(calc_attributes["retrieve_list"])

    results_attributes = outputs["results"].attributes
    results_attributes.pop("execution_time_seconds")
    results_attributes.pop("parser_version")
    results_attributes = recursive_round(results_attributes, 12)

    data_regression.check({
        "calc_node": calc_attributes,
        "outputs": sorted(outputs.keys()),
        "results": results_attributes,
        "optimisation": outputs["optimisation"].attributes,
    })
Example #26
def struc_from_cif(cif):
    """Convert a CifData node into an AiiDA StructureData node via ASE."""
    from aiida.plugins import DataFactory
    asecell = cif.get_ase()
    struc = DataFactory('structure')(ase=asecell)
    return struc
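
# Hedged usage sketch (the file path is a placeholder):
if __name__ == '__main__':
    from aiida import load_profile
    from aiida.plugins import DataFactory

    load_profile()
    cif_node = DataFactory('cif')(file='/path/to/structure.cif')
    structure = struc_from_cif(cif_node)
    print(structure.get_formula())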
Example #27
    def test_eos_wc_Cu_simple(self):
        """
        simple Cu noSOC, FP, lmax2 full example using scf workflow
        """
        from aiida.orm import Code, load_node
        from aiida.plugins import DataFactory
        from masci_tools.io.kkr_params import kkrparams
        from aiida_kkr.workflows.eos import kkr_eos_wc
        from pprint import pprint
        from numpy import array

        Dict = DataFactory('dict')
        StructureData = DataFactory('structure')

        # prepare computer and code (needed so that the workflow can find and run the codes)
        prepare_code(voro_codename, codelocation, computername, workdir)
        prepare_code(kkr_codename, codelocation, computername, workdir)

        # create structure
        alat = 6.83  # in a_Bohr
        abohr = 0.52917721067  # conversion factor to Angstroem units
        # bravais vectors
        bravais = array([[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]])

        a = 0.5 * alat * abohr
        Cu = StructureData(cell=[[a, a, 0.0], [a, 0.0, a], [0.0, a, a]])
        Cu.append_atom(position=[0.0, 0.0, 0.0], symbols='Cu')

        Cu.store()

        # here we create a parameter node for the workflow input (workflow specific parameter) and adjust the convergence criterion.
        wfd, options = kkr_eos_wc.get_wf_defaults()
        wfd['nsteps'] = 3
        wfd['settings_kkr_scf']['convergence_criterion'] = 10**-4
        wfd['settings_kkr_scf']['convergence_setting_fine'] = wfd[
            'settings_kkr_scf']['convergence_setting_coarse']
        wfd['settings_kkr_scf']['nsteps'] = 80
        wfd['settings_kkr_scf']['num_rerun'] = 2
        wfd['settings_kkr_scf']['natom_in_cls_min'] = 20
        wfd['settings_kkr_startpot']['natom_in_cls_min'] = 20
        wfd['settings_kkr_startpot']['num_rerun'] = 2
        wfd['fitfunction'] = 'sj'  # for only three points only sj fit works

        KKReos_wf_parameters = Dict(dict=wfd)
        options['queue_name'] = queuename
        options['max_wallclock_seconds'] = 5 * 60
        options['use_mpi'] = False
        options = Dict(dict=options)

        # The scf workflow also needs the voronoi and KKR codes to run the calculations
        VoroCode = Code.get_from_string(voro_codename + '@' + computername)
        KKRCode = Code.get_from_string(kkr_codename + '@' + computername)

        # Finally we use the kkrparams class to prepare a valid set of KKR
        # parameters, stored as a Dict node for use in AiiDA
        ParaNode = Dict(dict=kkrparams(
            LMAX=2, RMAX=7, GMAX=65, NSPIN=1, RCLUSTZ=1.9).get_dict())

        label = 'KKR-eos for Cu bulk'
        descr = 'KKR equation of states for Cu bulk'

        # create process builder to set parameters
        builder = kkr_eos_wc.get_builder()
        builder.calc_parameters = ParaNode
        builder.voronoi = VoroCode
        builder.kkr = KKRCode
        builder.structure = Cu
        builder.wf_parameters = KKReos_wf_parameters
        builder.options = options
        builder.metadata.label = label
        builder.metadata.description = descr

        # now run calculation
        from aiida.engine import run
        out = run(builder)

        # load node of workflow
        print(out)
        n = out['eos_results']

        # get output dictionary
        out = n.get_dict()
        print(
            '\n\noutput dictionary:\n-------------------------------------------------'
        )
        pprint(out)

        # finally check some output
        print(
            '\n\ncheck values ...\n-------------------------------------------------'
        )

        print('successful', out['successful'])
        assert out['successful']

        print('rms', out['rms'])
        assert max(out['rms']) < 10**-4

        print('gs_scale_factor', out['gs_scale_factor'])
        assert abs(out['gs_scale_factor'] - 1.07077031740822) < 10**-7

        print('\ndone with checks\n')
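If the workchain node itself is needed afterwards, for example for reports or provenance queries, run_get_node can be used in place of run; a minimal sketch:

from aiida.engine import run_get_node

out, wc_node = run_get_node(builder)
print('workchain pk:', wc_node.pk)
print('exit status:', wc_node.exit_status)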
Example No. 28
import os

from aiida.plugins import DataFactory

# Note: struc_from_cif / wf_struc_from_cif used below are assumed to be
# defined in the same module (struc_from_cif is shown in Example No. 26).
def read_cif_folder(path=os.getcwd(),
                    recursive=True,
                    store=False,
                    log=False,
                    comments='',
                    extras='',
                    logfile_name='read_cif_folder_logfile'):
    """
    Method to read in cif files from a folder and its subfolders.
    It can convert them into AiiDA structures and store them.

    defaults input parameter values are:
    path=".", recursive=True, store=False, log=False, comments='', extras=''

    :params: path: Path to the dictionary with the files (default, where this method is called)
    :params: recursive: bool, If True: looks aso in subfolders, if False: just given dir
    :params: store: bool, if True: stores structures in database
    :params: log: bool, if True, writes a logfile with information (pks, and co)
    :params: comments: string: comment to add to the structures
    :params: extras: dir/string/arb: extras added to the structures stored in the db

    """
    # TODO: check for duplicates in the database, so that rerunning the
    # function does not import the same structures again
    cifdata = DataFactory('cif')
    ############ parameters for the user to set ########

    parent_cif_folder = path  # folder path
    store_db = store  # if True, store the structures in the database
    write_log = log  # write a logfile of what was saved
    comment = comments  # comment to add to the structure nodes
    extra = extras  # helpful for finding them again in the db
    rek = recursive  # search subfolders too, or only the given folder

    #####################
    filenames = []
    filepaths = []
    infofilestring = (
        'Structure Formula, Structuredata pk, Structure Data uuid, cif-file-path, comment, extras \n'
    )

    #1. get all the files
    if rek:
        for root, dirs, files in os.walk(parent_cif_folder):
            for file1 in files:
                if file1.endswith('.cif'):
                    filenames.append(file1)
                    filepath = os.path.join(root, file1)
                    filepaths.append(filepath)
    else:
        dir_list = os.listdir(parent_cif_folder)
        for filename in dir_list:
            if filename.endswith('.cif'):
                filenames.append(filename)
                filepath = os.path.join(parent_cif_folder, filename)
                filepaths.append(filepath)

    nfiles = len(filenames)
    print('{} cif-files found in folder "{}" '.format(nfiles,
                                                      parent_cif_folder))

    #2. read all the files and store stuff.
    saved_count = 0
    saved_count_cif = 0
    filenames2 = []
    structuredatas2 = []
    for i in range(nfiles):
        try:
            new_cif = cifdata.get_or_create(filepaths[i], store_cif=True)
        except (ValueError, AttributeError, ImportError) as emessage:
            print('invalid cif file: {}, the error message was {}'.format(
                filepaths[i], emessage))
            continue
        if new_cif[1]:  # True if a new cif node was created, False if reused from the db
            saved_count_cif = saved_count_cif + 1
        filenames2.append(filenames[i])
        if store_db:
            struc = wf_struc_from_cif(new_cif[0])
            formula = struc.get_formula()
            saved_count = saved_count + 1

            # add comment or extras, only possible after storing
            if comment:
                user = struc.user  # we are the creator
                struc.add_comment(comment, user)
            if extra:
                if isinstance(extra, dict):
                    struc.set_extra_many(extra)
                else:
                    struc.set_extra('specification', extra)
            struc.set_extra('formula', formula)
            structuredatas2.append(struc)
        else:
            struc = struc_from_cif(new_cif[0])
            structuredatas2.append(struc)
            formula = struc.get_formula()
        if write_log:
            # This file is a logfile/info file created by 'read_cif_folder'
            # Structure Formula, structuredata pk, Structure Data uuid,
            #'cif-file-path', comment, extras
            # TODO? if not stored, write 'not stored' placeholders
            if store_db:
                infofilestring = infofilestring + '{} {} {} {} {} {} \n'.format(
                    formula, struc.pk, struc.uuid, filepaths[i],
                    struc.get_comments(), struc.extras)
            else:
                infofilestring = (infofilestring + '{} notstored notstored {} '
                                  'notstored notstored \n'
                                  ''.format(formula, filepaths[i]))

    # write a logfile
    if write_log:
        with open(logfile_name, 'w', encoding='utf8') as file1:
            file1.write(infofilestring)
    print('{} cif-files and {} structures were saved in the database'.format(
        saved_count_cif, saved_count))

    return structuredatas2, filenames2
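A minimal usage sketch, assuming a loaded AiiDA profile and a folder of cif files at the hypothetical path below:

from aiida import load_profile

load_profile()

structures, found_files = read_cif_folder(path='/path/to/cif_folder',  # hypothetical folder
                                          recursive=True,
                                          store=False,
                                          log=False)
for structure, filename in zip(structures, found_files):
    print(filename, structure.get_formula())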
"""Test simple DFT calculations with Gaussian Datatypes"""

from io import StringIO

import pytest
import ase.build

from aiida.plugins import CalculationFactory, DataFactory
from aiida.common.exceptions import LoadingEntryPointError, MissingEntryPointError

from aiida.engine import run, run_get_node
from aiida.orm import Dict, StructureData
from aiida.engine.processes.calcjobs.tasks import PreSubmitException

try:
    BasisSet = DataFactory("gaussian.basisset")  # pylint: disable=invalid-name
    Pseudo = DataFactory("gaussian.pseudo")  # pylint: disable=invalid-name
except (LoadingEntryPointError, MissingEntryPointError):
    pytest.skip("Gaussian Datatypes are not available",
                allow_module_level=True)

# Note: the basissets and pseudos deliberately have a prefix to avoid matching
#       any CP2K provided entries which may creep in via the DATA_DIR

# pylint: disable=line-too-long, redefined-outer-name
BSET_DATA = {
    "simple":
    """\
 H  MY-DZVP-MOLOPT-GTH MY-DZVP-MOLOPT-GTH-q1
 1
 2 0 1 7 2 1
Example No. 30
# get code
computer = helpers.get_computer('localhost')
code = helpers.get_code(entry_point='sirius.py.nlcg', computer=computer)

params = {
    "electronic_structure_method": "pseudopotential",
    "xc_functionals": ["XC_GGA_X_PBE", "XC_GGA_C_PBE"],
    "smearing_width": 0.025,
    "use_symmetry": True,
    "num_mag_dims": 1,
    "gk_cutoff": 6.0,
    "pw_cutoff": 27.00,
    "num_dft_iter": 2
}
# Prepare input parameters
SiriusParameters = DataFactory('sirius.scf')
StructureData = DataFactory('structure')
KpointsData = DataFactory('array.kpoints')
parameters = SiriusParameters({
    'control': {},
    'iterative_solver': {},
    'parameters': params,
    'mixer': {}
})
SinglefileData = DataFactory('singlefile')

NLCGParameters = DataFactory('sirius.py.nlcg')
nlcg_marzari = {'type': 'Marzari', 'inner': 2, 'fd_slope_check': False}
precond = {'type': 'kinetic', 'eps': 0.001}
nlcgconfig = {
    "CG": {