Example #1
def validate_group(callback_kwargs, ctx, param, value):
    """
    Command line option validator for an AiiDA Group. It expects a string for the value
    that corresponds to the label or a pk of an AiiDA group.

    :param callback_kwargs: an optional dictionary with arguments for internal use in the validator
    :param ctx: internal context of the click.command
    :param param: the click Parameter, i.e. either the Option or Argument to which the validator is hooked up
    :param value: a Group label or pk
    :returns: a Group instance
    """
    import click

    from aiida.common.exceptions import NotExistent
    from aiida.orm import Group

    if value is None:
        return value

    try:
        group = Group.get_from_string(value)
    except NotExistent:
        pass
    else:
        return group

    try:
        group = Group.get(pk=int(value))
    except NotExistent as exception:
        raise click.BadParameter(
            "failed to load the Group with the label or pk '{}'\n{}".format(
                value, exception))
    else:
        return group
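Note that the validator takes an extra leading `callback_kwargs` argument in front of the standard click `(ctx, param, value)` signature, so it has to be bound with `functools.partial` before it can be used as a callback. A minimal usage sketch (the command below is hypothetical):

import functools

import click

@click.command()
@click.argument('group', callback=functools.partial(validate_group, {}))
def show_group(group):
    """Echo the pk and label of the validated Group."""
    click.echo('Group<{}> {}'.format(group.pk, group.label))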
Example #2
    def get_potcar_group(cls, group_name):
        """Return the PotcarFamily group with the given name."""
        from aiida.common.exceptions import NotExistent
        from aiida.orm import Group

        try:
            group = Group.get(label=group_name, type_string=cls.potcar_family_type_string)
        except NotExistent:
            group = None
        return group
Example #3
    def get_upf_group(cls, group_name):
        """
        Return the UpfFamily group with the given name.
        """
        from aiida.orm import Group

        return Group.get(name=group_name, type_string=cls.upffamily_type_string)
Example #4
def get_pseudos_from_structure(structure, family_name):
    """Given a family name (a Siesta pseudo group in the DB, possibly with
    mixed psf and psml pseudopotentials) and an AiiDA structure
    object, return a dictionary associating each 'kind' name in the
    structure with its object (PsfData or PsmlData).

    :raise MultipleObjectsError: if more than one pseudo for the same
       element is found in the group.

    :raise NotExistent: if no pseudo for an element in the structure is
       found in the group.

    """
    from aiida.common.exceptions import NotExistent, MultipleObjectsError
    from aiida.orm import Group

    family_pseudos = {}
    family = Group.get(label=family_name)
    for node in family.nodes:
        if isinstance(node, (PsfData, PsmlData)):
            if node.element in family_pseudos:
                raise MultipleObjectsError(
                    "More than one pseudo for element {} found in "
                    "family {}".format(node.element, family_name)
                )
            family_pseudos[node.element] = node

    pseudo_list = {}
    for kind in structure.kinds:
        symbol = kind.symbol
        try:
            pseudo_list[kind.name] = family_pseudos[symbol]
        except KeyError:
            raise NotExistent("No pseudo for element {} found in family {}".format(symbol, family_name))

    return pseudo_list
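A minimal usage sketch for this helper; the structure pk and family label below are hypothetical:

from aiida.orm import load_node

structure = load_node(1234)  # hypothetical StructureData pk
pseudos = get_pseudos_from_structure(structure, 'psf_family')
for kind_name, pseudo in pseudos.items():
    print(kind_name, pseudo.element)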
Example #5
    def get_basis_group(cls, group_name):
        """
        Return the BasisFamily group with the given name.
        """
        from aiida.orm import Group

        return Group.get(name=group_name,
                         type_string=cls.basisfamily_type_string)
Example #6
    def get_psml_group(cls, group_label):
        """
        Return the PsmlFamily group with the given name.
        """
        from aiida.orm import Group

        return Group.get(label=group_label,
                         type_string=cls.psmlfamily_type_string)
Example #7
    def get_upf_group(cls, group_label):
        """Return the UPF family group with the given label.

        :param group_label: the family group label
        :return: the `Group` with the given label, if it exists
        """
        from aiida.orm import Group

        return Group.get(label=group_label, type_string=cls.upffamily_type_string)
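A minimal usage sketch, assuming the classmethod is bound to a `UpfData`-like class; the family label is hypothetical:

from aiida.common.exceptions import NotExistent

try:
    # UpfData stands in for the class that defines get_upf_group
    family = UpfData.get_upf_group('SSSP_efficiency')
except NotExistent:
    family = None  # no family with that label in this database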
Example #8
def check_group_elements(group_name):
    """Yield all nodes belonging to the given group in the AiiDA database.

    Parameters
    ----------
    group_name : str
        Group label
    """
    from aiida.orm import Group # aiida stuff should not be here I think

    g = Group.get(label=group_name)
    for element in g.nodes:
        yield element
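Because the function is a generator, the group's nodes can be consumed lazily; a short sketch with a hypothetical group label:

for node in check_group_elements('my_structures'):
    print(node.pk, node.__class__.__name__)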
Example #9
    def _get_pseudos(self, key, structure):
        from aiida.orm import Group

        family = self._protocols[key]["pseudo_family"]
        group = Group.get(label=family)

        # To be removed in v2.0
        if "data" in group.type_string:
            from aiida_siesta.data.common import get_pseudos_from_structure
            pseudos = get_pseudos_from_structure(structure, family)
            return pseudos

        pseudos = group.get_pseudos(structure=structure)
        return pseudos
Example #10
def mounet1(with_key_filter=False):
    pw_calc = Group.get(pk=1139193).nodes.next()
    structure = pw_calc.out.output_structure
    qstruc = StructureData.query(children__pk=structure.pk)
    attr_filters = models.DbAttribute.objects.filter(tval__endswith='alvarez')

    # Because we can't reproduce a filter on the value only with a JSON table,
    # a fairer comparison would be with a filter on the key too.
    if with_key_filter:
        attr_filters = attr_filters.filter(
            Q(key="radii_source") | Q(key="lowdim_dict.radii_source"))

    qic = InlineCalculation.query(inputs__in=qstruc).filter(
        inputs__dbattributes__in=attr_filters).distinct()

    return qic.count()
Example #11
def mounet1():
    pw_calc = Group.get(pk=1139193).nodes.next()
    structure = pw_calc.out.output_structure
    qstruc = StructureData.query(children__pk=structure.pk).with_entities(
        DbNode.id)
    n_children = aliased(DbNode)
    qic = (InlineCalculation.query(
    ).join(DbLink, DbNode.id == DbLink.output_id).filter(
        DbLink.input_id.in_(qstruc)).join(n_children, DbNode.inputs).filter(
            or_(
                n_children.attributes["radii_source"].astext.like("%alvarez"),
                n_children.attributes[(
                    "lowdim_dict",
                    "radii_source")].astext.like("%alvarez"))).distinct())

    return qic.with_entities(func.count(DbNode.id)).scalar()
Example #12
    def get_or_create_famgroup(cls, famname):
        '''Returns a PAW family group, creating it if it does not exist yet'''
        from aiida.orm import Group
        from aiida.djsite.utils import get_automatic_user
        from aiida.common.exceptions import NotExistent, UniquenessError

        # TODO: maybe replace with Group.get_or_create?
        try:
            group = Group.get(name=famname, type_string=cls.group_type)
            group_created = False
        except NotExistent:
            group = Group(name=famname, type_string=cls.group_type,
                          user=get_automatic_user())
            group_created = True

        if group.user != get_automatic_user():
            raise UniquenessError("There is already a UpfFamily group "
                                  "with name {}, but it belongs to user {},"
                                  " therefore you cannot modify it".format(
                                      famname, group.user.email))
        return group, group_created
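The TODO above points at the built-in helper: in aiida-core 1.x the same get-or-create pattern can be written with the group collection (label hypothetical); note that, unlike the snippet above, the collection helper stores the group when it creates it:

from aiida.orm import Group

group, created = Group.objects.get_or_create(label='paw_family')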
Example #13
    def use_lapwbasis_from_family(self, family_name):
        from collections import defaultdict

        try:
            structure = self.get_inputs_dict()[self.get_linkname('structure')]
        except AttributeError:
            raise ValueError("Structure is not set yet! Therefore, the method "
                             "use_lapwbasis_from_family cannot automatically set "
                             "the LAPW basis")

        from aiida.orm import Group
        lapwbasis_group = Group.get(family_name, type_string=LapwbasisData.lapwbasisfamily_type_string)
        
        lapwbasis_list = {}
        for node in lapwbasis_group.nodes:
            if isinstance(node, LapwbasisData):
                lapwbasis_list[node.chemical_symbol] = node

        for kind in structure.kinds:
            symbol = kind.symbol
            self.use_lapwbasis(lapwbasis_list[symbol], symbol)
Example #14
def main(
    hexagonal_relax_pk: int,
    group_pk: int,
    plot_energies: bool = False,
    vmax: float = None,
):

    grp = Group.get(id=group_pk)

    data = []
    print("load data: total {} data".format(len(grp.nodes)))
    for i, node in enumerate(grp.nodes):
        print("loading data: number {}".format(i + 1))
        twinpy = Twinpy.initialize_from_aiida_twinboundary(
            twinboundary_relax_pk=node.pk,
            hexagonal_relax_pk=hexagonal_relax_pk,
        )
        pmgstruct = get_pymatgen_structure(
            twinpy.twinboundary_analyzer.relax_analyzer.final_cell)
        spg = SpacegroupAnalyzer(pmgstruct, symprec=1e-1)
        sg = spg.get_symmetry_dataset()['international']
        print("pk: %d" % node.pk + " " + node.label + " space group: %s" % sg)
        tb_analyzer = twinpy.twinboundary_analyzer
        xshift = tb_analyzer.twinboundary_structure.xshift
        yshift = tb_analyzer.twinboundary_structure.yshift
        energy = tb_analyzer.get_formation_energy()
        data.append([xshift, yshift, energy])
    data = np.array(data)

    # plot plane interval
    if plot_energies:
        plt.figure()
        im = plt.scatter(data[:, 0],
                         data[:, 1],
                         c=data[:, 2],
                         cmap=cm.jet,
                         vmax=vmax)
        plt.colorbar(im)
        plt.show()
Example #15
def upload_psf_family(folder,
                      group_name,
                      group_description,
                      stop_if_existing=True):
    """
    Upload a set of PSF files in a given group.

    :param folder: a path containing all PSF files to be added.
        Only files ending in .PSF (case-insensitive) are considered.
    :param group_name: the name of the group to create. If it exists and is
        non-empty, a UniquenessError is raised.
    :param group_description: a string to be set as the group description.
        Overwrites previous descriptions, if the group was existing.
    :param stop_if_existing: if True, check for the md5 of the files and,
        if the file already exists in the DB, raises a MultipleObjectsError.
        If False, simply adds the existing PsfData node to the group.
    """
    import os
    import aiida.common
    from aiida.common import aiidalogger
    from aiida.orm import Group
    from aiida.common.exceptions import UniquenessError, NotExistent
    from aiida.backends.utils import get_automatic_user
    from aiida.orm.querybuilder import QueryBuilder
    if not os.path.isdir(folder):
        raise ValueError("folder must be a directory")

    # only files, and only those ending with .psf or .PSF;
    # go to the real file if it is a symlink
    files = [
        os.path.realpath(os.path.join(folder, i)) for i in os.listdir(folder)
        if os.path.isfile(os.path.join(folder, i))
        and i.lower().endswith('.psf')
    ]

    nfiles = len(files)

    try:
        group = Group.get(name=group_name, type_string=PSFGROUP_TYPE)
        group_created = False
    except NotExistent:
        group = Group(name=group_name,
                      type_string=PSFGROUP_TYPE,
                      user=get_automatic_user())
        group_created = True

    if group.user != get_automatic_user():
        raise UniquenessError("There is already a PsfFamily group with name {}"
                              ", but it belongs to user {}, therefore you "
                              "cannot modify it".format(
                                  group_name, group.user.email))

    # Always update description, even if the group already existed
    group.description = group_description

    # NOTE: GROUP SAVED ONLY AFTER CHECKS OF UNICITY

    pseudo_and_created = []

    for f in files:
        md5sum = aiida.common.utils.md5_file(f)
        qb = QueryBuilder()
        qb.append(PsfData, filters={'attributes.md5': {'==': md5sum}})
        existing_psf = qb.first()

        #existing_psf = PsfData.query(dbattributes__key="md5",
        #                            dbattributes__tval = md5sum)

        if existing_psf is None:
            # return the psfdata instances, not stored
            pseudo, created = PsfData.get_or_create(f,
                                                    use_first=True,
                                                    store_psf=False)
            # to check whether only one psf per element exists
            # NOTE: actually, created has the meaning of "to_be_created"
            pseudo_and_created.append((pseudo, created))
        else:
            if stop_if_existing:
                raise ValueError("A PSF with identical MD5 to "
                                 " {} cannot be added with stop_if_existing"
                                 "".format(f))
            existing_psf = existing_psf[0]
            pseudo_and_created.append((existing_psf, False))

    # check whether pseudo are unique per element
    elements = [(i[0].element, i[0].md5sum) for i in pseudo_and_created]
    # If the group already exists, check that we are not inserting the same
    # element more than once
    if not group_created:
        for aiida_n in group.nodes:
            # Skip non-pseudos
            if not isinstance(aiida_n, PsfData):
                continue
            elements.append((aiida_n.element, aiida_n.md5sum))

    # Discard duplicates with the same MD5, which would not be stored twice
    elements = set(elements)
    elements_names = [e[0] for e in elements]

    if not len(elements_names) == len(set(elements_names)):
        duplicates = set(
            [x for x in elements_names if elements_names.count(x) > 1])
        duplicates_string = ", ".join(i for i in duplicates)
        raise UniquenessError("More than one PSF found for the elements: " +
                              duplicates_string + ".")

    # At this point, save the group, if still unstored
    if group_created:
        group.store()

    # save the psf in the database, and add them to group
    for pseudo, created in pseudo_and_created:
        if created:
            pseudo.store()

            aiidalogger.debug("New node {} created for file {}".format(
                pseudo.uuid, pseudo.filename))
        else:
            aiidalogger.debug("Reusing node {} for file {}".format(
                pseudo.uuid, pseudo.filename))

    # Add elements to the group all together
    group.add_nodes(pseudo for pseudo, created in pseudo_and_created)

    nuploaded = len([_ for _, created in pseudo_and_created if created])

    return nfiles, nuploaded
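A usage sketch for the uploader; the folder path, group name, and description are hypothetical:

nfiles, nuploaded = upload_psf_family(
    '/path/to/psf_folder',
    'psf_family',
    'LDA PSF pseudopotentials',
    stop_if_existing=False,
)
print('{} files scanned, {} new pseudos stored'.format(nfiles, nuploaded))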
Example #16
def cif_import(ctx, database, max_number_species, dry_run):
    """Import structures from an external database.

    This command will call the `aiida-codtools data cif import` CLI script to perform the actual importing.
    The imported CIF files will be turned into `CifData` nodes and stored in the group `{database}/cif/raw`. The output
    of the script will be piped to a file in a folder that bears the name of the chosen database and the filename is
    created by the current date. This way it is easy to see when this script was ran for the last time. Simply by
    rerunning this script, any new CIF files that have been added to the external database since the last import will be
    simply added to the group.
    """
    import errno
    import os
    import sys

    from datetime import datetime
    from aiida.cmdline.utils import echo
    from aiida.orm import Group
    from aiida_codtools.cli.data.cif import launch_cif_import

    directory = database
    filepath = '{}.log'.format(os.path.join(database, datetime.utcnow().strftime('%Y%m%d')))

    try:
        os.makedirs(directory)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

    if os.path.isfile(filepath):
        echo.echo_critical('file `{}` already exists, delete it first if you want to continue'.format(filepath))

    group_cif_raw = Group.get(label='{database}/cif/raw'.format(database=database))

    if database == 'cod':
        inputs_database_specific = {}
    elif database == 'icsd':
        inputs_database_specific = {
            'importer_server': 'http://localhost/',
            'importer_db_host': '127.0.0.1',
            'importer_db_name': 'icsd',
            'importer_db_password': '******',
        }
    elif database == 'mpds':
        inputs_database_specific = {
            'importer_api_key': 'ZH2fvgPwmIdqVX4dnRjv7Q32CALN98EzN1I2YkdoP3iLMQv9'
        }

        if max_number_species > 5:
            # Anything above `quinary` will be translated to `multinary`
            max_number_species = 6

    with open(filepath, 'w') as handle:

        sys.stdout = handle

        for number_species in range(1, max_number_species + 1):

            inputs = {
                'group': group_cif_raw,
                'database': database,
                'number_species': number_species,
                'dry_run': dry_run,
            }
            inputs.update(inputs_database_specific)
            ctx.invoke(launch_cif_import, **inputs)
Example #17
def launch(code_node, structure_group_label, workchain_group_label,
           structure_node, base_parameter_node,
           pseudo_familyname, kptper_recipang,
           nume2bnd_ratio, press_conv_thr,
           calc_method, use_conventional_structure,
           max_wallclock_seconds, max_active_calculations, max_active_elastic,
           max_nodes_submit, max_atoms_submit,
           number_of_nodes, memory_gb, ndiag, npools,
           sleep_interval, z_movement_only, z_cellrelax_only,
           strain_magnitudes, use_all_strains,
           keep_workdir, dryrun, submit_debug, run_debug):
    import sys
    import time
    from pprint import pprint

    from aiida.orm import Group, load_node
    from aiida.orm import Bool, Dict, Float, List, Int, Str, StructureData
    from aiida.engine import submit, run
    from aiida.plugins.factories import WorkflowFactory
    # announce if running in debug mode
    if submit_debug:
        print("Running in debug mode!")

    # setup parameters
    code = load_node(code_node)
    workchain_group = Group.objects.get_or_create(label=workchain_group_label)[0]
    base_parameter = load_node(base_parameter_node)

    if structure_node:
        structure_group = Group.objects.get_or_create(label=structure_group_label)[0]
        input_structure = load_node(structure_node)
        if not isinstance(input_structure, StructureData):
            raise Exception("structure node was not a StructureData")
        structure_group.add_nodes([input_structure])

    # Load all the structures in the structure group, not-yet run in workchain_group_label
    structure_group = Group.get(label=structure_group_label)
    uncalculated_structures = retrieve_alluncalculated_structures(
                                structure_group_label,
                                workchain_group_label=workchain_group_label
    )

    if len(uncalculated_structures) == 0:
        print(("All structures in {} already have associated workchains in "
              "the group {}".format(structure_group_label, workchain_group_label)))
        sys.exit()

    # determine number of calculations to submit
    running_calculations = retrieve_numactive_calculations()
    calcs_to_submit = max_active_calculations - running_calculations
    if calc_method == 'elastic':
        running_elastic = retrieve_numactive_elastic()
        calcs_to_submit = max_active_elastic - running_elastic


    # submit calculations
    submit_counter=0
    for structure in uncalculated_structures:
        if use_conventional_structure:
            structure = wf_getconventionalstructure(structure)
        print("Preparing to launch {}".format(structure))
        print("calcs to submit: {} (active/max){}:{}".format(
                                     len(uncalculated_structures) -submit_counter,
                                                        calcs_to_submit,
                                                        max_active_calculations))
        submit_counter += 1

        if len(structure.get_ase()) > max_atoms_submit:
            print("{} has more atoms than the max allowed {}".format(structure,
                                                                     max_atoms_submit))
            print("If you wish to overide please use --max_atoms_submit")
            continue

        # ensure no more than the max number of calcs are submitted
        while (calcs_to_submit <= 0):
            running_calculations = retrieve_numactive_calculations()
            calcs_to_submit = max_active_calculations - running_calculations
            if calc_method == 'elastic':
                running_elastic = retrieve_numactive_elastic()
                calcs_to_submit = max_active_elastic - running_elastic
            if calcs_to_submit <= 0:  # in case jobs finished during submission
                if calc_method == 'elastic':
                    print(("{} elastic running, "
                           "max num elastic {} waiting....".format(
                               running_elastic, max_active_elastic)))
                else:
                    print(("{} calcs running, "
                           "max num calcs {} waiting....".format(
                               running_calculations, max_active_calculations)))
                time.sleep(sleep_interval)

        # start timer to inspect job submission times
        from timeit import default_timer as timer
        start = timer()

        # add any cell-related parameters specified from cli
        if "CELL" in base_parameter.get_dict():
            cellpress_dict = {"CELL":base_parameter.get_dict()["CELL"]}
        else:
            cellpress_dict = {}
            if press_conv_thr or z_cellrelax_only:
                    cellpress_dict["CELL"] = {}
        if press_conv_thr:
            cellpress_dict["CELL"]["press_conv_thr"] = float(press_conv_thr)
        if z_cellrelax_only:
            cellpress_dict["CELL"]["cell_dofree"] = "z"

        # determine number of bands & setup the parameters
        cellpress_parameter = Dict(dict=cellpress_dict)
        parameters = wf_setupparams(base_parameter,
                                    structure,
                                    Str(pseudo_familyname),
                                    Float(nume2bnd_ratio),
                                    cellpress_parameter)

        # determine kpoint mesh & setup kpoints
        kpoints = wf_getkpoints(structure, Int(kptper_recipang))

        # determine parallelization & resources (setup the settings & options)
        if number_of_nodes:
            num_machines = int(number_of_nodes)
        else:
            num_machines = get_nummachines(structure, pseudo_familyname)
            if calc_method in ['relax', 'vc-relax']:
                num_machines += 4
            if num_machines > int(max_nodes_submit):
                print("{} nodes requested, maximum is {}".format(num_machines, max_nodes_submit))
                print("If you wish to launch please choose nodes manually with --number_of_nodes")
                continue
        options_dict = {
            'max_wallclock_seconds': max_wallclock_seconds,
            'resources': {'num_machines': num_machines},
        }
        if memory_gb:
            options_dict['max_memory_kb'] = int(int(memory_gb)*1024*1024)
        if submit_debug:
            num_machines = 2
            options_dict['resources']['num_machines'] = num_machines
            options_dict['max_wallclock_seconds'] = int(30*60)
            options_dict['queue_name'] = 'debug'
        workchain_options = options_dict

        if npools:
            nk = npools
        else:
            nk = get_nk(num_machines, code)
        settings_dict = {
            'cmdline': ['-nk', nk],
            'no_bands': True
            }
        if ndiag:
            settings_dict['cmdline'] += ['-ndiag', ndiag]
        if z_movement_only:
            num_atoms = len(structure.get_ase())
            coordinate_fix = [[True,True,False]]*num_atoms
            settings_dict['fixed_coords'] = coordinate_fix
        settings = Dict(dict=settings_dict)

        # setup inputs & submit workchain
        clean_workdir = not keep_workdir
        inputs = {
                  'clean_workdir': Bool(clean_workdir),
                  }
        base_inputs = {
            'pw': {
                'code': code,
                'parameters': wf_delete_vccards(parameters),
                'metadata': {'options': workchain_options},
                'settings': settings,
            }
        }
        relax_inputs = {
            'base': {k: base_inputs[k]  for k in base_inputs if k != 'parameters'},
            'relaxation_scheme': Str('relax'),
            'final_scf' : Bool(False),
            'meta_convergence' : Bool(False)
        }
        if calc_method == 'scf':
            WorkChain = WorkflowFactory('quantumespresso.pw.base')
            inputs.update(base_inputs)
            inputs['pw']['structure'] = structure
            inputs['kpoints'] = kpoints
            inputs['pseudo_family'] = Str(pseudo_familyname)
        elif calc_method in ['relax', 'vc-relax']:
            WorkChain = WorkflowFactory('quantumespresso.pw.relax')
            inputs.update(relax_inputs)
            inputs['structure'] = structure
            inputs['base']['pseudo_family'] = Str(pseudo_familyname)
            inputs['base']['kpoints'] = kpoints
            if calc_method == 'relax':
                inputs['relaxation_scheme'] = Str('relax')
                parameters = wf_delete_vccards(parameters)
                inputs['base']['pw']['parameters'] = parameters
            elif calc_method == 'vc-relax':
                inputs['relaxation_scheme'] = Str('vc-relax')
                inputs['base']['pw']['parameters'] = parameters
            if calc_method == 'elastic':
                if submit_debug:
                    print("Using debug queue with elastic workchain is not advised!")
        elif calc_method == 'elastic':
            WorkChain = WorkflowFactory('elastic')
            inputs['structure'] = structure

            # Unfortunately deepcopy on code causes issues so we need to duplicate
            # a lot of information
            sub_relax_inputs = {
                'base': {k: base_inputs[k]  for k in base_inputs if k != 'parameters'},
                'relaxation_scheme': Str('relax'),
                'final_scf' : Bool(False),
                'meta_convergence' : Bool(False)
            }
            sub_relax_inputs['base']['pseudo_family'] = Str(pseudo_familyname)
            sub_relax_inputs['base']['kpoints'] = kpoints
            sub_relax_inputs['relaxation_scheme'] = Str('relax')
            sub_relax_parameters = wf_delete_vccards(parameters)
            sub_relax_inputs['base']['pw']['parameters'] = sub_relax_parameters


            sub_vcrelax_inputs = {
                'base': {k: base_inputs[k]  for k in base_inputs if k != 'parameters'},
                'relaxation_scheme': Str('relax'),
                'final_scf' : Bool(False),
                'meta_convergence' : Bool(False)
            }
            sub_vcrelax_inputs['base']['pseudo_family'] = Str(pseudo_familyname)
            sub_vcrelax_inputs['base']['kpoints'] = kpoints
            sub_vcrelax_inputs['relaxation_scheme'] = Str('relax')
            sub_vcrelax_inputs['relaxation_scheme'] = Str('vc-relax')
            sub_vcrelax_inputs['base']['pw']['parameters'] = parameters

            inputs['initial_relax'] = sub_vcrelax_inputs
            inputs['elastic_relax'] = sub_relax_inputs

            if strain_magnitudes:
                strain_magnitudes_list = [float(x) for x in strain_magnitudes.split(',')]
                inputs['strain_magnitudes'] = List(list=strain_magnitudes_list)
            if use_all_strains:
                inputs['symmetric_strains_only'] = Bool(False)
        else:
            raise Exception("Invalid calc_method: {}".format(calc_method))

        def print_timing(start):
            end = timer()
            time_elapsed = end - start
            print("timing: {}s".format(time_elapsed))

        calcs_to_submit -= 1
        if dryrun:
            pprint("ase_structure: {}".format(structure.get_ase()))
            pprint("aiida_settings: {}".format(settings.get_dict()))
            #pprint "aiida_parameters: {}".format(inputs['base']['parameters'].get_dict())
            pprint("aiida_options: {}".format(workchain_options))
            pprint("aiida_inputs: ")
            pprint(inputs)
            print_timing(start)
            continue
        elif run_debug:
            run(WorkChain, **inputs)
            sys.exit()
        else:
            node = submit(WorkChain, **inputs)
            print("WorkChain: {} submitted".format(node))
            print_timing(start)

        if submit_debug:
            sys.exit()

        workchain_group.add_nodes([node])
Example #18
def launch(input_group, input_structures, repeat_expansion, volumetric_strains,
           norm_strains, shear_strains, random_displacement,
           number_randomized_samples, max_atoms, structure_comments,
           use_conventional_structure, structure_group_label,
           structure_group_description, dryrun):
    """
    Script for distorting the cell shape of an input structure
    """
    if not dryrun:
        structure_group = Group.objects.get_or_create(
            label=structure_group_label,
            description=structure_group_description)[0]
    else:
        structure_group = None

    if input_group:
        structure_nodes = Group.get(label=input_group).nodes
    elif input_structures:
        input_structures = input_structures.split(',')
        structure_nodes = [load_node(x) for x in input_structures]
    else:
        raise Exception("Must use either input group or input structures")
    volumetric_strains = [float(x) for x in volumetric_strains.split(',')]
    norm_strains = [float(x) for x in norm_strains.split(',')]
    shear_strains = [float(x) for x in shear_strains.split(',')]
    repeat_expansion = [int(x) for x in repeat_expansion.split(',')]

    for structure_node in structure_nodes:
        extras = {
            'input_structure': structure_node.uuid,
            'repeats': repeat_expansion,
            'structure_comments': structure_comments
        }

        input_structure_ase = structure_node.get_ase()
        if use_conventional_structure:
            input_structure_ase = get_conventionalstructure(
                input_structure_ase)
            extras['conventional_structure'] = True
        if len(input_structure_ase) > max_atoms:
            print(("Skipping {} too many atoms".format(structure_node)))
            continue
        input_structure_ase = input_structure_ase.repeat(repeat_expansion)
        deformations, strained_structures = get_strained_structures(
            input_structure_ase, norm_strains, shear_strains)
        for i in range(len(strained_structures)):
            extras['deformation'] = deformations[i]
            straindeformed_structure = copy.deepcopy(strained_structures[i])
            straindeformed_cell = copy.deepcopy(straindeformed_structure.cell)
            for j in range(len(volumetric_strains)):
                extras['random_seed'] = None
                extras['random_displacement_stdev'] = None
                extras['volume_strain'] = volumetric_strains[j]
                volume_deformation = 1.0 + volumetric_strains[j]
                extras['volume_deformation'] = volume_deformation

                volumedeformed_structure = copy.deepcopy(
                    straindeformed_structure)
                volumedeformed_structure.set_cell(straindeformed_cell *
                                                  volume_deformation,
                                                  scale_atoms=True)
                store_asestructure(volumedeformed_structure, extras,
                                   structure_group, dryrun)
                for k in range(number_randomized_samples):
                    random_structure = copy.deepcopy(volumedeformed_structure)
                    random_seed = random.randint(1, 2**32 - 1)
                    extras['random_seed'] = random_seed
                    extras['random_displacement_stdev'] = random_displacement
                    random_structure.rattle(stdev=random_displacement,
                                            seed=random_seed)
                    store_asestructure(random_structure, extras,
                                       structure_group, dryrun)
Example #19
    def get_famgroup(cls, famname):
        '''Returns a PAW family group if it exists, otherwise
        raises an exception'''
        from aiida.orm import Group
        return Group.get(name=famname, type_string=cls.group_type)
Example #20
    def run(self, *args):
        load_dbenv()

        import argparse
        import sys

        from aiida.common.exceptions import NotExistent
        from aiida.backends.djsite.db import models
        from aiida.orm import Group
        from aiida.orm.importexport import export, export_zip

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Export data from the DB.')
        parser.add_argument('-c',
                            '--computers',
                            nargs='+',
                            type=int,
                            metavar="PK",
                            help="Export the given computers")
        parser.add_argument('-n',
                            '--nodes',
                            nargs='+',
                            type=int,
                            metavar="PK",
                            help="Export the given nodes")
        parser.add_argument(
            '-g',
            '--groups',
            nargs='+',
            metavar="GROUPNAME",
            help="Export all nodes in the given group(s), identified by name.",
            type=str)
        parser.add_argument(
            '-G',
            '--group_pks',
            nargs='+',
            metavar="PK",
            help="Export all nodes in the given group(s), identified by pk.",
            type=str)
        parser.add_argument(
            '-P',
            '--no-parents',
            dest='no_parents',
            action='store_true',
            help=
            "Store only the nodes that are explicitly given, without exporting the parents"
        )
        parser.set_defaults(no_parents=False)
        parser.add_argument(
            '-O',
            '--no-calc-outputs',
            dest='no_calc_outputs',
            action='store_true',
            help=
            "If a calculation is included in the list of nodes to export, do not export its outputs"
        )
        parser.set_defaults(no_calc_outputs=False)
        parser.add_argument('-y',
                            '--overwrite',
                            dest='overwrite',
                            action='store_true',
                            help="Overwrite the output file, if it exists")
        parser.set_defaults(overwrite=False)

        zipsubgroup = parser.add_mutually_exclusive_group()
        zipsubgroup.add_argument(
            '-z',
            '--zipfile-compressed',
            dest='zipfilec',
            action='store_true',
            help="Store as zip file (experimental, should be faster")
        zipsubgroup.add_argument(
            '-Z',
            '--zipfile-uncompressed',
            dest='zipfileu',
            action='store_true',
            help=
            "Store as uncompressed zip file (experimental, should be faster")
        parser.set_defaults(zipfilec=False)
        parser.set_defaults(zipfileu=False)

        parser.add_argument('output_file',
                            type=str,
                            help='The output file name for the export file')

        parsed_args = parser.parse_args(args)

        if parsed_args.nodes is None:
            node_pk_list = []
        else:
            node_pk_list = parsed_args.nodes

        groups_list = []

        if parsed_args.groups is not None:
            for group_name in parsed_args.groups:
                try:
                    group = Group.get_from_string(group_name)
                except (ValueError, NotExistent) as e:
                    print >> sys.stderr, e.message
                    sys.exit(1)
                node_pk_list += group.dbgroup.dbnodes.values_list('pk',
                                                                  flat=True)
                groups_list.append(group.dbgroup)

        if parsed_args.group_pks is not None:
            for group_pk in parsed_args.group_pks:
                try:
                    group = Group.get(pk=group_pk)
                except (ValueError, NotExistent) as e:
                    print >> sys.stderr, e.message
                    sys.exit(1)
                node_pk_list += group.dbgroup.dbnodes.values_list('pk',
                                                                  flat=True)
                groups_list.append(group.dbgroup)

        node_pk_list = set(node_pk_list)

        node_list = list(models.DbNode.objects.filter(pk__in=node_pk_list))
        missing_nodes = node_pk_list.difference(_.pk for _ in node_list)
        for pk in missing_nodes:
            print >> sys.stderr, ("WARNING! Node with pk= {} "
                                  "not found, skipping.".format(pk))
        if parsed_args.computers is not None:
            computer_list = list(
                models.DbComputer.objects.filter(pk__in=parsed_args.computers))
            missing_computers = set(parsed_args.computers).difference(
                _.pk for _ in computer_list)
            for pk in missing_computers:
                print >> sys.stderr, ("WARNING! Computer with pk= {} "
                                      "not found, skipping.".format(pk))
        else:
            computer_list = []

        what_list = node_list + computer_list + groups_list

        export_function = export
        additional_kwargs = {}
        if parsed_args.zipfileu:
            export_function = export_zip
            additional_kwargs.update({"use_compression": False})
        elif parsed_args.zipfilec:
            export_function = export_zip
            additional_kwargs.update({"use_compression": True})
        try:
            export_function(what=what_list,
                            also_parents=not parsed_args.no_parents,
                            also_calc_outputs=not parsed_args.no_calc_outputs,
                            outfile=parsed_args.output_file,
                            overwrite=parsed_args.overwrite,
                            **additional_kwargs)
        except IOError as e:
            print >> sys.stderr, "IOError: {}".format(e.message)
            sys.exit(1)
Example #21
    def upload_basisset_family(cls,
                               folder,
                               group_name,
                               group_description,
                               stop_if_existing=True,
                               extension=".basis",
                               dry_run=False):
        """
        Upload a set of Basis Set files in a given group.

        :param folder: a path containing all Basis Set files to be added.
            Only files ending in the set extension (case-insensitive) are considered.
        :param group_name: the name of the group to create. If it exists and is
            non-empty, a UniquenessError is raised.
        :param group_description: a string to be set as the group description.
            Overwrites previous descriptions, if the group was existing.
        :param stop_if_existing: if True, check for the md5 of the files and,
            if the file already exists in the DB, raises a MultipleObjectsError.
            If False, simply adds the existing BasisSetData node to the group.
        :param extension: the filename extension to look for
        :param dry_run: If True, do not change the database.
        """
        import os

        from aiida.common import aiidalogger
        from aiida.orm import Group
        from aiida.common.exceptions import UniquenessError, NotExistent
        from aiida_crystal17.aiida_compatability import get_automatic_user

        automatic_user = get_automatic_user()

        if not os.path.isdir(folder):
            raise ValueError("folder must be a directory")

        # only files, and only those ending with the specified extension;
        # go to the real file if it is a symlink
        files = [
            os.path.realpath(os.path.join(folder, i))
            for i in os.listdir(folder)
            if os.path.isfile(os.path.join(folder, i))
            and i.lower().endswith(extension)
        ]

        nfiles = len(files)

        try:
            group = Group.get(name=group_name, type_string=BASISGROUP_TYPE)
            group_created = False
        except NotExistent:
            group = Group(name=group_name,
                          type_string=BASISGROUP_TYPE,
                          user=automatic_user)
            group_created = True

        if group.user.email != automatic_user.email:
            raise UniquenessError(
                "There is already a BasisFamily group with name {}"
                ", but it belongs to user {}, therefore you "
                "cannot modify it".format(group_name, group.user.email))

        # Always update description, even if the group already existed
        group.description = group_description

        # NOTE: GROUP SAVED ONLY AFTER CHECKS OF UNICITY

        basis_and_created = _retrieve_basis_sets(files, stop_if_existing)
        # check whether basisset are unique per element
        elements = [(i[0].element, i[0].md5sum) for i in basis_and_created]
        # If the group already exists, check that we are not inserting the same
        # element more than once
        if not group_created:
            for aiida_n in group.nodes:
                # Skip non-basis sets
                if not isinstance(aiida_n, BasisSetData):
                    continue
                elements.append((aiida_n.element, aiida_n.md5sum))

        # Discard duplicates with the same MD5, which would not be stored twice
        elements = set(elements)
        elements_names = [e[0] for e in elements]

        if not len(elements_names) == len(set(elements_names)):
            duplicates = set(
                [x for x in elements_names if elements_names.count(x) > 1])
            duplicates_string = ", ".join(i for i in duplicates)
            raise UniquenessError(
                "More than one Basis found for the elements: " +
                duplicates_string + ".")

        # At this point, save the group, if still unstored
        if group_created and not dry_run:
            group.store()

        # save the basis set in the database, and add them to group
        for basisset, created in basis_and_created:
            if created:
                if not dry_run:
                    basisset.store()

                aiidalogger.debug("New node {0} created for file {1}".format(  # pylint: disable=logging-format-interpolation
                    basisset.uuid, basisset.filename))
            else:
                aiidalogger.debug("Reusing node {0} for file {1}".format(  # pylint: disable=logging-format-interpolation
                    basisset.uuid, basisset.filename))

        # Add elements to the group all together
        if not dry_run:
            group.add_nodes(basis for basis, created in basis_and_created)

        nuploaded = len([_ for _, created in basis_and_created if created])

        return nfiles, nuploaded
Example #22
    def _protocols_checks(
            self):  # noqa: MC0001  - is mccabe too complex funct -
        """
        Here implemented all the checks on the correct structure of each protocol. It also checks
        that, for each protocol, the correct pseudo family already loaded in the database.
        """
        def raise_invalid(message):
            raise RuntimeError(
                f'invalid protocol registry `{self.__class__.__name__}`: ' +
                message)

        if not isinstance(self._protocols, dict):
            raise_invalid('protocols not collected in a dictionary')

        for k, v in self._protocols.items():
            if not isinstance(self._protocols[k], dict):
                raise_invalid(f'protocol `{k}` is not a dictionary')

            if 'description' not in v:
                raise_invalid(
                    f'protocol `{k}` does not define the mandatory key `description`'
                )

            if 'parameters' not in v:
                raise_invalid(
                    f'protocol `{k}` does not define the mandatory key `parameters`'
                )
            if "mesh-cutoff" in v["parameters"]:
                try:
                    float(v["parameters"]["mesh-cutoff"].split()[0])
                    str(v["parameters"]["mesh-cutoff"].split()[1])
                except (ValueError, IndexError):
                    raise_invalid(
                        f'Wrong format of `mesh-cutoff` in `parameters` of protocol `{k}`. Value and units required.'
                    )

            if 'basis' not in v:
                raise_invalid(
                    f'protocol `{k}` does not define the mandatory key `basis`'
                )

            if 'pseudo_family' not in v:
                raise_invalid(
                    f'protocol `{k}` does not define the mandatory key `pseudo_family`'
                )
            else:
                famname = self._protocols[k]["pseudo_family"]
                messagg = (
                    f'protocol `{k}` requires `pseudo_family` with name {famname} '
                    + 'but no family with this name is loaded in the database')
                try:
                    Group.get(label=famname)
                except exceptions.NotExistent:
                    if k == "standard_psml":
                        try:
                            Group.get(label="nc-sr-04_pbe_standard_psml")
                            from aiida_siesta.utils.warn import AiidaSiestaDeprecationWarning
                            import warnings
                            mesg = (
                                f'protocol `{k}` now requires `pseudo_family` with name `{famname}`. This is not '
                                +
                                'present in the database, but family `nc-sr-04_pbe_standard_psml` is found instead. '
                                +
                                f'This is an old name for the family of {k}. It is accepted now but deprecated '
                                +
                                'after v2.0. To create the family with updated name, run the command '
                                +
                                '`aiida-pseudo install pseudo-dojo -v 0.4 -x PBE -r SR -p standard -f psml`. '
                                +
                                'This will also remove other deprecation messages.'
                            )
                            warnings.warn(mesg, AiidaSiestaDeprecationWarning)
                            self._protocols[k][
                                "pseudo_family"] = "nc-sr-04_pbe_standard_psml"
                        except exceptions.NotExistent:
                            raise_invalid(messagg)
                    else:
                        raise_invalid(messagg)

        if self._default_protocol not in self._protocols:
            raise_invalid(
                f'default protocol `{self._default_protocol}` is not a defined protocol'
            )
Example #23
params_dict = {
    'md-typeofrun': 'cg',
    'md-numcgsteps': 7,
    'md-maxcgdispl': '0.200 bohr',
    'md-maxforcetol': '0.020 eV/Ang',
}

parameters = Dict(dict=params_dict)
#------------------------------------------------------------------------
#
# No basis set spec in this calculation (default)
#
#--------------------- Pseudopotentials ---------------------------------
#
# FIXME: The family name is hardwired
#
family = Group.get(label='psf_family')
pseudos_dict = family.get_pseudos(structure=s)
#-----------------------------------------------------------------------

#
#--All the inputs of a Siesta calculations are listed in a dictionary--
#
inputs = {
    'structure': s,
    'parameters': parameters,
    'code': code,
    'pseudos': pseudos_dict,
    'metadata': {
        'options': options,
        'label': "Water molecule -- geom fail"
    }
}
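A sketch of how such an inputs dictionary would then be submitted, assuming the `siesta.siesta` calculation entry point of aiida-siesta:

from aiida.engine import submit
from aiida.plugins import CalculationFactory

SiestaCalculation = CalculationFactory('siesta.siesta')
process_node = submit(SiestaCalculation, **inputs)
print('submitted SiestaCalculation<{}>'.format(process_node.pk))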
Example #24
def runner(computer_name, test_set, group_name, potcar_family, dry_run,
           experiment):
    from aiida.orm import Code, Group, load_node
    from aiida.work import submit

    config = {}
    run_info_json = py_path.local('./run_info.json')
    cutoff = 'default'
    if experiment:
        config = read_experiment_yaml(experiment)
        if not computer_name:
            computer_name = config['computer']
        if not group_name:
            group_name = config['group_name']
        if not potcar_family:
            potcar_family = config['potcar_family']
        if 'outfile' in config:
            run_info_json = py_path.local(experiment).dirpath().join(
                config['outfile'])
        test_set = test_set or config.get('test_set', 'perturbed')
        cutoff = config.get('cutoff', 'default')

    cutoff_factor = 1
    if cutoff != 'default':
        cutoff_factor = int(cutoff)

    if not dry_run:
        run_info_json.ensure()
        run_info = json.loads(run_info_json.read()
                              or '{{ "{}": {{ }} }}'.format(computer_name))
    else:
        click.echo('run_info file would be created at {}'.format(
            run_info_json.strpath))

    vasp_proc = calc_cls('vasp.vasp').process()
    inputs = vasp_proc.get_inputs_template()

    computer.set_options(computer=computer_name,
                         options_template=inputs._options)
    inputs.code = Code.get_from_string('vasp@{}'.format(computer_name))
    inputs.settings = data_cls('parameter')(dict=TEST_SETTINGS)

    structures_group_name = PERTURBED_SET_GROUPNAME
    if test_set == 'non_perturbed':
        structures_group_name = UNPERTURBED_SET_GROUPNAME
    structures_group = Group.get(name=structures_group_name)

    if not dry_run:
        calc_group, created = Group.get_or_create(name=group_name)
    else:
        created = not bool(Group.query(name=group_name))
    calc_group_msg = 'Appending to {new_or_not} group {name}.'
    new_or_not = 'new' if created else 'existing'
    click.echo(calc_group_msg.format(new_or_not=new_or_not, name=group_name))

    ## limit structures if given in experiment yaml
    structures = list(structures_group.nodes)
    only_formulae = config.get('only_formulae', None)
    if only_formulae:
        structures = [
            structure for structure in structures
            if structure.get_formula() in only_formulae
        ]

    potcar_map = scf_potcar.POTCAR_MAP

    for structure in structures:

        inputs.structure = structure
        kpoints = data_cls('array.kpoints')()
        kpoints.set_cell_from_structure(structure)
        kpoints.set_kpoints_mesh_from_density(0.15, [0] * 3)
        inputs.kpoints = kpoints

        inputs.potential = data_cls('vasp.potcar').get_potcars_from_structure(
            structure=structure, family_name=potcar_family, mapping=potcar_map)

        ispin, magmom = magnetic_info(structure, potcar_family, potcar_map)
        incar_overrides = {}
        if ispin == 1:
            magnetism_string = "non-spin-polarized"
        elif ispin == 2:
            magnetism_string = "collinear-spin"
            incar_overrides['ispin'] = ispin
        else:
            raise Exception(
                "WTF"
            )  # This is not how you do non-collinear calcs! Set noncolin = True instead
        if magmom:
            incar_overrides['magmom'] = magmom

        if cutoff_factor != 1:
            default_enmax = cutoff_from_structure(structure=structure,
                                                  potcar_family=potcar_family,
                                                  mapping=potcar_map)
            incar_overrides['enmax'] = cutoff_factor * default_enmax

        inputs.parameters = scf_incar.get_scf_incar(inputs=inputs,
                                                    overrides=incar_overrides)

        cutoff_msg = 'default'
        if cutoff_factor != 1:
            cutoff_msg = 'cutoff factor: {}'.format(cutoff_factor)

        if not dry_run:
            running_info = submit(vasp_proc, **inputs)
            running_calc = load_node(running_info.pid)
            running_calc.set_extra('magnetism', magnetism_string)
            running_calc.set_extra('cutoff', cutoff_msg)
            calc_group.add_nodes(running_calc)
            run_info[computer_name][inputs.structure.pk] = running_calc.pk
        else:
            click.echo('not submitting {}'.format(structure.get_formula()))
            from pprint import pformat
            click.echo(pformat({k: v for k, v in inputs.items()}))

    if not dry_run:
        with run_info_json.open('w') as run_info_fo:
            json.dump(run_info, run_info_fo)