Example #1
    def group_create(self, *args):
        """
        Create a new empty group.
        """
        if not is_dbenv_loaded():
            load_dbenv()

        import argparse
        from aiida.orm import Group as G

        parser = argparse.ArgumentParser(
            prog=self.get_full_command_name(),
            description='Create a new empty group.')
        parser.add_argument('GROUPNAME', help="The name of the new group")

        args = list(args)
        parsed_args = parser.parse_args(args)

        group_name = parsed_args.GROUPNAME

        group, created = G.get_or_create(name=group_name)

        if created:
            print "Group created with PK = {} and name '{}'".format(
                group.pk, group.name)
        else:
            print "Group '{}' already exists, PK = {}".format(
                group.name, group.pk)
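
The core of this verdi command is the idempotent Group.get_or_create call. A minimal standalone sketch of just that pattern, assuming a loaded profile on the legacy (pre-1.0) AiiDA API used throughout these examples, where groups are identified by name; the group name below is a placeholder:

from aiida.orm import Group

# get_or_create never fails if the group already exists: it returns the
# existing group together with a flag telling whether it was just created.
group, created = Group.get_or_create(name='my_structures')  # placeholder name
if created:
    print("Group created with PK = {} and name '{}'".format(group.pk, group.name))
else:
    print("Group '{}' already exists, PK = {}".format(group.name, group.pk))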
Example #2
    def _prepare_group_for_upload(cls,
                                  group_name,
                                  group_description=None,
                                  dry_run=False):
        """Prepare a (possibly new) group to upload a POTCAR family to."""
        if not dry_run:
            group, group_created = Group.get_or_create(
                name=group_name, type_string=cls.potcar_family_type_string)
        else:
            group = cls.get_potcar_group(group_name)
            group_created = bool(not group)
            if not group:
                group = Group(name=group_name)

        if group.user.pk != get_current_user().pk:
            raise UniquenessError(
                'There is already a POTCAR family group with name {}, but it belongs to user {}, therefore you cannot modify it'
                .format(group_name, group.user.email))

        if group_description:
            group.description = group_description
        elif group_created:
            raise ValueError(
                'A new POTCAR family {} should be created but no description was given!'
                .format(group_name))

        return group
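
A hedged usage sketch for the helper above; the owning class name PotcarData, the family name and the description are assumptions made only for illustration:

# Hypothetical call site; PotcarData is assumed to be the aiida-vasp class
# carrying this classmethod, and all argument values are placeholders.
group = PotcarData._prepare_group_for_upload(
    group_name='PBE.54',
    group_description='example POTCAR family description',
    dry_run=True)  # with dry_run=True nothing new is stored in the database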
Example #3
def create_group(name, nodes, description=None):
    """
    Creates a group for a given node list.

    So far this is only used as an AiiDA verdi command.
    :param name: string name for the group
    :param nodes: list of AiiDA nodes, pks, or uuids
    :param description: optional string that will be stored as the description of the group

    :return: the group, an AiiDA Group
    usage example:

    group_name = 'delta_structures_gustav'
    nodes_to_group_pks = [2142, 2084]
    create_group(group_name, nodes_to_group_pks,
                 description='delta structures added by hand from Gustavs inpgen files')
    """
    group, created = Group.get_or_create(name=name)
    if created:
        print('Group created with PK={} and name {}'.format(
            group.pk, group.name))
    else:
        print(
            'Group with name {} and pk {} already exists. Do you want to add nodes? [y/n]'
            .format(group.name, group.pk))
        answer = raw_input()
        if answer.strip().lower() == 'y':
            pass
        else:
            return
    nodes2 = []
    nodes2_pks = []
    for node in nodes:
        try:
            node = int(node)
        except ValueError:
            pass
        nodes2_pks.append(node)
        try:
            nodes2.append(load_node(node))
        except Exception:  # e.g. NotExistent if the node cannot be loaded
            pass

    group.add_nodes(nodes2)
    print('added nodes: {} to group {} {}'.format(nodes2_pks, group.name,
                                                  group.pk))

    if description:
        group.description = description

    return group
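
A short usage sketch mirroring the docstring's own example; the group name and node PKs are illustrative only:

# Illustrative values; any mix of nodes, PKs or UUIDs is accepted.
group = create_group('delta_structures_gustav', [2142, 2084],
                     description='delta structures added by hand')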
Example #4
    def get_or_create_famgroup(cls, famname):
        """Returns a PAW family group, creates it if it didn't exists"""
        from aiida.orm import Group
        from aiida.backends.utils import get_automatic_user

        group, group_created = Group.get_or_create(name=famname,
                                                   type_string=cls.group_type)

        if group.user != get_automatic_user():
            raise UniquenessError("There is already a UpfFamily group "
                                  "with name {}, but it belongs to user {},"
                                  " therefore you cannot modify it".format(
                                      famname, group.user.email))
        return group, group_created
Example #5
    def get_or_create_famgroup(cls, famname):
        """Returns a PAW family group, creates it if it didn't exists"""
        from aiida.orm import Group
        from aiida_vasp.utils.aiida_utils import get_current_user

        group, group_created = Group.get_or_create(name=famname,
                                                   type_string=cls.group_type)

        if group.user.pk != get_current_user().pk:
            raise UniquenessError(
                "There is already a LegacyPawData family group "
                "with name {}, but it belongs to user {},"
                " therefore you cannot modify it".format(
                    famname, group.user.email))
        return group, group_created
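
Examples #4 and #5 differ only in how the current user is obtained; the shared guard is: get or create the family group, then refuse to touch it if it belongs to somebody else. A condensed sketch of that guard, where the helper name get_or_create_owned_group and the current_user argument are made up for illustration, and the UniquenessError import path is assumed:

from aiida.common.exceptions import UniquenessError  # assumed import path
from aiida.orm import Group

def get_or_create_owned_group(name, type_string, current_user):
    """Return (group, created), refusing to reuse a group owned by another user."""
    group, created = Group.get_or_create(name=name, type_string=type_string)
    if group.user.pk != current_user.pk:
        raise UniquenessError(
            "Group '{}' already exists but belongs to {}, so it cannot be "
            "modified here".format(name, group.user.email))
    return group, created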
Example #6
    def final_step(self):
        """
        Append results
        """
        main_params = self.get_parameters()

        # Retrieve the MATDYN calculation
        matdyn_calc = self.get_step_calculations(self.run_matdyn)[0]

        # get dispersions
        bandsdata = matdyn_calc.out.output_phonon_bands

        self.append_to_report("Phonon dispersions done (bandsdata pk: {})"
                              "".format(bandsdata.pk))

        self.add_result("phonon_dispersion", bandsdata)
        bandsdata.label = "Phonon bands"
        bandsdata.description = ("Phonon dispersion calculated with"
                                 " the workflow {}".format(self.pk))

        group_name = main_params.get('dispersion_group_name', None)
        if group_name is not None:
            # create or get the group
            group, created = Group.get_or_create(name=group_name)
            if created:
                self.append_to_report("Created group '{}'".format(group_name))
            # put the bands data into the group
            group.add_nodes(bandsdata)
            self.append_to_report(
                "Adding bands to group '{}'".format(group_name))

        # clean scratch leftovers, if requested
        if main_params.get('dispersion_input', {}).get('clean_workdir',
                                                       self._clean_workdir):
            self.append_to_report("Cleaning scratch directories")
            save_calcs = []
            try:
                # Note that the order is important!
                save_calcs.append(self.get_result('ph_calculation'))
                save_calcs.append(self.get_result('pw_calculation'))
            except (NameError, ValueError):
                pass
            helpers.wipe_all_scratch(self, save_calcs)

        self.next(self.exit)
Example #7
    def prep_calc(self):
        from aiida.orm import Group

        now = datetime.now().strftime("%Y%m%d%H%M%S")

        keys = ('Nm', 'N', 'Np')
        for key in keys:
            group_name = '%s-%s' % (now, key)
            self.report(group_name)
            group, _ = Group.get_or_create(name=group_name)
            self.out('structures.%s' % key, Str(group_name))
            self.ctx.structures_to_calc[key] = []
            for i, structure in enumerate(self.ctx['structures_%s' % key]):
                self.report(structure)
                structure = StructureData(pymatgen=structure)
                self.out('structures.%s' % structure.uuid, structure)
                group.add_nodes(structure)
                self.ctx.structures_to_calc[key].append((i, structure))
Example #8
    def store(self, with_transaction=True):
        """
        Store a new node in the DB, also saving its repository directory
        and attributes.

        After this method has been called, attributes can no longer be
        changed; extras, on the other hand, can only be changed AFTER
        calling this store() function.

        :note: After successful storage, those links that are in the cache and
            for which the parent node is also already stored will be stored
            automatically. The others will remain unstored.

        :parameter with_transaction: if False, no transaction is used. This
          is meant to be used ONLY if the outer calling function already has
          a transaction open!
        """
        # TODO: This needs to be generalized, allowing for flexible methods
        # for storing data and its attributes.
        from django.db import transaction
        from aiida.common.utils import EmptyContextManager
        from aiida.common.exceptions import ValidationError
        from aiida.backends.djsite.db.models import DbAttribute
        import aiida.orm.autogroup

        if with_transaction:
            context_man = transaction.commit_on_success()
        else:
            context_man = EmptyContextManager()

        if self._to_be_stored:

            # As a first thing, I check if the data is valid
            self._validate()

            # Verify that parents are already stored. Raises if this is not
            # the case.
            self._check_are_parents_stored()

            # I save the corresponding django entry
            # I set the folder
            # NOTE: I first store the files, then only if this is successful,
            # I store the DB entry. In this way,
            # I assume that if a node exists in the DB, its folder is in place.
            # On the other hand, periodically the user might need to run some
            # bookkeeping utility to check for lone folders.
            self._repository_folder.replace_with_folder(
                self._get_temp_folder().abspath, move=True, overwrite=True)

            # I do the transaction only during storage on DB to avoid timeout
            # problems, especially with SQLite
            try:
                with context_man:
                    # Save the row
                    self._dbnode.save()
                    # Save its attributes 'manually' without incrementing
                    # the version for each add.
                    DbAttribute.reset_values_for_node(
                        self.dbnode,
                        attributes=self._attrs_cache,
                        with_transaction=False)
                    # This should not be used anymore: I delete it to
                    # possibly free memory
                    del self._attrs_cache

                    self._temp_folder = None
                    self._to_be_stored = False

                    # Here, I store those links that were in the cache and
                    # that are between stored nodes.
                    self._store_cached_input_links()

            # This is one of the few cases where it is ok to do a 'global'
            # except, also because I am re-raising the exception
            except:
                # I put back the files in the sandbox folder since the
                # transaction did not succeed
                self._get_temp_folder().replace_with_folder(
                    self._repository_folder.abspath, move=True, overwrite=True)
                raise

            # Set up the autogrouping used by verdi run
            autogroup = aiida.orm.autogroup.current_autogroup
            grouptype = aiida.orm.autogroup.VERDIAUTOGROUP_TYPE
            if autogroup is not None:
                if not isinstance(autogroup, aiida.orm.autogroup.Autogroup):
                    raise ValidationError(
                        "current_autogroup is not an AiiDA Autogroup")
                if autogroup.is_to_be_grouped(self):
                    group_name = autogroup.get_group_name()
                    if group_name is not None:
                        from aiida.orm import Group

                        g = Group.get_or_create(name=group_name,
                                                type_string=grouptype)[0]
                        g.add_nodes(self)

        # This is useful because in this way I can do
        # n = Node().store()
        return self
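
As the docstring states, attributes are frozen once store() returns, while extras remain writable; a short sketch of that contract, using only node types and methods that already appear in these examples (ParameterData, store, set_extra) and an assumed legacy import path:

from aiida.orm.data.parameter import ParameterData  # assumed legacy import path

node = ParameterData(dict={'energy': -1.23})  # illustrative content
node.store()                        # files go to the repository, then the DB row
node.set_extra('project', 'demo')   # extras can still be changed after storing
# attributes, by contrast, can no longer be modified on the stored node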
Example #9
def submit_stress_tensor(**kwargs):
    # get code
    #code = Code.get(label='pw.sirius.x', computername='piz_daint', useremail='*****@*****.**')
    code = test_and_get_code('pw.sirius.x', expected_code_type='quantumespresso.pw')
    #code.set_prepend_text(prep_text)

    # calculation should always belong to some group, otherwise things get messy after some time
    stress_tensor_grp, created = Group.get_or_create(name=kwargs['group'])
    
    # create parameters
    params = create_calculation_parameters(code,
                                           kwargs.get('partition', 'cpu'),
                                           kwargs.get('num_ranks_per_node', 36),
                                           kwargs.get('num_ranks_kp', 1),
                                           kwargs.get('num_ranks_diag', 1))
    # load structure
    structure = load_node(kwargs['structure_pk'])
    structure_id = structure.pk  # assumed: the helper functions below expect the structure PK
    
    # generate k-points
    params['kpoints'] = KpointsData()
    params['kpoints'].set_kpoints_mesh(kwargs.get('kmesh', [24, 24, 24]), offset=(0.0, 0.0, 0.0))
    params['atomic_files'] = kwargs['atomic_files']
    params['calculation_wallclock_seconds'] = kwargs.get('time_limit', 3600)
    params['structure'] = structure
    params['num_points'] = 5
    params['group'] = kwargs['group']
    params['kpoints'].store()
    params['calculation_parameters'].store()
    params['calculation_settings'].store()

    stress_tensor_dict = {}
    stress_tensor_dict['label'] = 'stress_tensor_' + structure.get_formula() + '_' + code.label
    stress_tensor_dict['description'] = "Stress tensor for structure with PK=%i"%structure.pk
    stress_tensor_dict['calc_pk'] = []
    stress_tensor_dict['num_points'] = params['num_points']
    stress_tensor_dict['structure_pk'] = structure.pk
    stress_tensor_dict['code_pk'] = code.pk
    stress_tensor_dict['job_tag'] = params['job_tag']

    # volume scales from 0.94 to 1.06, alat scales as pow(1/3)
    scales = np.linspace(0.992, 1.008, num=params['num_points']).tolist()

    eps = np.linspace(-0.008, 0.008, num=params['num_points']).tolist()
    #scales = np.linspace(0.99, 1.05, num=params['num_points']).tolist()

    use_symmetry = False

    if use_symmetry:
        SGN = get_space_group_number(structure_id=structure_id)
    else:
        SGN = 1

    LC = get_Laue_dict(space_group_number=SGN)

    def_list = get_Lagrange_distorted_index(structure_id=structure_id, LC=LC)

    SCs = len(def_list)

    alat_steps = params['num_points']

    distorted_structure_index = []
    eps_index = 0
    for i in def_list:
        for a in eps:

            eps_index = eps_index + 1

            distorted_structure_index.append(eps_index)

    for ii in distorted_structure_index:

        a = eps[ii % alat_steps - 1]
        i = def_list[int((ii - 1) / alat_steps)]

        M_Lagrange_eps = get_Lagrange_strain_matrix(eps=a, def_mtx_index=i)

        structure_new = get_Lagrange_distorted_structure(structure_id=structure_id, M_Lagrange_eps=M_Lagrange_eps)

        structure_new.store()
        
        calc_label = 'gs_' + structure.get_formula() + '_' + code.label
        calc_desc = params['job_tag']
    
        # create calculation
        calc = create_calculation(structure_new, params, calc_label, calc_desc)
        calc.store()
        print "created calculation with uuid='{}' and PK={}".format(calc.uuid, calc.pk)
        stress_tensor_grp.add_nodes([calc])
        calc.submit()
        stress_tensor_dict['calc_pk'].append(calc.pk)
    
    stress_tensor_node = ParameterData(dict=stress_tensor_dict)
    stress_tensor_node.store()
    stress_tensor_grp.add_nodes([stress_tensor_node])
    print "created stress tensor node with uuid='{}' and PK={}".format(stress_tensor_node.uuid, stress_tensor_node.pk)
Example #10
def runner(computer_name, test_set, group_name, potcar_family, dry_run,
           experiment):
    from aiida.orm import Code, Group, load_node
    from aiida.work import submit

    config = {}
    run_info_json = py_path.local('./run_info.json')
    cutoff = 'default'
    if experiment:
        config = read_experiment_yaml(experiment)
        if not computer_name:
            computer_name = config['computer']
        if not group_name:
            group_name = config['group_name']
        if not potcar_family:
            potcar_family = config['potcar_family']
        if 'outfile' in config:
            run_info_json = py_path.local(experiment).dirpath().join(
                config['outfile'])
        test_set = test_set or config.get('test_set', 'perturbed')
        cutoff = config.get('cutoff', 'default')

    cutoff_factor = 1
    if cutoff != 'default':
        cutoff_factor = int(cutoff)

    if not dry_run:
        run_info_json.ensure()
        run_info = json.loads(run_info_json.read()
                              or '{{ "{}": {{ }} }}'.format(computer_name))
    else:
        click.echo('run_info file would be created at {}'.format(
            run_info_json.strpath))

    vasp_proc = calc_cls('vasp.vasp').process()
    inputs = vasp_proc.get_inputs_template()

    computer.set_options(computer=computer_name,
                         options_template=inputs._options)
    inputs.code = Code.get_from_string('vasp@{}'.format(computer_name))
    inputs.settings = data_cls('parameter')(dict=TEST_SETTINGS)

    structures_group_name = PERTURBED_SET_GROUPNAME
    if test_set == 'non_perturbed':
        structures_group_name = UNPERTURBED_SET_GROUPNAME
    structures_group = Group.get(name=structures_group_name)

    if not dry_run:
        calc_group, created = Group.get_or_create(name=group_name)
    else:
        created = not bool(Group.query(name=group_name))
    calc_group_msg = 'Appending to {new_or_not} group {name}.'
    new_or_not = 'new' if created else 'existing'
    click.echo(calc_group_msg.format(new_or_not=new_or_not, name=group_name))

    ## limit structures if given in experiment yaml
    structures = list(structures_group.nodes)
    only_formulae = config.get('only_formulae', None)
    if only_formulae:
        structures = [
            structure for structure in structures
            if structure.get_formula() in only_formulae
        ]

    potcar_map = scf_potcar.POTCAR_MAP

    for structure in structures:

        inputs.structure = structure
        kpoints = data_cls('array.kpoints')()
        kpoints.set_cell_from_structure(structure)
        kpoints.set_kpoints_mesh_from_density(0.15, [0] * 3)
        inputs.kpoints = kpoints

        inputs.potential = data_cls('vasp.potcar').get_potcars_from_structure(
            structure=structure, family_name=potcar_family, mapping=potcar_map)

        ispin, magmom = magnetic_info(structure, potcar_family, potcar_map)
        incar_overrides = {}
        if ispin == 1:
            magnetism_string = "non-spin-polarized"
        elif ispin == 2:
            magnetism_string = "collinear-spin"
            incar_overrides['ispin'] = ispin
        else:
            # Non-collinear calculations are not handled here; they require
            # noncolin = True rather than yet another ispin value.
            raise ValueError('Unexpected ispin value: {}'.format(ispin))
        if magmom:
            incar_overrides['magmom'] = magmom

        if cutoff_factor != 1:
            default_enmax = cutoff_from_structure(structure=structure,
                                                  potcar_family=potcar_family,
                                                  mapping=potcar_map)
            incar_overrides['enmax'] = cutoff_factor * default_enmax

        inputs.parameters = scf_incar.get_scf_incar(inputs=inputs,
                                                    overrides=incar_overrides)

        cutoff_msg = 'default'
        if cutoff_factor != 1:
            cutoff_msg = 'cutoff factor: {}'.format(cutoff_factor)

        if not dry_run:
            running_info = submit(vasp_proc, **inputs)
            running_calc = load_node(running_info.pid)
            running_calc.set_extra('magnetism', magnetism_string)
            running_calc.set_extra('cutoff', cutoff_msg)
            calc_group.add_nodes(running_calc)
            run_info[computer_name][inputs.structure.pk] = running_calc.pk
        else:
            click.echo('not submitting {}'.format(structure.get_formula()))
            from pprint import pformat
            click.echo(pformat({k: v for k, v in inputs.items()}))

    if not dry_run:
        with run_info_json.open('w') as run_info_fo:
            json.dump(run_info, run_info_fo)
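
The dry-run branch above decides whether a group would be created without touching the database; isolated, that check looks like the sketch below, which only uses calls that already appear in this example (Group.get_or_create and Group.query) and a placeholder group name:

from aiida.orm import Group

group_name = 'scf_runs'  # placeholder
dry_run = True

if not dry_run:
    calc_group, created = Group.get_or_create(name=group_name)
else:
    # query only, so nothing is created or stored
    created = not bool(Group.query(name=group_name))

print('Appending to {} group {}.'.format('new' if created else 'existing',
                                         group_name))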