def add_to_group(node):
    """Add *node* to the 'examples' group, creating the group on first use.

    :param node: the AiiDA node to add to the group
    """
    try:
        examples_group = Group.get_from_string('examples')
    except NotExistent:
        # The group does not exist yet: create and persist it.
        examples_group = Group(name='examples')
        examples_group.store()
    examples_group.add_nodes(node)
def get_group():
    """Return the 'examples' Group, creating and storing it if it is absent.

    :returns: the (possibly freshly created) AiiDA Group named 'examples'
    """
    try:
        examples_group = Group.get_from_string('examples')
    except NotExistent:
        # First use: create the group and persist it in the database.
        examples_group = Group(name='examples')
        examples_group.store()
    return examples_group
def validate_group(callback_kwargs, ctx, param, value):
    """
    Command line option validator for an AiiDA Group. It expects a string for the value
    that corresponds to the label or a pk of an AiiDA group.

    :param callback_kwargs: an optional dictionary with arguments for internal use in the validator
    :param ctx: internal context of the click.command
    :param param: the click Parameter, i.e. either the Option or Argument to which the validator is hooked up
    :param value: a Group label or pk
    :returns: a Group instance, or None if no value was given
    :raises click.BadParameter: if the value matches neither a group label nor a group pk
    """
    from aiida.common.exceptions import NotExistent
    from aiida.orm import Group

    if value is None:
        return value

    # First interpretation: the value is a group label.
    try:
        return Group.get_from_string(value)
    except NotExistent:
        pass

    # Second interpretation: the value is a pk.
    # BUGFIX: also catch ValueError — int(value) raises it for a non-numeric
    # label that did not resolve above; previously that produced a raw
    # traceback instead of a clean click.BadParameter error.
    try:
        return Group.get(pk=int(value))
    except (ValueError, NotExistent) as exception:
        raise click.BadParameter(
            "failed to load the Group with the label or pk '{}'\n{}".format(
                value, exception))
def __init__(self, **kwargs):
    """Initialize the workflow and attach the 'tbmodel' group.

    The group is created and stored on first use if it does not exist yet.
    """
    super(TbmodelWorkflow, self).__init__(**kwargs)
    self.group = None
    try:
        self.group = Group.get_from_string('tbmodel')
    except NotExistent:
        # No 'tbmodel' group yet: create one, persist it, and use it.
        new_group = Group(name='tbmodel')
        new_group.store()
        self.group = new_group
def get_ref_from_group(element, group):
    """
    Return a structure data node from a given group for a given element.

    :param element: string with the element, i.e. 'Si'
    :param group: group name or pk identifying a Group of structures
    :returns: tuple ``(structure, report)`` where ``structure`` is the AiiDA
        StructureData node whose digit-stripped formula equals ``element``
        (or None if not found / the group could not be resolved) and
        ``report`` is a list of diagnostic messages
    """
    report = []

    try:
        group_pk = int(group)
    except ValueError:
        group_pk = None
        group_name = group

    if group_pk is not None:
        try:
            str_group = Group(dbgroup=group_pk)
        except NotExistent:
            str_group = None
            message = ('You have to provide a valid pk for a Group of'
                       'structures or a Group name. Reference key: "group".'
                       'given pk= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_pk))
            report.append(message)
    else:
        try:
            str_group = Group.get_from_string(group_name)
        except NotExistent:
            str_group = None
            message = (
                'You have to provide a valid pk for a Group of'
                'structures or a Group name. Wf_para key: "struc_group".'
                'given group name= {} is not a valid group'
                '(or is your group name integer?)'.format(group_name))
            report.append(message)

    # BUGFIX: previously the code fell through and dereferenced
    # str_group.nodes even when the group lookup failed, raising
    # AttributeError on None instead of returning the report.
    if str_group is None:
        return None, report

    for struc in str_group.nodes:
        formula = struc.get_formula()
        # Strip the stoichiometry digits so e.g. 'Si2' matches 'Si'.
        # BUGFIX: str.translate(None, digits) is Python-2-only (it raises
        # TypeError on Python 3); this filter works on both.
        eformula = ''.join(ch for ch in formula if ch not in digits)
        if eformula == element:
            return struc, report

    report.append('Structure node for element {} not found in group {}'
                  ''.format(element, group))
    return None, report
def test_createsolute_reconstruct_from_extra(self):
    """Solute supercells must be exactly reconstructible from their extras.

    Runs the solute-supercell CLI, then rebuilds each stored structure from
    its extras and compares against the stored ASE structure.
    """
    from aiida_alloy.cli.create_solutesupercell_structures import cli as cli_solute
    from aiida_alloy.utils import ase_structures_match
    solute_test_group_name = "__TEST_SOLUTE_STRUCTURE"
    command = [
        "--lattice_size", "4.04", "--supercell_shape", "1,1,1",
        "--matrix_element", "Al", "--firstsolute_elements", "Mg,Si,Vac",
        "--secondsolute_elements", "Mg,Si,Vac", "--structure_group_name",
        solute_test_group_name
    ]
    result = self.runner.invoke(cli_solute, command).output
    solute_test_group = Group.get_from_string(solute_test_group_name)
    solute_structures = get_all_structure_from_structuregroup(
        solute_test_group)
    assert len(solute_structures) == 10

    def reconstruct_solute_fromextras(extra):
        """Rebuild the solute supercell described by an extras dict."""
        from aiida_alloy.utils import gen_ase_supercell
        lattice_size = extra['lattice_size']
        supercell_shape = extra['supercell_shape']
        matrix_element = extra['matrix_element']
        reconstruct_ase = gen_ase_supercell(lattice_size, supercell_shape,
                                            matrix_element)
        # 'indexing' maps original atom indices to their current positions,
        # which shift down by one after each vacancy deletion.
        indexing = np.arange(0, len(reconstruct_ase))
        if 'sol1_index' in extra:
            sol1_index = extra['sol1_index']
            if extra['sol1_element'] == 'Vac':
                del reconstruct_ase[indexing[sol1_index]]
                indexing = [x - 1 if x >= sol1_index else x for x in indexing]
            else:
                reconstruct_ase[
                    indexing[sol1_index]].symbol = extra['sol1_element']
        if 'sol2_index' in extra:
            sol2_index = extra['sol2_index']
            if extra['sol2_element'] == 'Vac':
                # Resolve the current position BEFORE deleting the atom.
                deleted_position = indexing[sol2_index]
                del reconstruct_ase[deleted_position]
                # BUGFIX: the shift must be relative to the position deleted
                # for the SECOND solute; the original shifted by sol1_index
                # (copy-paste error), mis-remapping indices after the second
                # vacancy.
                indexing = [x - 1 if x >= deleted_position else x
                            for x in indexing]
            else:
                reconstruct_ase[
                    indexing[sol2_index]].symbol = extra['sol2_element']
        return reconstruct_ase

    for solute_structure in solute_structures:
        original_ase = solute_structure.get_ase()
        reconstructed_ase = reconstruct_solute_fromextras(
            solute_structure.extras)
        assert ase_structures_match(original_ase, reconstructed_ase)
def get_nodes_from_group(group, return_format='uuid'):
    """
    Return a list of node identifiers for a given group.

    :param group: a Group name (str), a group pk (int or digit string), or a
        Group instance
    :param return_format: 'uuid' (default) to collect node uuids, 'pk' to
        collect node pks; any other value yields an empty list (unchanged
        legacy behavior)
    :returns: list of node uuids or pks; empty list if the group could not be
        resolved (an error message is printed in that case)
    """
    from aiida.orm import Group
    from aiida.common.exceptions import NotExistent

    nodes = []
    str_group = None

    # BUGFIX: check for a Group instance FIRST. Previously int(group) was
    # attempted on it, raising an uncaught TypeError (only ValueError was
    # handled), so the documented Group-object path was unreachable.
    if isinstance(group, Group):
        str_group = group
    elif group is not None:
        try:
            group_pk = int(group)
        except ValueError:
            group_pk = None
        if group_pk is not None:
            try:
                str_group = Group(dbgroup=group_pk)
            except NotExistent:
                message = ('You have to provide a valid pk for a Group '
                           'or a Group name. Reference key: "group".'
                           'given pk= {} is not a valid group'
                           '(or is your group name integer?)'.format(group_pk))
                print(message)
        else:
            try:
                str_group = Group.get_from_string(group)
            except NotExistent:
                message = (
                    'You have to provide a valid pk for a Group or a Group name.'
                    'given group name= {} is not a valid group'
                    '(or is your group name integer?)'.format(group))
                print(message)
    else:
        print(
            'I could not handle given input, either Group, pk, or group name please.'
        )

    # BUGFIX: previously the pk/name error branches fell through and
    # dereferenced str_group.nodes on None, raising AttributeError.
    if str_group is None:
        return nodes

    for node in str_group.nodes:
        if return_format == 'uuid':
            nodes.append(node.uuid)
        elif return_format == 'pk':
            nodes.append(node.pk)
    return nodes
def get_para_from_group(element, group):
    """
    Return a parameter node for a given element from a given group.

    The match is made on the node extra 'element'.

    :param element: string with the element, i.e. 'Si'
    :param group: group name or pk identifying a Group of parameter nodes
    :returns: tuple ``(parameter, report)`` where ``parameter`` is the first
        node whose 'element' extra equals ``element`` (or None if not found /
        the group could not be resolved) and ``report`` is a list of
        diagnostic messages
    """
    report = []

    try:
        group_pk = int(group)
    except ValueError:
        group_pk = None
        group_name = group

    if group_pk is not None:
        try:
            para_group = Group(dbgroup=group_pk)
        except NotExistent:
            para_group = None
            message = ('You have to provide a valid pk for a Group of '
                       'parameters or a Group name. Reference key: "group".'
                       'given pk= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_pk))
            report.append(message)
    else:
        try:
            para_group = Group.get_from_string(group_name)
        except NotExistent:
            para_group = None
            message = ('You have to provide a valid pk for a Group of '
                       'parameters or a Group name. Wf_para key: "para_group".'
                       'given group name= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_name))
            report.append(message)

    # BUGFIX: previously the code fell through and dereferenced
    # para_group.nodes even when the lookup failed, raising AttributeError
    # on None instead of returning the report to the caller.
    if para_group is None:
        return None, report

    for para in para_group.nodes:
        if para.get_extras().get('element', None) == element:
            return para, report

    report.append('Parameter node for element {} not found in group {}'
                  ''.format(element, group))
    return None, report
def test_createrandom_reconstruct_from_extra(self):
    """Random supercells must be exactly reproducible from their extras.

    Runs the random-supercell CLI, then rebuilds each stored structure from
    its recorded extras (seeds included) and compares with the stored one.
    """
    from aiida_alloy.cli.create_randomsupercell_structures import cli as cli_random
    from aiida_alloy.utils import ase_structures_match

    random_test_group_name = "__TEST_RANDOM_STRUCTURE"
    command = [
        "--matrix_elements", "Al,Cu,Mg", "--lattice_sizes", "4,3,2",
        "--concentrations", "0.7,0.2,0.1", "--number_samples", "3",
        "--structure_group_name", random_test_group_name,
        "--random_displacement", "0.1", "--supercell_shape", "2,2,2"
    ]
    self.runner.invoke(cli_random, command)

    random_test_group = Group.get_from_string(random_test_group_name)
    random_structures = get_all_structure_from_structuregroup(
        random_test_group)

    def rebuild_from_extras(extras):
        """Recreate the randomized, rattled supercell from an extras dict."""
        from aiida_alloy.utils import (randomize_asestructure_elements,
                                       gen_ase_supercell, get_average_lattice)
        averaged_lattice = get_average_lattice(extras['lattice_sizes'],
                                               extras['concentrations'])
        base_supercell = gen_ase_supercell(averaged_lattice,
                                           extras['supercell_shape'],
                                           extras['matrix_elements'][0])
        rebuilt = randomize_asestructure_elements(base_supercell,
                                                  extras['matrix_elements'],
                                                  extras['concentrations'],
                                                  extras['matrix_seed'])
        rebuilt.rattle(stdev=extras['random_displacement_stdev'],
                       seed=int(extras['displacement_seed']))
        return rebuilt

    assert len(random_structures) == 3
    for stored_structure in random_structures:
        rebuilt_ase = rebuild_from_extras(stored_structure.extras)
        stored_ase = stored_structure.get_ase()
        assert ase_structures_match(rebuilt_ase, stored_ase)
def test_storease_stores_structure(self):
    """store_asestructure must store a structure once and be idempotent."""
    from aiida_alloy.utils import (get_all_structure_from_structuregroup,
                                   store_asestructure)

    dryrun = False
    test_group = Group.get_from_string(TEST_GROUP_NAME)

    before_any_store = get_all_structure_from_structuregroup(test_group)

    # First call: the structure should be stored into the group.
    store_asestructure(self.basic_ase_supercell, self.test_extras, test_group,
                       dryrun)
    after_first_store = get_all_structure_from_structuregroup(test_group)

    # Second, identical call: no duplicate may be created.
    store_asestructure(self.basic_ase_supercell, self.test_extras, test_group,
                       dryrun)
    after_second_store = get_all_structure_from_structuregroup(test_group)

    assert len(before_any_store) == 0
    assert len(after_first_store) == 1
    assert len(after_second_store) == 1
def get_calcs_from_groups(self):
    """
    Extract the crystal structures and parameter data nodes from the given
    groups and create calculation 'pairs' (stru, para), stored in
    self.ctx.calcs_to_run.

    Group references are read from the workflow parameters under the keys
    'struc_group' and 'para_group' (both default to 'delta') and may be a
    group pk (integer) or a group name.
    """
    wf_dict = self.inputs.wf_parameters.get_dict()
    #get all delta structure
    str_gr = wf_dict.get('struc_group', 'delta')
    # Decide whether the given reference is a pk (int) or a name (str).
    try:
        group_pk = int(str_gr)
    except ValueError:
        group_pk = None
        group_name = str_gr
    if group_pk is not None:
        try:
            str_group = Group(dbgroup=group_pk)
        except NotExistent:
            str_group = None
            message = ('You have to provide a valid pk for a Group of'
                       'structures or a Group name. Wf_para key: "struc_group".'
                       'given pk= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_pk))
            #print(message)
            self.report(message)
            self.abort_nowait('I abort, because I have no structures to calculate ...')
    else:
        try:
            str_group = Group.get_from_string(group_name)
        except NotExistent:
            str_group = None
            message = ('You have to provide a valid pk for a Group of'
                       'structures or a Group name. Wf_para key: "struc_group".'
                       'given group name= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_name))
            #print(message)
            self.report(message)
            self.abort_nowait('I abort, because I have no structures to calculate ...')
    #get all delta parameters
    para_gr = wf_dict.get('para_group', 'delta')
    if not para_gr:
        #waring use defauls
        message = 'COMMENT: I did recieve "para_group=None" as input. I will use inpgen defaults'
        self.report(message)
    # Same pk-vs-name resolution for the parameter group.
    try:
        group_pk = int(para_gr)
    except ValueError:
        group_pk = None
        group_name = para_gr
    if group_pk is not None:
        try:
            para_group = Group(dbgroup=group_pk)
        except NotExistent:
            para_group = None
            message = ('ERROR: You have to provide a valid pk for a Group of'
                       'parameters or a Group name (or use None for inpgen defaults). Wf_para key: "para_group".'
                       'given pk= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_pk))
            #print(message)
            self.report(message)
            self.abort_nowait('ERROR: I abort, because I have no paremeters to calculate and '
                              'I guess you did not want to use the inpgen default...')
    else:
        try:
            para_group = Group.get_from_string(group_name)
        except NotExistent:
            para_group = None
            # NOTE(review): this message names key "struc_group" although this
            # is the para_group branch — looks like a copy-paste slip; left
            # unchanged here.
            message = ('ERROR: You have to provide a valid pk for a Group of'
                       'parameters or a Group name (or use None for inpgen defaults). Wf_para key: "struc_group".'
                       'given group name= {} is not a valid group'
                       '(or is your group name integer?)'.format(group_name))
            #print(message)
            self.report(message)
            self.abort_nowait('ERROR: I abort, because I have no paremeters to calculate and '
                              'I guess you did not want to use the inpgen default...')
    # creating calculation pairs (structure, parameters)
    # NOTE(review): if a group lookup failed above, execution still reaches
    # this point (abort_nowait does not appear to raise here) and would fail
    # on .nodes of None — TODO confirm abort semantics before relying on it.
    para_nodesi = para_group.nodes
    para_nodes = []
    for para in para_nodesi:
        para_nodes.append(para)
    #print para_nodes
    n_para = len(para_nodes)
    stru_nodes = str_group.nodes
    n_stru = len(stru_nodes)
    if n_para != n_stru:
        message = ('COMMENT: You did not provide the same number of parameter'
                   'nodes as structure nodes. Is this wanted? npara={} nstru={}'.format(n_para, n_stru))
        self.report(message)
    calcs = []
    # Pair each structure with a matching parameter node via get_paranode.
    for struc in stru_nodes:
        para = get_paranode(struc, para_nodes)
        #if para:
        calcs.append((struc, para))
        #else:
        #    calcs.append((struc))
    pprint(calcs[:20])
    self.ctx.calcs_to_run = calcs
def run(self, *args):
    """
    Export data from the DB to a file (verdi-style command, Python 2 era).

    Collects nodes (given directly, via group names, or via group pks) plus
    computers, then delegates to the aiida export/export_zip functions.
    Prints warnings for missing pks and exits with status 1 on errors.
    """
    load_dbenv()
    import argparse
    from aiida.common.exceptions import NotExistent
    from aiida.backends.djsite.db import models
    from aiida.orm import Group
    from aiida.orm.importexport import export, export_zip
    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description='Export data from the DB.')
    parser.add_argument('-c', '--computers', nargs='+', type=int, metavar="PK",
                        help="Export the given computers")
    parser.add_argument('-n', '--nodes', nargs='+', type=int, metavar="PK",
                        help="Export the given nodes")
    parser.add_argument(
        '-g', '--groups', nargs='+', metavar="GROUPNAME",
        help="Export all nodes in the given group(s), identified by name.",
        type=str)
    parser.add_argument(
        '-G', '--group_pks', nargs='+', metavar="PK",
        help="Export all nodes in the given group(s), identified by pk.",
        type=str)
    parser.add_argument(
        '-P', '--no-parents', dest='no_parents', action='store_true',
        help=
        "Store only the nodes that are explicitly given, without exporting the parents"
    )
    parser.set_defaults(no_parents=False)
    parser.add_argument(
        '-O', '--no-calc-outputs', dest='no_calc_outputs', action='store_true',
        help=
        "If a calculation is included in the list of nodes to export, do not export its outputs"
    )
    parser.set_defaults(no_calc_outputs=False)
    parser.add_argument('-y', '--overwrite', dest='overwrite',
                        action='store_true',
                        help="Overwrite the output file, if it exists")
    parser.set_defaults(overwrite=False)
    # -z and -Z are mutually exclusive zip-output modes.
    zipsubgroup = parser.add_mutually_exclusive_group()
    zipsubgroup.add_argument(
        '-z', '--zipfile-compressed', dest='zipfilec', action='store_true',
        help="Store as zip file (experimental, should be faster")
    zipsubgroup.add_argument(
        '-Z', '--zipfile-uncompressed', dest='zipfileu', action='store_true',
        help=
        "Store as uncompressed zip file (experimental, should be faster")
    parser.set_defaults(zipfilec=False)
    parser.set_defaults(zipfileu=False)
    parser.add_argument('output_file', type=str,
                        help='The output file name for the export file')
    parsed_args = parser.parse_args(args)
    if parsed_args.nodes is None:
        node_pk_list = []
    else:
        node_pk_list = parsed_args.nodes
    groups_list = []
    # Resolve groups given by NAME; collect their nodes and db groups.
    if parsed_args.groups is not None:
        for group_name in parsed_args.groups:
            try:
                group = Group.get_from_string(group_name)
            except (ValueError, NotExistent) as e:
                print >> sys.stderr, e.message
                sys.exit(1)
            node_pk_list += group.dbgroup.dbnodes.values_list('pk', flat=True)
            groups_list.append(group.dbgroup)
    # Resolve groups given by PK; same collection as above.
    if parsed_args.group_pks is not None:
        for group_pk in parsed_args.group_pks:
            try:
                group = Group.get(pk=group_pk)
            except (ValueError, NotExistent) as e:
                print >> sys.stderr, e.message
                sys.exit(1)
            node_pk_list += group.dbgroup.dbnodes.values_list('pk', flat=True)
            groups_list.append(group.dbgroup)
    # Deduplicate pks and warn about any that do not exist in the DB.
    node_pk_list = set(node_pk_list)
    node_list = list(models.DbNode.objects.filter(pk__in=node_pk_list))
    missing_nodes = node_pk_list.difference(_.pk for _ in node_list)
    for pk in missing_nodes:
        print >> sys.stderr, ("WARNING! Node with pk= {} "
                              "not found, skipping.".format(pk))
    if parsed_args.computers is not None:
        computer_list = list(
            models.DbComputer.objects.filter(pk__in=parsed_args.computers))
        missing_computers = set(parsed_args.computers).difference(
            _.pk for _ in computer_list)
        for pk in missing_computers:
            print >> sys.stderr, ("WARNING! Computer with pk= {} "
                                  "not found, skipping.".format(pk))
    else:
        computer_list = []
    what_list = node_list + computer_list + groups_list
    # Choose the export backend based on the zip flags (plain export default).
    export_function = export
    additional_kwargs = {}
    if parsed_args.zipfileu:
        export_function = export_zip
        additional_kwargs.update({"use_compression": False})
    elif parsed_args.zipfilec:
        export_function = export_zip
        additional_kwargs.update({"use_compression": True})
    try:
        export_function(what=what_list,
                        also_parents=not parsed_args.no_parents,
                        also_calc_outputs=not parsed_args.no_calc_outputs,
                        outfile=parsed_args.output_file,
                        overwrite=parsed_args.overwrite,
                        **additional_kwargs)
    except IOError as e:
        print >> sys.stderr, "IOError: {}".format(e.message)
        sys.exit(1)
def group_delete(self, *args):
    """
    Delete an existing group, identified by name or PK.

    Refuses to delete a non-empty group unless -f/--force is given (only the
    group is deleted, never its nodes), and asks for interactive confirmation
    before deleting. Exits with status 1 on lookup errors or refusal.
    """
    if not is_dbenv_loaded():
        load_dbenv()
    import argparse
    from aiida.common.exceptions import NotExistent
    from aiida.orm import Group as G
    from aiida.cmdline import wait_for_confirmation
    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description='Delete an existing group.')
    parser.add_argument('-f', '--force', dest='force', action='store_true',
                        help="Force deletion of the group even if it "
                             "is not empty. Note that this deletes only the "
                             "group and not the nodes.")
    parser.add_argument('GROUP',
                        help="The name or PK of the group to delete")
    parser.set_defaults(force=False)
    args = list(args)
    parsed_args = parser.parse_args(args)
    group = parsed_args.GROUP
    force = parsed_args.force
    # The GROUP argument may be a pk (integer) or a group name.
    try:
        group_pk = int(group)
    except ValueError:
        group_pk = None
        group_name = group
    if group_pk is not None:
        try:
            group = G(dbgroup=group_pk)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    else:
        try:
            group = G.get_from_string(group_name)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    group_pk = group.pk
    group_name = group.name
    num_nodes = len(group.nodes)
    # Protect non-empty groups unless the user explicitly forces deletion.
    if num_nodes > 0 and not force:
        print >> sys.stderr, ("Group '{}' is not empty (it contains {} "
                              "nodes). Pass the -f option if you really want to delete "
                              "it.".format(group_name, num_nodes))
        sys.exit(1)
    sys.stderr.write("Are you sure to kill the group with PK = {} ({})? "
                     "[Y/N] ".format(group_pk, group_name))
    if not wait_for_confirmation():
        sys.exit(0)
    group.delete()
    print "Group '{}' (PK={}) deleted.".format(group_name, group_pk)
def group_removenodes(self, *args):
    """
    Remove nodes from a given group.

    The group is identified by name or PK (-g/--group); the nodes by PK or
    UUID. Asks for interactive confirmation before removing. Exits with
    status 1 if the group or any node cannot be loaded.
    """
    from aiida.cmdline import delayed_load_node as load_node
    from aiida.cmdline import wait_for_confirmation
    if not is_dbenv_loaded():
        load_dbenv()
    import argparse
    from aiida.common.exceptions import NotExistent
    from aiida.orm import Group as G
    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description='Remove nodes from a given AiiDA group.')
    parser.add_argument('-g', '--group', dest='group', required=True,
                        help="The name or PK of the group you want to "
                             "remove a node from.")
    parser.add_argument('nodes', nargs='+',
                        help="The PK or UUID of the nodes to remove. An "
                             "error is raised if the node does not exist. "
                             "No message is shown if the node does not belong "
                             "to the group.")
    parser.set_defaults(raw=False)
    args = list(args)
    parsed_args = parser.parse_args(args)
    group = parsed_args.group
    # The group reference may be a pk (integer) or a group name.
    try:
        group_pk = int(group)
    except ValueError:
        group_pk = None
        group_name = group
    if group_pk is not None:
        try:
            group = G(dbgroup=group_pk)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    else:
        try:
            group = G.get_from_string(group_name)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    group_pk = group.pk
    group_name = group.name
    nodes = []
    for node in parsed_args.nodes:
        # Numeric identifiers are treated as PKs; anything else is kept as a
        # string and load_node decides whether it is a valid UUID.
        try:
            node = int(node)
        except ValueError:
            pass  # I leave it as a string and let load_node complain
            # if it is not a UUID
        try:
            nodes.append(load_node(node))
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    sys.stderr.write("Are you sure to remove {} nodes from the group "
                     "with PK = {} "
                     "({})? [Y/N] ".format(len(nodes), group_pk, group_name))
    if not wait_for_confirmation():
        sys.exit(0)
    group.remove_nodes(nodes)
def group_show(self, *args):
    """
    Show information on a given group, identified by PK or name.

    With -r/--raw, prints only a space-separated list of node PKs (or UUIDs
    when -u/--uuid is also given); otherwise prints a summary table of the
    group followed by a table of its nodes.
    """
    if not is_dbenv_loaded():
        load_dbenv()
    import argparse
    from aiida.common.exceptions import NotExistent
    from aiida.orm import Group as G
    from aiida.common.utils import str_timedelta
    from aiida.utils import timezone
    from aiida.common.pluginloader import from_type_to_pluginclassname
    from tabulate import tabulate
    parser = argparse.ArgumentParser(
        prog=self.get_full_command_name(),
        description='Information on a given AiiDA group.')
    parser.add_argument('-r', '--raw', dest='raw', action='store_true',
                        help="Show only a space-separated list of PKs of "
                             "the calculations in the group")
    parser.add_argument('-u', '--uuid', dest='uuid', action='store_true',
                        help="Show UUIDs together with PKs. Note: if the "
                             "--raw option is also passed, PKs are not "
                             "printed, but oly UUIDs.")
    parser.add_argument('GROUP', help="The PK of the group to show")
    parser.set_defaults(raw=False)
    parser.set_defaults(uuid=False)
    args = list(args)
    parsed_args = parser.parse_args(args)
    group = parsed_args.GROUP
    # The GROUP argument may be a pk (integer) or a group name.
    try:
        group_pk = int(group)
    except ValueError:
        group_pk = None
        group_name = group
    if group_pk is not None:
        try:
            group = G(dbgroup=group_pk)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    else:
        try:
            group = G.get_from_string(group_name)
        except NotExistent as e:
            print >> sys.stderr, "Error: {}.".format(e.message)
            sys.exit(1)
    group_pk = group.pk
    group_name = group.name
    if parsed_args.raw:
        # Raw mode: identifiers only, space-separated on one line.
        if parsed_args.uuid:
            print " ".join(str(_.uuid) for _ in group.nodes)
        else:
            print " ".join(str(_.pk) for _ in group.nodes)
    else:
        type_string = group.type_string
        desc = group.description
        now = timezone.now()
        # First table: group metadata.
        table = []
        table.append(["Group name", group.name])
        table.append(["Group type",
                      type_string if type_string else "<user-defined>"])
        table.append(["Group description",
                      desc if desc else "<no description>"])
        print(tabulate(table))
        # Second table: one row per node in the group.
        table = []
        header = []
        if parsed_args.uuid:
            header.append('UUID')
        header.extend(['PK', 'Type', 'Created'])
        print "# Nodes:"
        for n in group.nodes:
            row = []
            if parsed_args.uuid:
                row.append(n.uuid)
            row.append(n.pk)
            # Show only the final class name of the node's plugin type string.
            row.append(from_type_to_pluginclassname(n.dbnode.type).
                       rsplit(".", 1)[1])
            row.append(str_timedelta(now - n.ctime, short=True,
                                     negative_to_zero=True))
            table.append(row)
        print(tabulate(table, headers=header))
def test_retrieve_group(self):
    """The test group must be retrievable by its name.

    A NotExistent from get_from_string fails the test; the assertion below
    additionally guards against a falsy return.
    """
    # BUGFIX: 'assert True' was vacuous — assert on the retrieved group
    # instead so the test actually checks the returned value.
    group = Group.get_from_string(TEST_GROUP_NAME)
    assert group is not None
def cli(code_node, structure_group_name, workchain_group_name,
        base_parameter_node, pseudo_familyname, kptper_recipang,
        nume2bnd_ratio, calc_method, max_wallclock_seconds,
        max_active_calculations, number_of_nodes, memory_gb, ndiag, npools,
        sleep_interval, z_movement_only, keep_workdir, dryrun, run_debug):
    """
    Submit a quantumespresso.pw workchain ('scf', 'relax' or 'vc-relax')
    for every structure in the structure group that does not yet have an
    associated workchain in the workchain group.

    Throttles submissions so that at most max_active_calculations run at a
    time, polling every sleep_interval seconds. With dryrun, only prints the
    prepared inputs; with run_debug, submits a single short debug-queue job
    and exits.
    """
    valid_calc_methods = ['scf', 'relax', 'vc-relax']
    if calc_method not in valid_calc_methods:
        raise Exception("Invalid calc_method: {}".format(calc_method))

    # setup parameters
    code = load_node(code_node)
    structure_group = Group.get_from_string(structure_group_name)
    workchain_group = Group.objects.get_or_create(name=workchain_group_name)[0]
    base_parameter = load_node(base_parameter_node)

    # announce if running in debug mode
    if run_debug:
        print("Running in debug mode!")

    # Load all the structures in the structure group, not-yet run in workchain_group_name
    uncalculated_structures = retrieve_alluncalculated_structures(
        structure_group_name, workchain_group_name=workchain_group_name)
    if len(uncalculated_structures) == 0:
        print(("All structures in {} already have associated workchains in "
               "the group {}".format(structure_group_name,
                                     workchain_group_name)))
        sys.exit()

    # determine number of calculations to submit
    running_calculations = retrieve_numactive_calculations()
    calcs_to_submit = max_active_calculations - running_calculations

    # submit calculations
    for structure in uncalculated_structures:
        print("Preparing to cli {}".format(structure))

        # ensure no more than the max number of calcs are submitted
        while (calcs_to_submit <= 0):
            running_calculations = retrieve_numactive_calculations()
            calcs_to_submit = max_active_calculations - running_calculations
            if calcs_to_submit <= 0:  # in case jobs finished during submission
                print(("{} calcs running,"
                       "max num calcs {} waiting....".format(
                           running_calculations, max_active_calculations)))
                time.sleep(sleep_interval)

        # start timer to inspect job submission times
        from timeit import default_timer as timer
        start = timer()

        # determine number of bands & setup the parameters
        parameters = wf_setupparams(base_parameter, structure,
                                    Str(pseudo_familyname),
                                    Float(nume2bnd_ratio))

        # determine kpoint mesh & setup kpoints
        kpoints = wf_getkpoints(structure, Int(kptper_recipang))

        # determine parallelization & resources (setup the settings & options)
        if number_of_nodes:
            num_machines = int(number_of_nodes)
        else:
            # NOTE(review): the 20-node cap is assumed to apply only to the
            # auto-determined machine count (the skip message below tells the
            # user to set --number_of_nodes manually) — original indentation
            # was lost; confirm against upstream.
            num_machines = get_nummachines_forcalc(structure,
                                                   pseudo_familyname)
            max_nodes_to_submit = 20
            if num_machines > max_nodes_to_submit:
                print("{} nodes requested, maximum is {}".format(
                    num_machines, max_nodes_to_submit))
                print(
                    "If you wish to cli please choose nodes manually with --number_of_nodes"
                )
                continue
        options_dict = {
            'max_wallclock_seconds': max_wallclock_seconds,
            'resources': {
                'num_machines': num_machines
            },
        }
        if memory_gb:
            # Scheduler expects kB; convert whole GB to kB.
            options_dict['max_memory_kb'] = int(int(memory_gb) * 1024 * 1024)
        if run_debug:
            # Debug runs: shrink to 2 nodes, 30 minutes, debug queue.
            num_machines = 2
            options_dict['resources']['num_machines'] = num_machines
            options_dict['max_wallclock_seconds'] = int(30 * 60)
            options_dict['queue_name'] = 'debug'
        workchain_options = Dict(dict=options_dict)

        # QE parallelization: -nk pools, optional -ndiag.
        if npools:
            nk = npools
        else:
            nk = get_qe_nk(num_machines, code)
        settings_dict = {'cmdline': ['-nk', nk], 'no_bands': True}
        if ndiag:
            settings_dict['cmdline'] += ['-ndiag', ndiag]
        if z_movement_only:
            # Constrain every atom to move along z only.
            num_atoms = len(structure.get_ase())
            coordinate_fix = [[True, True, False]] * num_atoms
            settings_dict['fixed_coords'] = coordinate_fix
        settings = Dict(dict=settings_dict)

        # setup inputs & submit workchain
        clean_workdir = not keep_workdir
        inputs = {
            'structure': structure,
            'settings': settings,
            'clean_workdir': Bool(clean_workdir)
        }
        base_inputs = {
            'code': code,
            'pseudo_family': Str(pseudo_familyname),
            'kpoints': kpoints,
            'parameters': parameters,
            'options': workchain_options,
            'settings': settings,
        }
        if calc_method == 'scf':
            PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.base')
            inputs.update(base_inputs)
        elif calc_method == 'relax':
            PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.relax')
            inputs['base'] = base_inputs
            inputs['relaxation_scheme'] = Str('relax')
            inputs['final_scf'] = Bool(False)
            inputs['meta_convergence'] = Bool(False)
        elif calc_method == 'vc-relax':
            PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.relax')
            inputs['base'] = base_inputs
            inputs['relaxation_scheme'] = Str('vc-relax')
            inputs['final_scf'] = Bool(True)
            inputs['meta_convergence'] = Bool(True)
        else:
            raise Exception("Invalid calc_method: {}".format(calc_method))

        def print_timing(start):
            # Report elapsed wall time since the submission timer started.
            end = timer()
            time_elapsed = end - start
            print("timing: {}s".format(time_elapsed))

        if dryrun:
            print("ase_structure: {}".format(structure.get_ase()))
            print("aiida_settings: {}".format(settings.get_dict()))
            print("aiida_options: {}".format(workchain_options.get_dict()))
            print("aiida_inputs: {}".format(inputs))
            print_timing(start)
        else:
            node = submit(PwBaseWorkChain, **inputs)
            workchain_group.add_nodes([node])
            print("WorkChain: {} submitted".format(node))
            print_timing(start)
            calcs_to_submit -= 1
            if run_debug:
                sys.exit()