def set_data(self, data):
    """Replace the current data with another one.

    All entries of ``data`` except ``"operations"`` are stored as node
    attributes (plus a ``num_symops`` count); the symmetry operations
    themselves are written to file via ``self._set_operations``.

    :param data: The dictionary to set.
    :raises ModificationNotAllowed: re-raised if the node attributes can
        no longer be modified.
    """
    from aiida.common.exceptions import ModificationNotAllowed

    # first validate the inputs
    validate_against_schema(data, self.data_schema)

    # store all but the symmetry operations as attributes
    # keep a deep copy so the original state can be restored on failure
    backup_dict = copy.deepcopy(dict(self.attributes))

    try:
        # Clear existing attributes and set the new dictionary
        self._update_attributes(
            {k: v for k, v in data.items() if k != "operations"})
        self.set_attribute("num_symops", len(data["operations"]))
    except ModificationNotAllowed:  # pylint: disable=try-except-raise
        # I re-raise here to avoid to go in the generic 'except' below that
        # would raise the same exception again
        raise
    except Exception:
        # Try to restore the old data
        self.clear_attributes()
        self._update_attributes(backup_dict)
        raise

    # store the symmetry operations on file
    self._set_operations(data["operations"])
def _validate(self):
    """Validate the stored node.

    Checks that the operations file is present in the repository and that
    the attribute dictionary conforms to the data schema.
    """
    super(SymmetryData, self)._validate()

    if self._ops_filename not in self.list_object_names():
        raise SchemeError("operations not set")

    validate_against_schema(self.get_dict(), self.data_schema)
def validate_parameters(cls, dct):
    """Check an input dictionary against the class data schema.

    Parameters
    ----------
    dct : dict
        the dictionary to validate

    """
    validate_against_schema(dct, cls.data_schema)
def test_toplevel_pass():
    """A minimal dictionary (only ``scf.k_points``) should validate."""
    minimal = {"scf": {"k_points": [8, 8]}}
    validate_against_schema(minimal, "inputd12.schema.json")
def write_input(indict, basis_sets, atom_props=None):
    """Write input of a validated input dictionary.

    Parameters
    ----------
    indict: dict
        dictionary of inputs
    basis_sets: list
        list of basis set strings or objects with `content` property
    atom_props: dict or None
        atom ids with specific properties;
        "spin_alpha", "spin_beta", "unfixed", "ghosts", "chemod"

    Returns
    -------
    str

    Raises
    ------
    ValueError
        if no basis sets are given, a basis set has an unrecognised type,
        ``atom_props`` contains an unknown key, or an atom id appears in
        both ``spin_alpha`` and ``spin_beta``

    """
    # validation
    validate_against_schema(indict, "inputd12.schema.json")
    if not basis_sets:
        raise ValueError("there must be at least one basis set")
    elif not all(
        [isinstance(b, str) or hasattr(b, "content") for b in basis_sets]):
        # FIX: the two string fragments previously concatenated without a
        # separating space ("...all stringsor all objects...")
        raise ValueError("basis_sets must be either all strings "
                         "or all objects with a `content` property")
    if atom_props is None:
        atom_props = {}
    if not set(atom_props.keys()).issubset(
        ["spin_alpha", "spin_beta", "unfixed", "ghosts", "chemod"]):
        # FIX: the message now lists 'chemod', which the check above accepts
        raise ValueError(
            "atom_props should only contain: "
            "'spin_alpha', 'spin_beta', 'unfixed', 'ghosts', 'chemod'")
    # validate that a index isn't in both spin_alpha and spin_beta
    allspin = atom_props.get("spin_alpha", []) + atom_props.get(
        "spin_beta", [])
    if len(set(allspin)) != len(allspin):
        raise ValueError(
            "a kind cannot be in both spin_alpha and spin_beta: {}".format(
                allspin))

    outstr = ""

    # Title
    title = get_keys(indict, ["title"], "CRYSTAL run")
    outstr += "{}\n".format(" ".join(title.splitlines()))  # must be one line

    outstr = _geometry_block(outstr, indict, atom_props)

    outstr = _basis_set_block(outstr, indict, basis_sets, atom_props)

    outstr = _hamiltonian_block(outstr, indict, atom_props)

    return outstr
def extract_data(input_string):
    """extract data from a main.d12 CRYSTAL17 file

    - Any geometry creation commands are ignored
    - Basis sets must be included explicitly (no keywords) and are read
      into the basis_sets list
    - FRAGMENT, GHOSTS and ATOMSPIN commands are read into the atom_props dict
    - Otherwise, only commands contained in the inputd12.schema.json are allowed

    :param input_string: a string of the content of the file
    :returns param_dict: the parameter dict for use in ``crystal17.main`` calculation
    :returns basis_sets: a list of the basis sets
    :returns atom_props: a dictionary of atom specific values
        (spin_alpha, spin_beta, ghosts, fragment)
    """
    lines = input_string.splitlines()

    schema = load_schema("inputd12.schema.json")
    output_dict = {}
    basis_sets = []
    atom_props = {}

    # the first line is the free-text title
    output_dict["title"] = _pop_line(lines)

    _read_geom_block(lines, output_dict, schema)

    line = _pop_line(lines)

    if line == "OPTGEOM":
        line = _read_geomopt_block(atom_props, line, lines, output_dict,
                                   schema)

    if line == "BASISSET":
        raise NotImplementedError("key word basis set input (BASISSET)")
    if not line == "END":
        raise IOError("expecting end of geom block: {}".format(line))

    _read_basis_block(atom_props, basis_sets, lines, output_dict, schema)

    # NOTE(review): this popped line (presumably the basis block terminator)
    # is discarded without being checked, unlike the geometry block above —
    # confirm whether it should also be validated.
    line = _pop_line(lines)

    _read_hamiltonian_block(atom_props, lines, output_dict, schema)

    output_dict = unflatten_dict(output_dict)
    validate_against_schema(output_dict, "inputd12.schema.json")

    return output_dict, basis_sets, atom_props
def read_doss_contents(content):
    """Parse the contents of a ``doss.d3`` input file.

    Parameters
    ----------
    content : str
        the raw text of the file

    Returns
    -------
    dict
        the extracted parameters, validated against ``prop.doss.schema.json``

    """
    lines = content.splitlines()
    params = {}

    # NEWK header: shrinking factors are on the second line
    assert lines[0].rstrip() == "NEWK"
    shrink = lines[1].split()
    params["shrink_is"] = int(shrink[0])
    params["shrink_isp"] = int(shrink[1])
    assert lines[2].rstrip() == "1 0"

    # DOSS settings line: NPRO NPT INZB IFNB IPLO NPOL NPR
    assert lines[3].rstrip() == "DOSS"
    settings = lines[4].split()
    assert len(settings) >= 7
    npro = int(settings[0])
    params["npoints"] = int(settings[1])
    band_first = int(settings[2])
    band_last = int(settings[3])
    iplo = int(settings[4])  # noqa: F841
    params["npoly"] = int(settings[5])
    npr = int(settings[6])  # noqa: F841

    # negative band indices signal an explicit energy range on the next line
    if band_first >= 0 and band_last >= 0:
        params["band_minimum"] = band_first
        params["band_maximum"] = band_last
        params["band_units"] = "bands"
        proj_index = 5
    else:
        erange = lines[5].split()
        params["band_minimum"] = float(erange[0])
        params["band_maximum"] = float(erange[1])
        params["band_units"] = "hartree"
        proj_index = 6

    params["atomic_projections"] = []
    params["orbital_projections"] = []
    for line in lines[proj_index:proj_index + npro]:
        values = [int(i) for i in line.split()]
        # positive leading count -> orbital projection, negative -> atomic
        key = ("orbital_projections"
               if values[0] > 0 else "atomic_projections")
        params[key].append(values[1:])

    assert lines[proj_index + npro].rstrip() == "END"

    validate_against_schema(params, "prop.doss.schema.json")

    return params
def create_rotref_content(params, validate=True):
    """create the contents of a ppan.d3 input file

    Parameters
    ----------
    params : dict
    validate : bool
        Validate the parameters against the JSON schema

    Returns
    -------
    list[str]

    """
    if validate:
        validate_against_schema(params, "prop.rotref.schema.json")

    lines = []
    rotref = params.get("ROTREF", None)
    if rotref is None:
        return lines

    if "MATRIX" in rotref:
        matrix = rotref["MATRIX"]
        if not _test_unitary(matrix):
            raise ValueError(
                "The ROTREF matrix must be unitary: {}".format(matrix))
        lines.append("ROTREF")
        lines.append("MATRIX")
        for row in matrix:
            lines.append("{0:.8f} {1:.8f} {2:.8f}".format(*row))

    if "ATOMS" in rotref:
        lines.append("ROTREF")
        lines.append("ATOMS")
        for row in rotref["ATOMS"]:
            # atom id on its own line, followed by the three rotation values
            lines.append("{0}".format(row[0]))
            lines.append("{0} {1} {2}".format(*row[1:]))

    return lines
def validate_parameters(cls, data, _):
    """Validate a DOSS parameter node.

    The ``k_points`` (NEWK) entry and the optional ``ROTREF`` entry are
    checked against their own schemas; the remainder is checked against
    the DOSS schema.
    """
    dct = data.get_dict()
    validate_against_schema({"k_points": dct.pop("k_points")},
                            "prop.newk.schema.json")
    if "ROTREF" in dct:
        validate_against_schema({"ROTREF": dct.pop("ROTREF")},
                                "prop.rotref.schema.json")
    validate_against_schema(dct, "prop.doss.schema.json")
def validate_settings(cls, settings_data, _):
    """Check the settings node against the class settings schema."""
    validate_against_schema(settings_data.get_dict(),
                            cls.get_settings_schema())
def create_vesta_input(atoms, cube_filepath=None, settings=None):
    """Return the file content of a VESTA input file.

    Parameters
    ----------
    atoms: ase.Atoms
    cube_filepath: str or None
    settings: dict
        Settings that will be merged with the default settings,
        and validated against 'vesta_input.schema.json'

    Returns
    -------
    str

    :raises ValueError: if a min bound is not strictly less than its max
        bound, in either ``bounds`` or ``2d_display``

    """
    settings = get_complete_settings(settings)
    validate_against_schema(settings, "vesta_input.schema.json")

    # sanity-check the orderings the schema cannot express
    for dim in ("x", "y", "z"):
        if not settings["bounds"][dim + "min"] < settings["bounds"][dim +
                                                                    "max"]:
            raise ValueError(
                "bounds: {0}min must be less than {0}max".format(dim))
        if (not settings["2d_display"][dim + "min"] <
                settings["2d_display"][dim + "max"]):
            raise ValueError(
                "2d_display: {0}min must be less than {0}max".format(dim))

    # per-element display data (radius and rgb colour) for symbols present
    el_info = {
        s: SymbolInfo(*VESTA_ELEMENT_INFO[s])
        for s in set(atoms.get_chemical_symbols())
    }

    # header
    lines = [
        "#VESTA_FORMAT_VERSION 3.3.0",
        "",
        "CRYSTAL",
        "",
        "TITLE",
        ""
        # NB: originally used cube_data.header[0],
        # but the file load can fail if a key word (like CRYSTAL) is in the title
        # NOTE(review): the "" above implicitly concatenates with "AIIDA_DATA"
        # to form a single list element — presumably intentional; verify.
        "AIIDA_DATA",
        "",
    ]

    # density input
    if cube_filepath is not None:
        lines.extend(
            ["IMPORT_DENSITY 1", "+1.000000 {}".format(cube_filepath), ""])

    # symmetry
    lines.extend([
        "GROUP",
        "1 1 P 1",
        "SYMOP",
        "0.000000 0.000000 0.000000 1 0 0 0 1 0 0 0 1 1",
        "-1.0 -1.0 -1.0 0 0 0 0 0 0 0 0 0",
    ])

    # position and orientation (hard-coded)
    # LORIENT:
    # <this plane h, k, l> <global h, k, l>
    # <this plane u, v, w> <global u, v, w>
    lines.extend(
        dedent("""\
            TRANM 0
            0.000000 0.000000 0.000000 1 0 0 0 1 0 0 0 1
            LTRANSL
            -1
            0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
            LORIENT
            -1 0 0 0 0
            1.000000 0.000000 0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000 0.000000 1.000000
            LMATRIX
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000000 0.000000 0.000000""").splitlines())

    # cell parameters
    lines.extend([
        "CELLP",
        " {:.6f} {:.6f} {:.6f} {:.6f} {:.6f} {:.6f}".format(
            *atoms.get_cell_lengths_and_angles()),
        " 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000",
    ])

    # atomic sites (scaled/fractional positions)
    lines.append("STRUC")
    for i, ((x, y, z), symbol) in enumerate(
            zip(atoms.get_scaled_positions(),
                atoms.get_chemical_symbols())):  # type: (int, ase.Atom)
        # per-site label override from settings, else e.g. "Fe1"
        label = (settings["sites"].get(str(i + 1), {}).get(
            "label", "{sym:s}{idx:d}".format(sym=symbol, idx=i + 1)))
        lines.append(
            " {idx:<2d} {sym:<2s} {label:4s} {occ:.6f} {x:.6f} {y:.6f} {z:.6f} {wyck} -"
            .format(idx=i + 1,
                    sym=symbol,
                    label=label,
                    occ=1.0,
                    x=x,
                    y=y,
                    z=z,
                    wyck="1"))
        lines.append(" 0.000000 0.000000 0.000000 {charge:.6f}".format(
            charge=0))
    lines.append(" 0 0 0 0 0 0 0")

    # isotropic displacement parameter
    lines.append("THERI 0")
    for i, atom in enumerate(atoms):  # type: (int, ase.Atom)
        label = (settings["sites"].get(str(i + 1), {}).get(
            "label", "{sym:s}{idx:d}".format(sym=atom.symbol, idx=i + 1)))
        lines.append(" {idx:<2d} {label:4s} 1.000000".format(idx=i + 1,
                                                             label=label))
    lines.append(" 0 0 0")

    lines.extend([
        "SHAPE",
        " 0 0 0 0 0.000000 0 192 192 192 192"
    ])

    # repeat unit cell
    lines.extend([
        "BOUND",
        " {xmin:.6f} {xmax:.6f} {ymin:.6f} {ymax:.6f} {zmin:.6f} {zmax:.6f}".
        format(**settings["bounds"]),
        " 0 0 0 0 0",
    ])

    # neighbour bonds
    lines.append("SBOND")
    for i, (sym1, sym2, minr, maxr, smode, bmode, poly,
            label) in enumerate(settings["bonds"]["compute"]):
        lines.append((
            " {idx:<2d} {sym1:<2s} {sym2:<2s} {minr:.6f} {maxr:.6f} "
            "{search_mode} {bound_mode} {show_polyhedra} {search_by_label} 1 "
            "{radius:.6f} {width:.6f} 180 180 180").format(
                idx=i + 1,
                sym1=sym1,
                sym2=sym2,
                minr=minr,
                maxr=maxr,
                search_mode=smode,
                bound_mode=bmode,
                show_polyhedra=1 if poly else 0,
                search_by_label=1 if label else 0,
                radius=settings["bonds"]["radius"],
                width=settings["bonds"]["width"],
            ), )
    lines.append(" 0 0 0 0")

    # site radii and colors
    lines.append("SITET")
    for i, atom in enumerate(atoms):
        symbol = atom.symbol
        if str(i + 1) in settings.get("sites", {}):
            # per-site overrides, falling back to element defaults
            label = settings["sites"][str(i + 1)].get(
                "label", "{sym:s}{idx:d}".format(sym=symbol, idx=i + 1))
            radius = settings["sites"][str(i + 1)].get(
                "radius", el_info[symbol].radius)
            red, green, blue = settings["sites"][str(i + 1)].get(
                "color", (el_info[symbol].r, el_info[symbol].g,
                          el_info[symbol].b))
        else:
            label = "{sym:s}{idx:d}".format(sym=symbol, idx=i + 1)
            radius = el_info[symbol].radius
            red, green, blue = (el_info[symbol].r, el_info[symbol].g,
                                el_info[symbol].b)
        lines.append(
            " {idx:<2d} {label:4s} {rad:.6f} {r} {g} {b} {r} {g} {b} 100 {show_label}"
            .format(
                idx=i + 1,
                label=label,
                rad=radius,
                r=int(red * 255),
                g=int(green * 255),
                b=int(blue * 255),
                show_label=0,  # NB: needs to be used in conjunction with LBLAT
            ))
    lines.append(" 0 0 0 0 0 0")

    # additional lines (currently hardcoded)
    lines.extend([
        "VECTR",
        " 0 0 0 0 0",
        "VECTT",
        " 0 0 0 0 0",
        "SPLAN",
        " 0 0 0 0",
        "LBLAT",
        " -1",
        "LBLSP",
        " -1",
        "DLATM",
        " -1",
        "DLBND",
        " -1",
        "DLPLY",
        " -1",
    ])

    # TODO lattice planes
    # SPLAN
    # {idx:d} {h:.6E} {k:.6E} {l:.6E} {dist_from_o:.6E} {r:d} {g:d} {b:d} {alpha:d}

    # 2D data display
    lines.extend(
        dedent("""\
            PLN2D
            1 {h:.6E} {k:.6E} {l:.6E} {dist_from_o:.6E} 1.0000 255 255 255 255
            1 96 {birds_eye} {fill_min:.6E} {fill_max:.6E} {contour_interval:.6E} {contour_min:.6E} {contour_max:.6E}
            1 10 -1 2 5 {bound_width:.6f} {contour_width1:.6f} {contour_width2:.6f} {zscale:.6E}
            {xmin:.6f} {xmax:.6f} {ymin:.6f} {ymax:.6f} {zmin:.6f} {zmax:.6f}
            0.500000 0.500000 0.500000 1.000000
            """).format(birds_eye="0", **settings["2d_display"]).splitlines())

    # translation and zoom (hard-coded)
    lines.append(" 0 0 0 1")

    # line colors and orientation (hard-coded)
    lines.extend(
        dedent("""\
            255 255 255
            0 0 0
            0 0 0
            0 0 0
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0 0 0 0""").splitlines())

    # element radii and colors
    lines.append("ATOMT")
    lines.extend([
        " {idx:<2d} {sym:<2s} {rad:.6f} {r} {g} {b} {r} {g} {b} 100".format(
            idx=i + 1,
            sym=s,
            rad=el_info[s].radius,
            r=int(el_info[s].r * 255),
            g=int(el_info[s].g * 255),
            b=int(el_info[s].b * 255),
        ) for i, s in enumerate(sorted(set(atoms.get_chemical_symbols())))
    ])
    lines.append(" 0 0 0 0 0 0")

    # initial scene orientation (hard-coded)
    lines.extend(
        dedent("""\
            SCENE
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000 0.000 0.000
            1.000
            HBOND 0 2
            """).splitlines())

    # style section
    # TODO make options variable
    lines.extend(
        dedent("""\
            STYLE
            DISPF 147551
            MODEL 0 1 0
            SURFS 0 1 1
            SECTS 96 0
            FORMS 0 1
            ATOMS 0 0 1
            BONDS {bond_style}
            POLYS {poly_style}
            VECTS 1.000000
            FORMP
            1 1.0 0 0 0
            ATOMP
            24 24 0 50 2.0 0
            BONDP
            1 {bond_slices} {bond_radius:.6f} {bond_width:.6f} 180 180 180
            POLYP
            100 1 1.000 180 180 180""").format(
            bond_style=settings["bonds"]["style"],
            bond_slices=settings["bonds"]["slices"],
            bond_radius=settings["bonds"]["radius"],
            bond_width=settings["bonds"]["width"],
            poly_style=settings["polyhedra"]["style"],
        ).splitlines())

    # isosurfaces
    lines.append("ISURF")
    lines.extend([(" {idx:<2d} {pos_neg:d} {val:.6f} "
                   "{r:3d} {g:3d} {b:3d} {a1:3d} {a2:3d}").format(
                       idx=i + 1,
                       val=val,
                       pos_neg=pos_neg,
                       r=int(r * 255),
                       g=int(g * 255),
                       b=int(b * 255),
                       a1=int(a1 * 255),
                       a2=int(a2 * 255),
                   ) for i, (val, pos_neg, r, g, b, a1,
                             a2) in enumerate(settings["iso_surfaces"])])
    lines.append(" 0 0 0 0")

    # final settings
    lines.extend(
        dedent("""\
            TEX3P
            1 0.00000E+00 1.00000E+00
            SECTP
            1 5.00000E-01 5.00000E-01 0.00000E+00
            HKLPP
            192 1 1.000 255 0 255
            UCOLP
            1 1 1.000 0 0 0
            COMPS {compass}
            LABEL 1 12 1.000 0
            PROJT 0 0.962
            BKGRC 255 255 255
            DPTHQ 1 -0.5000 3.5000
            LIGHT0 1
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000000 0.000000 20.000000 0.000000
            0.000000 0.000000 -1.000000
            26 26 26 255
            179 179 179 255
            255 255 255 255
            LIGHT1
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000000 0.000000 20.000000 0.000000
            0.000000 0.000000 -1.000000
            0 0 0 0
            0 0 0 0
            0 0 0 0
            LIGHT2
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000000 0.000000 20.000000 0.000000
            0.000000 0.000000 -1.000000
            0 0 0 0
            0 0 0 0
            0 0 0 0
            LIGHT3
            1.000000 0.000000 0.000000 0.000000
            0.000000 1.000000 0.000000 0.000000
            0.000000 0.000000 1.000000 0.000000
            0.000000 0.000000 0.000000 1.000000
            0.000000 0.000000 20.000000 0.000000
            0.000000 0.000000 -1.000000
            0 0 0 0
            0 0 0 0
            0 0 0 0
            ATOMM 204 204 204 255 25.600
            BONDM 255 255 255 255 128.000
            POLYM 255 255 255 255 128.000
            SURFM 0 0 0 255 128.000
            FORMM 255 255 255 255 128.000
            HKLPM 255 255 255 255 128.000
            """.format(
            compass=1 if settings["show_compass"] else 0)).splitlines())

    return "\n".join(lines)
def _validate_inputs(dict_data):
    """Check the input Dict node against the DOSS schema."""
    dct = dict_data.get_dict()
    validate_against_schema(dct, "prop.doss.schema.json")
def test_toplevel_fail():
    """Empty dicts and unknown top-level keys must be rejected."""
    for bad_input in ({}, {"a": 1}):
        with pytest.raises(ValidationError):
            validate_against_schema(bad_input, "inputd12.schema.json")
def test_full_pass():
    """A fully-specified d12 input dictionary should pass schema validation."""
    data = {
        "title": "a title",
        "geometry": {
            "info_print": ["ATOMSYMM", "SYMMOPS"],
            "info_external": ["STRUCPRT"],
            "optimise": {
                "type": "FULLOPTG",
                "hessian": "HESSIDEN",
                "gradient": "NUMGRATO",
                "info_print": ["PRINTOPT", "PRINTFORCES"],
                "convergence": {
                    "TOLDEG": 0.0003,
                    "TOLDEX": 0.0012,
                    "TOLDEE": 7,
                    "MAXCYCLE": 50,
                    "FINALRUN": 4,
                },
            },
        },
        "basis_set": {
            "CHARGED": False,
        },
        "scf": {
            "dft": {
                "xc": ["LDA", "PZ"],
                # or
                # "xc": "HSE06",
                # or
                # "xc": {"LSRSH-PBE": [0.11, 0.25, 0.00001]},
                "SPIN": True,
                "grid": "XLGRID",
                "grid_weights": "BECKE",
                "numerical": {
                    "TOLLDENS": 6,
                    "TOLLGRID": 14,
                    "LIMBEK": 400
                },
            },
            # or
            # "single": "UHF",
            "k_points": [8, 8],
            "numerical": {
                "BIPOLAR": [18, 14],
                "BIPOSIZE": 4000000,
                "EXCHSIZE": 4000000,
                "EXCHPERM": False,
                "ILASIZE": 6000,
                "INTGPACK": 0,
                "MADELIND": 50,
                "NOBIPCOU": False,
                "NOBIPEXCH": False,
                "NOBIPOLA": False,
                "POLEORDR": 4,
                "TOLINTEG": [6, 6, 6, 6, 12],
                "TOLPSEUD": 6,
                "FMIXING": 0,
                "MAXCYCLE": 50,
                "TOLDEE": 6,
                "LEVSHIFT": [2, 1],
                "SMEAR": 0.1,
            },
            "fock_mixing": "DIIS",
            # or
            # "fock_mixing": {"BROYDEN": [0.0001, 50, 2]},
            "spinlock": {
                "SPINLOCK": [1, 10]
            },
            "post_scf": ["GRADCAL", "PPAN"],
        },
    }
    validate_against_schema(data, "inputd12.schema.json")
def create_doss_content(params, validate=True):
    """Build the line contents of a doss.d3 input file.

    Parameters
    ----------
    params : dict
    validate : bool
        Validate the parameters against the JSON schema

    Returns
    -------
    list[str]

    Notes
    -----
    NPRO; number of additional (to total) projected densities to calculate (<= 15)
    NPT; number of uniformly spaced energy values (from bottom of band INZB
    to top of band IFNB)
    INZB; band considered in DOS calculation
    IFNB; last band considered in DOS calculation
    IPLO; output type (1 = to .d25 file)
    NPOL; number of Legendre polynomials used to expand DOSS (<= 25)
    NPR; number of printing options to switch on

    Unit of measurement: energy: hartree; DOSS: state/hartree/cell.

    """
    if validate:
        validate_against_schema(params, "prop.doss.schema.json")

    lines = ["DOSS"]

    proj_atoms = params.get("atomic_projections", None)
    if proj_atoms is None:
        proj_atoms = []
    proj_orbitals = params.get("orbital_projections", None)
    if proj_orbitals is None:
        proj_orbitals = []
    npro = len(proj_atoms) + len(proj_orbitals)

    units = params["band_units"]
    if units == "bands":
        inzb = int(params["band_minimum"])
        ifnb = int(params["band_maximum"])
        assert inzb >= 0 and ifnb >= 0
        erange = None
    elif units == "hartree":
        inzb = ifnb = -1  # negative indices signal an explicit energy range
        erange = "{} {}".format(params["band_minimum"],
                                params["band_maximum"])
    elif units == "eV":
        inzb = ifnb = -1
        # convert to hartree for the input file
        erange = "{0:.8f} {1:.8f}".format(
            params["band_minimum"] / 27.21138602,
            params["band_maximum"] / 27.21138602)
    else:
        raise ValueError("band_units not recognised: {}".format(units))

    lines.append("{npro} {npt} {inzb} {ifnb} {iplo} {npol} {npr}".format(
        npro=npro,
        npt=params.get("npoints", 1000),
        inzb=inzb,
        ifnb=ifnb,
        iplo=1,  # output type (1=fort.25, 2=DOSS.DAT)
        npol=params.get("npoly", 14),
        npr=0,  # number of printing options
    ))
    if erange is not None:
        lines.append(erange)

    if len(proj_atoms) + len(proj_orbitals) > 15:
        raise AssertionError("only 15 projections are allowed per calculation")
    for atoms in proj_atoms:
        # negative count marks an atomic projection
        lines.append("{} {}".format(-1 * len(atoms),
                                    " ".join(str(a) for a in atoms)))
    for orbitals in proj_orbitals:
        lines.append("{} {}".format(len(orbitals),
                                    " ".join(str(o) for o in orbitals)))

    lines.append("END")

    return lines
def validate_parameters(cls, data, _):
    """Check the parameter node against the ROTREF schema."""
    validate_against_schema(data.get_dict(), "prop.rotref.schema.json")
def validate_parameters(cls, data, _):
    """Check the parameter node against the NEWK schema."""
    dct = data.get_dict()
    validate_against_schema(dct, "prop.newk.schema.json")