def test_Pd_setup(Pd, Pd_copy):
    """Makes sure the initial folders were set up according to the spec.
    """
    Pd.setup()
    modelroot = path.join(Pd.root, "Manual", "phonon.manual", "Pd")
    assert Pd["Manual/phonon/Pd/"].root == modelroot

    # The matdb.yml file specifies the following database:
    dbs = ["Manual/phonon.manual/Pd/"]
    # Each one should have a folder for: ["hessian", "modulations"]
    # On the first go, the modulations folder will be empty because the DFT
    # calculations haven't been performed yet. However, hessian should have
    # DFT folders ready to go.
    folders = {
        "__files__": ["compute.pkl", "jobfile.sh"],
        "S1.1": {
            "__files__": ["INCAR", "POSCAR", "POTCAR", "PRECALC", "KPOINTS"]
        }
    }

    from matdb.utility import compare_tree
    for db in dbs:
        dbfolder = path.join(Pd.root, db)
        compare_tree(dbfolder, folders)

    # Now we test some of the border cases of the database __init__ method.
    Pd_copy.setup()
    db = "Manual/phonon.manual/Pd"
    dbfolder = path.join(Pd_copy.root, db)
    compare_tree(dbfolder, folders)

def test_setup(CoNiTi):
    """Tests the setup of the prototype database.
    """
    assert not CoNiTi.collections['prototype'].steps['prototype'].is_setup()

    CoNiTi.setup()
    db = "Prototypes/prototype.prototype/per-1"

    folders = {
        "__files__": ["compute.pkl", "jobfile.sh", "prototype_P_uuid.txt",
                      "puuids.pkl"]
    }
    for i in range(1, 251):
        folders["P.{0}".format(i)] = {
            "__files__": ["INCAR", "PRECALC", "uuid.txt", "POSCAR",
                          "pre_comp_atoms.h5"]
        }

    dbfolder = path.join(CoNiTi.root, db)
    from matdb.utility import compare_tree
    compare_tree(dbfolder, folders)

    assert CoNiTi.collections['prototype'].steps['prototype'].is_setup()

    prot = CoNiTi.collections['prototype'].steps['prototype']
    assert len(prot.sequence['per-1'].puuids) == 250
    assert not prot.ready()

    # We need to create fake atoms.h5 objects so that the system will think
    # that VASP has run and the calculations have been extracted.
    dbfolder = path.join(CoNiTi.root, db)
    for j in range(1, 251):
        src = path.join(dbfolder, "P.{}".format(j), "pre_comp_atoms.h5")
        dest = path.join(dbfolder, "P.{}".format(j), "atoms.h5")
        symlink(src, dest)

    assert len(prot.rset) == 250
    assert len(prot.fitting_configs) == 250

    # We run the setup one more time to ensure quick returns.
    assert prot.ready()
    prot._setup_configs(False)

def test_not_extractable(Pd_not_extractable):
    """Tests a manual database that is marked as not extractable.
    """
    mPd = Pd_not_extractable

    tdb = Database("phonon", path.join(mPd.root, "tmp_db"), mPd,
                   [{"type": "simple.Manual"}], {}, 0)
    for config in tdb.iconfigs:
        assert config is not None

    mPd.setup()

    mdb = mPd.collections['phonon'].steps['manual']
    assert mdb is not None
    assert mdb.nconfigs == 1
    assert mdb.sub_dict() == {'extractable': False, 'name': 'manual'}
    assert not mdb.extractable
    assert not mdb._trainable
    assert mdb.is_setup()
    assert len(mdb.sequence) == 1
    assert len(mdb.sequence['Pd'].configs) == 1
    assert mdb.ready()
    assert mdb.can_extract()

    folders = {
        "__files__": ["phonon_S1_uuid.txt"],
        "Pd": {
            "__files__": ["phonon_S1_uuid.txt", "compute.pkl"],
            "S1.1": {
                "__files__": [
                    "INCAR", "PRECALC", "POSCAR", "POTCAR", "atoms.h5",
                    "uuid.txt", "KPOINTS", "ase-sort.dat"
                ]
            }
        }
    }

    dbfolder = mdb.root
    compare_tree(dbfolder, folders)

    mdb.tarball()
    assert path.isfile(path.join(dbfolder, "Pd", "output.tar.gz"))

def test_compare_tree(tmpdir):
    """Tests the folder comparison method.
    """
    from os import mkdir
    from matdb.utility import touch, compare_tree

    test_dir = str(tmpdir.join("comp_tree"))
    mkdir(test_dir)
    touch(path.join(test_dir, "compute.pkl"))
    touch(path.join(test_dir, "jobfile.sh"))
    mkdir(path.join(test_dir, "phonopy"))
    touch(path.join(test_dir, "phonopy", "POSCAR"))

    folders = {
        "__files__": ["compute.pkl", "jobfile.sh"],
        "phonopy": {
            "__files__": ["POSCAR"]
        },
    }

    compare_tree(test_dir, folders)

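# The `folders` dictionaries used throughout these tests follow a simple
# convention: the "__files__" key lists the file names expected directly
# inside a directory, and every other key names a subdirectory whose value is
# another spec of the same shape. The function below is only an illustrative
# sketch of that convention (the name `_assert_tree_matches` is hypothetical);
# the real checker used by these tests is `matdb.utility.compare_tree`.
def _assert_tree_matches(root, spec):
    """Recursively assert that `root` contains the files and subfolders
    described by `spec` (illustrative sketch only, not matdb's implementation).
    """
    # Check the files expected directly inside this directory.
    for fname in spec.get("__files__", []):
        assert path.isfile(path.join(root, fname))
    # Every other key describes a subdirectory with the same spec shape.
    for sub, subspec in spec.items():
        if sub == "__files__":
            continue
        assert path.isdir(path.join(root, sub))
        _assert_tree_matches(path.join(root, sub), subspec)
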
def test_AgCu_setup(AgCu):
    """Test the setup of the substitutions database.
    """
    assert not AgCu.collections['substitution'].steps['Substitution'].is_setup()

    AgCu.setup()
    dbs = "Substitution/substitution.Substitution/Ag1Cu5"

    folders = {
        "__files__": ["compute.pkl", "suids.pkl", "jobfile.sh", "index.json"]
    }
    for i in range(1, 16):
        folders["S.{0}".format(i)] = {
            "__files__": ["INCAR", "PRECALC", "POSCAR"]
        }

    from matdb.utility import compare_tree
    dbfolder = path.join(AgCu.root, dbs)
    compare_tree(dbfolder, folders)

    assert AgCu.collections['substitution'].steps['Substitution'].is_setup()

    # test the suid and index creation for the entire database.
    assert path.isfile(
        path.join(AgCu.root,
                  "Substitution/substitution.Substitution/suids.pkl"))
    assert path.isfile(
        path.join(AgCu.root,
                  "Substitution/substitution.Substitution/index.json"))

    sub = AgCu.collections['substitution'].steps['Substitution']
    assert len(sub.index) == 15
    assert len(sub.suids) == 15
    assert sub.ready()

    src = relpath(
        "./tests/data/Pd/complete/OUTCAR__DynMatrix_phonon_Pd_dim-2.00")
    dbfolder = path.join(AgCu.root, dbs)
    for j in range(1, 16):
        dest = path.join(dbfolder, "S.{}".format(j), "OUTCAR")
        symlink(src, dest)

    dbfolder = path.join(AgCu.root, dbs)
    for j in range(1, 16):
        src = path.join(dbfolder, "S.{}".format(j), "POSCAR")
        dest = path.join(dbfolder, "S.{}".format(j), "CONTCAR")
        symlink(src, dest)

    assert len(sub.atoms_paths()) == 15
    assert len(sub.rset()) == 15

def test_all(Pd):
    """Tests setup/extract/ready of the simple.Manual database.
    """
    Pd.setup()

    mdb = Pd.collections['phonon'].steps['manual']
    assert Pd is not None
    assert mdb is not None
    assert mdb.is_setup()
    assert len(mdb.sequence) == 3
    assert len(mdb.sequence['Pd1'].configs) == 1

    folders = {
        "__files__": ["phonon_S1_uuid.txt"]
    }
    for name in ("Pd1", "Pd2", "Pd3"):
        folders[name] = {
            "__files__": ["phonon_S1_uuid.txt", "jobfile.sh", "compute.pkl"],
            "S1.1": {
                "__files__": [
                    "INCAR", "PRECALC", "POSCAR", "POTCAR",
                    "pre_comp_atoms.h5", "uuid.txt", "KPOINTS", "ase-sort.dat"
                ]
            }
        }

    dbfolder = mdb.root
    compare_tree(dbfolder, folders)

    assert mdb.is_setup()
    assert not mdb.ready()

    src = relpath("./tests/data/Pd/basic_fail/S.4/OUTCAR")
    dbfolder = mdb.root
    for j in range(1, 4):
        dest = path.join(dbfolder, "Pd{}".format(j), "S1.1", "OUTCAR")
        symlink(src, dest)

    assert mdb.is_executing()
    assert mdb.is_setup()
    # Should not be executable, because it is already executing.
    assert not mdb.execute()

    # We need to fake some VASP output so that we can clean up the database
    # and get the rset.
    src = relpath(
        "./tests/data/Pd/complete/OUTCAR__DynMatrix_phonon_Pd_dim-2.00")
    dbfolder = mdb.root
    for j in range(1, 4):
        dest = path.join(dbfolder, "Pd{}".format(j), "S1.1", "OUTCAR")
        remove(dest)
        symlink(src, dest)

    dbfolder = mdb.root
    for j in range(2, 4):
        src = path.join(dbfolder, "Pd{}".format(j), "S1.1", "POSCAR")
        dest = path.join(dbfolder, "Pd{}".format(j), "S1.1", "CONTCAR")
        symlink(src, dest)

    assert not mdb.ready()
    # execute should return False because not all of the configurations can
    # execute; Pd1 is still missing its CONTCAR file.
    assert not mdb.execute()

    src = path.join(dbfolder, "Pd1", "S1.1", "POSCAR")
    dest = path.join(dbfolder, "Pd1", "S1.1", "CONTCAR")
    symlink(src, dest)
    assert not mdb.execute()

    mdb.extract()
    assert not mdb.execute()

    assert len(mdb.sequence) == 3
    assert len(mdb.sequence['Pd1'].config_atoms) == 1
    assert len(mdb.sequence['Pd2'].config_atoms) == 1
    assert len(mdb.sequence['Pd3'].config_atoms) == 1
    assert len(mdb.sequence['Pd1'].configs) == 1
    assert len(mdb.sequence['Pd2'].configs) == 1
    assert len(mdb.sequence['Pd3'].configs) == 1
    assert len(mdb.fitting_configs) == 3
    assert len(mdb.rset) == 3
    assert not mdb.is_executing()
    assert mdb.ready()

    # Run setup again (with rerun=True) on an already-ready database.
    Pd.setup(rerun=True)
    assert mdb.is_setup()
    assert mdb.ready()
    assert len(mdb.sequence) == 3
    assert len(mdb.sequence['Pd1'].config_atoms) == 1
    assert len(mdb.fitting_configs) == 3
    assert len(mdb.rset) == 3

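# Several tests in this file repeat the same pattern for faking a completed
# VASP run: symlink a reference OUTCAR into each configuration folder and
# point CONTCAR at the existing POSCAR so that extraction has something to
# read. A helper along these lines could factor that out; this is a sketch
# only, and the name `_fake_vasp_completion` is hypothetical (it is not part
# of matdb or of the fixtures used here).
def _fake_vasp_completion(dbfolder, prefix, indices, outcar_src):
    """Symlink `outcar_src` as OUTCAR and POSCAR as CONTCAR inside each
    `<prefix>.<i>` subfolder of `dbfolder` (illustrative sketch only).
    """
    for i in indices:
        config = path.join(dbfolder, "{0}.{1}".format(prefix, i))
        # Pretend VASP finished by linking in a pre-computed OUTCAR.
        symlink(outcar_src, path.join(config, "OUTCAR"))
        # Use the unrelaxed POSCAR as its own CONTCAR.
        symlink(path.join(config, "POSCAR"), path.join(config, "CONTCAR"))

# For example, the two loops in test_AgCu_setup could hypothetically be
# replaced with a single call:
#     _fake_vasp_completion(dbfolder, "S", range(1, 16), src)
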
def test_AlMg_setup(AlMg):
    """Test the setup of the distortion database.
    """
    assert not AlMg.collections['distortion'].steps['distortion'].is_setup()

    AlMg.setup()
    dbs = "Distortion/distortion.distortion/Al6Mg4"

    folders = {
        "__files__": ["compute.pkl", "duids.pkl", "jobfile.sh", "index.json"]
    }
    for i in range(1, 16):
        folders["D.{0}".format(i)] = {
            "__files__": ["INCAR", "PRECALC", "POSCAR"]
        }

    from matdb.utility import compare_tree
    dbfolder = path.join(AlMg.root, dbs)
    compare_tree(dbfolder, folders)

    assert AlMg.collections['distortion'].steps['distortion'].is_setup()

    # test the duid and index creation for the entire database.
    assert path.isfile(
        path.join(AlMg.root, "Distortion/distortion.distortion/duids.pkl"))
    assert path.isfile(
        path.join(AlMg.root, "Distortion/distortion.distortion/index.json"))

    dist = AlMg.collections['distortion'].steps['distortion']
    assert len(dist.duids) == 50
    assert len(dist.index) == 50
    assert dist.ready()

def test_setup(AgPd):
    """Tests the setup of the enumerated database.
    """
    assert not AgPd.collections['enumerated'].steps['enum'].is_setup()
    assert AgPd.collections['enumerated'].steps['enum'].fitting_configs == []

    AgPd.setup()
    dbs = ["Enum/enumerated.enum/lat-{}".format(i) for i in (1, 2)]

    folders = {
        "__files__": [
            "compute.pkl", "euids.pkl", "jobfile.sh", "enum.out", "lattice.in",
            "index.json"
        ]
    }
    for i in range(1, 11):
        folders["E.{0}".format(i)] = {
            "__files__": ["INCAR", "PRECALC", "POSCAR"]
        }

    from matdb.utility import compare_tree
    for db in dbs:
        dbfolder = path.join(AgPd.root, db)
        compare_tree(dbfolder, folders)

    assert AgPd.collections['enumerated'].steps['enum'].is_setup()

    # test the euid and index creation for the entire database.
    assert path.isfile(path.join(AgPd.root, "Enum/enumerated.enum/euids.pkl"))
    assert path.isfile(path.join(AgPd.root, "Enum/enumerated.enum/index.json"))

    enum = AgPd.collections['enumerated'].steps['enum']
    assert len(enum.index) == 20
    assert len(enum.euids) == 20
    assert not enum.ready()

    # We need to fake some VASP output so that we can clean up the database
    # and get the rset.
    src = relpath("./tests/files/outcars/4_atom")
    for db in dbs:
        dbfolder = path.join(AgPd.root, db)
        for j in range(1, 11):
            dest = path.join(dbfolder, "E.{}".format(j), "OUTCAR")
            symlink(src, dest)

    for db in dbs:
        dbfolder = path.join(AgPd.root, db)
        for j in range(1, 11):
            src = path.join(dbfolder, "E.{}".format(j), "POSCAR")
            dest = path.join(dbfolder, "E.{}".format(j), "CONTCAR")
            symlink(src, dest)

    enum.extract()
    assert len(enum.rset) == 20

def test_all_active(Act):
    """Tests the setup and extraction of the Active database.
    """
    assert (Act.last_iteration is None) or (len(Act.last_iteration) == 0)
    assert (Act.last_config_atoms is None) or (len(Act.last_config_atoms) == 0)

    add_configs(Act, 1)

    assert not Act.can_extract()
    assert Act.iter_file == path.join(Act.root, "iter_1.pkl")
    assert Act.nconfigs == 3
    assert not Act.is_executing()

    Act.setup()
    assert len(Act.last_iteration) == 3

    folders = {
        "__files__": ["compute.pkl", "auids.pkl", "jobfile.sh", "index.json",
                      "iter_1.pkl", "active_Ac_uuid.txt"],
        "Ac.1": {
            "__files__": ["INCAR", "PRECALC", "POSCAR", "POTCAR",
                          "pre_comp_atoms.h5", "uuid.txt"]
        },
        "Ac.2": {
            "__files__": ["INCAR", "PRECALC", "POSCAR", "POTCAR",
                          "pre_comp_atoms.h5", "uuid.txt"]
        },
        "Ac.3": {
            "__files__": ["INCAR", "PRECALC", "POSCAR", "POTCAR",
                          "pre_comp_atoms.h5", "uuid.txt"]
        }
    }

    dbfolder = Act.root
    compare_tree(dbfolder, folders)

    assert Act.is_setup()
    assert not Act.ready()

    # We need to fake some VASP output so that we can clean up the database
    # and get the rset.
    src = relpath(
        "./tests/data/Pd/complete/OUTCAR__DynMatrix_phonon_Pd_dim-2.00")
    dbfolder = Act.root
    for j in range(1, 4):
        dest = path.join(dbfolder, "Ac.{}".format(j), "OUTCAR")
        symlink(src, dest)

    dbfolder = Act.root
    for j in range(1, 4):
        src = path.join(dbfolder, "Ac.{}".format(j), "POSCAR")
        dest = path.join(dbfolder, "Ac.{}".format(j), "CONTCAR")
        symlink(src, dest)

    remove(path.join(Act.root, "Ac.1", "pre_comp_atoms.h5"))

    Act.extract()
    assert len(Act.config_atoms) == 3
    assert len(Act.configs) == 3
    assert len(Act.last_config_atoms) == 3
    assert len(Act.rset) == 3
    assert not Act.is_executing()

    remove(path.join(Act.root, "Ac.1", "OUTCAR"))
    dest = path.join(Act.root, "Ac.1", "OUTCAR")
    src = relpath("./tests/data/Pd/basic_fail/S.4/OUTCAR")
    symlink(src, dest)

    assert Act.is_executing()
    # Should not be executable, because it is already executing.
    assert not Act.execute()

    # Test the addition of a second set of new configs; the first config is
    # there to test that the exclusion of duplicates works.
    configs = []
    atSi = Atoms("Si8",
                 positions=[[0, 0, 0], [0.25, 0.25, 0.25], [0.5, 0.5, 0],
                            [0.75, 0.75, 0.25], [0.5, 0, 0.5],
                            [0.75, 0.25, 0.75], [0, 0.5, 0.5],
                            [0.25, 0.75, 0.75]],
                 cell=[5.43, 5.43, 5.43])
    configs.append(atSi)
    atSi = Atoms("Si", positions=[[0, 0, 0]], cell=[3, 3, 3])
    configs.append(atSi)
    atSi = Atoms("Si2", positions=[[0, 0, 0], [0.5, 0.75, 0.25]],
                 cell=[3.43, 3.43, 3.43])
    configs.append(atSi)

    Act.add_configs(configs, 2)

    assert Act.iter_file == path.join(Act.root, "iter_2.pkl")
    assert Act.nconfigs == 6
    assert not Act.is_executing()

    Act.setup()
    for cfg in Act.iconfigs:
        assert "Si" in cfg.get_chemical_symbols()
    assert Act.nconfigs == 5
    assert len(Act.last_config_atoms) == 2

    Act.iter_file = path.join(Act.root, "iter_1.pkl")
    Act._load_last_iter()
    assert len(Act.last_iteration) == 3

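# The `add_configs(Act, 1)` call at the top of test_all_active refers to a
# module-level helper that is not shown in this section. Judging from how
# `Act.add_configs(configs, 2)` is used later in the same test, it presumably
# builds a small list of Atoms objects and registers them as the configs for
# the given active-learning iteration. The sketch below only illustrates that
# assumption: the name `_add_configs_sketch` and the specific silicon
# structures are placeholders, not the ones the real helper creates.
def _add_configs_sketch(db, iteration):
    """Build a few Atoms objects and add them to `db` as the configs for
    `iteration` (illustrative sketch of the unshown `add_configs` helper).
    """
    configs = [
        Atoms("Si", positions=[[0, 0, 0]], cell=[3, 3, 3]),
        Atoms("Si2", positions=[[0, 0, 0], [0.5, 0.75, 0.25]],
              cell=[3.43, 3.43, 3.43]),
        Atoms("Si8",
              positions=[[0, 0, 0], [0.25, 0.25, 0.25], [0.5, 0.5, 0],
                         [0.75, 0.75, 0.25], [0.5, 0, 0.5],
                         [0.75, 0.25, 0.75], [0, 0.5, 0.5],
                         [0.25, 0.75, 0.75]],
              cell=[5.43, 5.43, 5.43]),
    ]
    # Register the structures as the configurations for this iteration.
    db.add_configs(configs, iteration)
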
def test_AlTi_setup(AlTi):
    """Test the setup of the vacancy database.
    """
    assert not AlTi.collections['vacancy'].steps['Vacancy'].is_setup()

    AlTi.setup()
    dbs = "Vacancy/vacancy.Vacancy/Al14Ti6"

    folders = {
        "__files__": ["compute.pkl", "vuids.pkl", "jobfile.sh", "index.json"]
    }
    for i in range(1, 8):
        folders["V.{0}".format(i)] = {
            "__files__": ["INCAR", "PRECALC", "POSCAR"]
        }

    from matdb.utility import compare_tree
    dbfolder = path.join(AlTi.root, dbs)
    compare_tree(dbfolder, folders)

    assert AlTi.collections['vacancy'].steps['Vacancy'].is_setup()

    # test the vuid and index creation for the entire database.
    assert path.isfile(
        path.join(AlTi.root, "Vacancy/vacancy.Vacancy/vuids.pkl"))
    assert path.isfile(
        path.join(AlTi.root, "Vacancy/vacancy.Vacancy/index.json"))

    vac = AlTi.collections['vacancy'].steps['Vacancy']
    assert len(vac.index) == 50
    assert len(vac.vuids) == 50
    assert vac.ready()

    # We need to fake some VASP output so that we can clean up the database
    # and get the rset.
    src = relpath(
        "./tests/data/Pd/complete/OUTCAR__DynMatrix_phonon_Pd_dim-27.00")
    dbfolder = path.join(AlTi.root, dbs)
    for j in range(1, 51):
        dest = path.join(dbfolder, "V.{}".format(j), "OUTCAR")
        symlink(src, dest)

    dbfolder = path.join(AlTi.root, dbs)
    for j in range(1, 51):
        src = path.join(dbfolder, "V.{}".format(j), "POSCAR")
        dest = path.join(dbfolder, "V.{}".format(j), "CONTCAR")
        symlink(src, dest)

    assert len(vac.atoms_paths()) == 50
    assert len(vac.rset()) == 50