def test_dojodata(self):
    """Testing dojodata.py script"""
    environment = self.get_env()

    # Exercise the ``table`` sub-command on a reference dojo table.
    table_path = dojotable_absdir("ONCVPSP-PBE-PDv0.3")
    environment.run(self.script, "table", table_path, self.loglevel, self.verbose)
def test_from_djson(self):
    """Initializing DojoTable from djson file."""
    path_to_djson = os.path.join(dojotable_absdir("ONCVPSP-PBE"), "accuracy.djson")
    table = DojoTable.from_djson(path_to_djson)

    # A table built from a djson file must carry its dojo_info metadata.
    print(table.dojo_info)
    assert table.dojo_info
def test_from_dojodir(self):
    """Initializing DojoTable from directory."""
    table = DojoTable.from_dojodir(dojotable_absdir("ONCVPSP-PBE-PDv0.3"))
    repr(table)
    str(table)

    # to_djson produces a template dictionary for a djson file.
    assert isinstance(table.to_djson(), dict)

    # This table contains multiple pseudos per element,
    # so dojo_find_errors is expected to flag it.
    checksums = {p.basename: p.md5 for p in table}
    errors = table.dojo_find_errors(md5dict=checksums, require_hints=False)
    assert errors

    # Build the Dojo DataFrame.
    dojo_frame, errors = table.get_dojo_dataframe()
    #print(dojo_frame)
    # TODO
    #if errors:
    #    print("Found errors in dojotable:")
    #    pprint(errors)
    #assert not errors

    # Write ipython notebook
    if self.has_nbformat():
        table.write_notebook()

    # Test helper functions
    #dojo_frame.tabulate()

    # Test myfamilies and select_family.
    assert isinstance(dojo_frame.select_family("alkaline"), dojo_frame.__class__)
    families = dojo_frame.myfamilies()
    assert families
    for fam in families:
        selection = dojo_frame.select_family(fam)
        assert len(selection) and isinstance(selection, dojo_frame.__class__)

    # Test myrows and select_rows.
    rows = dojo_frame.myrows()
    assert rows
    for row in rows:
        selection = dojo_frame.select_rows(row)
        assert len(selection) and isinstance(selection, dojo_frame.__class__)
    assert isinstance(dojo_frame.select_rows([1, 3]), dojo_frame.__class__)

    # Plot tools
    if self.has_matplotlib():
        dojo_frame.plot_hist(show=False)
        dojo_frame.plot_trials(show=False)

    # Test DeltaFactor, GBRV DataFrame
    dfgbrv_frame = table.get_dfgbrv_dataframe()
    if self.has_matplotlib():
        dfgbrv_frame.plot_dfgbrv_dist(show=False)
def __init__(self, table_dir: str, djson_name: str):
    """
    Args:
        table_dir: basename of the directory containing the pseudos
        djson_name: name of the json file in `table_dir` with the
            list of pseudos and metadata.
    """
    self.table_dir = table_dir
    # Resolve the basename to the absolute path of the dojo table directory.
    self.dojo_absdir = dojotable_absdir(table_dir)
    self.djson_name = djson_name
    # Full path to the djson file inside the table directory.
    self.djson_path = os.path.join(self.dojo_absdir, djson_name)
def test_from_dojodir(self):
    """Initializing DojoTable from directory."""
    table = DojoTable.from_dojodir(dojotable_absdir("ONCVPSP-PBE"))

    # This table contains multiple pseudos for the same element,
    # and dojo_check_errors should detect it.
    # NOTE(review): sibling tests call this API ``dojo_find_errors`` —
    # confirm which method name is current.
    checksums = {p.basename: p.md5 for p in table}
    errors = table.dojo_check_errors(md5dict=checksums, require_hints=False)
    print(errors)
    assert errors
def from_dojodir(cls, dojodir, accuracy='standard'):
    """Use a dojodir string to get a djson file and initialize the class"""
    import glob
    from pseudo_dojo.pseudos import dojotable_absdir

    table_root = dojotable_absdir(dojodir)
    djson_path = os.path.join(table_root, accuracy + '.djson')

    if not os.path.isfile(djson_path):
        # Report which accuracies are actually available in this dojodir.
        found = glob.glob(os.path.join(table_root, "*.djson"))
        available = [os.path.basename(f).replace('.djson', '') for f in found]
        raise FileNotFoundError("File {} does not exist. "
                                "Found djson files for accuracy = {}".format(djson_path, available))

    return cls.from_djson_file(djson_path)
def test_db_update(self):
    """Testing DB update"""
    dirpath = dojotable_absdir("ONCVPSP-PBE")

    # Start from an empty database object.
    db = RocksaltOutdb.new_from_dojodir(dirpath)

    # A freshly built db should report no changes.
    update = db.check_update()
    print(update)
    assert update.nrec_added == 0 and update.nrec_removed == 0

    # Hack the object to simulate a pseudo that has been removed.
    pruned = [p for p in db.dojo_pptable if p.basename != "Si.psp8"]
    db.dojo_pptable = DojoTable.as_table(pruned)

    # TODO:
    update = db.check_update()
    print(update)
    assert update.nrec_added == 0 and update.nrec_removed == 0
def test_db_update(self):
    """Testing DB update"""
    # NOTE: the early return below deliberately disables this test;
    # everything after it is dead code kept for future re-activation.
    return
    dirpath = dojotable_absdir("ONCVPSP-PBE")

    # Init an empty object.
    outdb = RocksaltOutdb.new_from_dojodir(dirpath)

    # No change here
    u = outdb.check_update()
    print(u)
    assert u.nrec_added == 0 and u.nrec_removed == 0

    # Now I hack a bit the object to simulate a pseudo that has been removed
    new_table = [p for p in outdb.dojo_pptable if p.basename != "Si.psp8"]
    outdb.dojo_pptable = DojoTable.as_table(new_table)

    # TODO:
    u = outdb.check_update()
    print(u)
    assert u.nrec_added == 0 and u.nrec_removed == 0
def from_djson_file(cls, json_path):
    """
    Initialize the pseudopotential table from one of the **official** djson files
    located in one of the subdirectories inside pseudo_dojo.pseudos.

    json_path contains the following dictionary in JSON format:

    {
    "dojo_info": {
          "pp_type": "NC",
          "xc_name": "PBE",
          "authors": ["J. Doe",],
          "generation_date": "2015-07-20",
          "description": "String",
          "tags": ["accuracy", "tag2"],
          "reference": "paper",
          "dojo_dir": "ONCVPSP-PBE",
    },
    "pseudos_metadata": {
        "Si": {
            "basename": "Si-dloc.psp8",
            "Z_val": 4.0,
            "l_max": 2,
            "md5": "ececcf5b26f34676694b630d6bc809ff"
        },
        "O": {
            "basename": "O-dmax.psp8",
            "Z_val": 6.0,
            "l_max": 2,
            "md5": "f7d0f3573362d89c81c41fc6b7b3e6ab"
        }
    }
    }
    """
    json_path = os.path.abspath(json_path)
    with open(json_path, "rt") as fh:
        d = json.load(fh)

    # Read and validate dojo_info.
    dojo_info = DojoInfo(**d["dojo_info"])
    try:
        dojo_info.validate_json_schema()
    except Exception:
        print("Validation error in %s" % json_path)
        # Bare raise re-raises the original exception with its traceback intact.
        raise

    meta = d["pseudos_metadata"]

    # Pseudos live below dojo_dir when it is given, otherwise next to the djson file.
    if dojo_info.get("dojo_dir", None):
        from pseudo_dojo.pseudos import dojotable_absdir
        top = dojotable_absdir(dojo_info.dojo_dir)
    else:
        top = os.path.dirname(json_path)

    paths, md5dict = [], {}
    for esymb, m in meta.items():
        if isinstance(m, (list, tuple)):
            raise TypeError("Invalid djson file. Expecting dict but got list (multiple pseudos):\n\n %s" % str(m))
        path = os.path.join(top, esymb, m["basename"])
        paths.append(path)
        md5dict[m["basename"]] = m["md5"]

    # TODO: Avoid parsing the pseudos. Construct them from dict.
    new = cls(paths).sort_by_z()
    new.set_dojo_info(dojo_info)

    # TODO: To be activated
    #errors = new.dojo_find_errors(md5dict)
    #if errors:
    #    raise ValueError("\n".join(errors))

    return new
def test_rocksalt_outdb(self):
    """Testing RocksaltOutdb database and its API."""
    # NOTE: this test is disabled via the early return below; everything
    # after it is dead code kept for future re-activation.
    return
    dirpath = dojotable_absdir("ONCVPSP-PBE")

    # Test the initialization of an empty object.
    outdb = RocksaltOutdb.new_from_dojodir(dirpath)
    #outdb.dojo_dir = "dummy_dir"
    #print(outdb)
    assert outdb.struct_type == "rocksalt"

    # Check that outdb supports pickle because the works will get a reference to it.
    self.serialize_with_pickle(outdb, protocols=None, test_eq=True)

    # Dict protocol
    assert "LiF" in outdb and "LiF" in outdb.keys()
    records = outdb["LiF"]

    # Test records (dict-like objects) supporting __eq__ and __ne__
    for rec in records:
        assert rec.formula == "LiF"
        assert rec["normal"] is None and rec["high"] is None
        assert "pseudos_metadata" in rec
        assert not rec.has_data("normal")

        d = rec.as_dict()
        same_rec = GbrvRecord.from_dict(d, outdb.struct_type, rec.dojo_pptable)
        #print(rec)
        assert same_rec == rec

    for formula, records in outdb.items():
        # Test find_record
        for rec in records:
            same_rec = outdb.find_record(formula, rec.pseudos)
            #assert rec.matches_pseudos(same_rec.pseudos)
            assert rec == same_rec

        # All the records for the same formula should be different!
        if len(records) > 1:
            for rec1, rec2 in zip(records[:-1], records[1:]):
                assert rec1 != rec2

    # Here I compare all the records in the database!
    all_records = []
    for records in outdb.values():
        all_records.extend(records)

    for rec1, rec2 in zip(all_records[:-1], all_records[1:]):
        assert rec1 != rec2
        assert not rec1.matches_pseudos(rec2.pseudos)

    # Test pandas dataframe
    frame = outdb.get_dataframe()
    assert frame is not None

    # Test matplotlib tools
    # NOTE(review): no has_matplotlib() guard here, unlike the sibling test —
    # harmless while the early return above disables the test, but confirm
    # before re-activating.
    outdb.plot_errors()

    # Test API to extract jobs
    jobs = outdb.find_jobs_torun(max_njobs=3)
    assert len(jobs) == 3

    # Retrieve the record from the job params and make sure
    # the entry is set to scheduled.
    for job in jobs:
        rec = outdb.find_record(job.formula, job.pseudos)
        assert rec[job.accuracy] == "scheduled"

    # Write the object in json format
    filepath = "dummy.json"
    outdb.json_write(filepath=filepath)

    # And now we re-read it from file.
    new_outdb = GbrvOutdb.from_file(filepath)

    assert new_outdb.struct_type == outdb.struct_type
    assert len(new_outdb) == len(outdb)
    # NB: This works because all values support __eq__
    assert new_outdb == outdb
def test_rocksalt_outdb(self):
    """Testing RocksaltOutdb database and its API."""
    # NOTE: this test is disabled via the early return below; everything
    # after it is dead code kept for future re-activation.
    return
    dirpath = dojotable_absdir("ONCVPSP-PBE")

    # Test the initialization of an empty object.
    outdb = RocksaltOutdb.new_from_dojodir(dirpath)
    #outdb.dojo_dir = "dummy_dir"
    #print(outdb)
    assert outdb.struct_type == "rocksalt"

    # Check that outdb supports pickle because the works will get a reference to it.
    self.serialize_with_pickle(outdb, protocols=None, test_eq=True)

    # Dict protocol
    assert "LiF" in outdb and "LiF" in outdb.keys()
    records = outdb["LiF"]

    # Test records (dict-like objects) supporting __eq__ and __ne__
    for rec in records:
        assert rec.formula == "LiF"
        assert rec["normal"] is None and rec["high"] is None
        assert "pseudos_metadata" in rec
        assert not rec.has_data("normal")

        d = rec.as_dict()
        assert isinstance(d, dict)
        same_rec = GbrvRecord.from_dict(d, outdb.struct_type, rec.dojo_pptable)
        #print(rec)
        assert same_rec == rec

    for formula, records in outdb.items():
        # Test find_record
        for rec in records:
            same_rec = outdb.find_record(formula, rec.pseudos)
            #assert rec.matches_pseudos(same_rec.pseudos)
            assert rec == same_rec

        # All the records for the same formula should be different!
        if len(records) > 1:
            for rec1, rec2 in zip(records[:-1], records[1:]):
                assert rec1 != rec2

    # Here I compare all the records in the database!
    all_records = []
    for records in outdb.values():
        all_records.extend(records)

    for rec1, rec2 in zip(all_records[:-1], all_records[1:]):
        assert rec1 != rec2
        assert not rec1.matches_pseudos(rec2.pseudos)

    # Test pandas dataframe
    frame = outdb.get_dataframe()
    assert frame is not None

    # Test matplotlib tools
    if self.has_matplotlib():
        outdb.plot_errors()

    # Test API to extract jobs
    jobs = outdb.find_jobs_torun(max_njobs=3)
    assert len(jobs) == 3

    # Retrieve the record from the job params and make sure
    # the entry is set to scheduled.
    for job in jobs:
        rec = outdb.find_record(job.formula, job.pseudos)
        assert rec[job.accuracy] == "scheduled"

    # Write the object in json format
    filepath = "dummy.json"
    outdb.json_write(filepath=filepath)

    # And now we re-read it from file.
    new_outdb = GbrvOutdb.from_file(filepath)

    assert new_outdb.struct_type == outdb.struct_type
    assert len(new_outdb) == len(outdb)
    # NB: This works because all values support __eq__
    assert new_outdb == outdb