def submit_all_snl(min=None, max=None):
    constraints = {'is_ordered': True, 'is_valid': True,
                   'nsites': {'$lte': 200},
                   'canonical_snl.about.projects': {'$ne': 'CederDahn Challenge'}}
    constraints['elements'] = {'$nin': NO_POTCARS}
    constraints['canonical_snl.about.history.name'] = {
        '$ne': 'Materials Project structure optimization'}
    constraints['canonical_snl.about.remarks'] = {'$ne': 'DEPRECATED'}

    # explicit None checks so a legitimate bound of 0 is not silently dropped
    if min is not None and max is not None:
        constraints['snlgroup_id'] = {'$gte': min, '$lte': max}
    elif min is not None or max is not None:
        raise ValueError('Must specify both min AND max if you specify one')

    snldb = SNLMongoAdapter.auto_load()
    sma = SubmissionMongoAdapter.auto_load()
    for result in snldb.snlgroups.find(constraints,
                                       {'canonical_snl': 1, 'snlgroup_id': 1}):
        snl = MPStructureNL.from_dict(result['canonical_snl'])
        parameters = {'snlgroup_id': result['snlgroup_id']}
        sma.submit_snl(snl, 'Anubhav Jain <*****@*****.**>', parameters=parameters)
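# A minimal usage sketch (the snlgroup_id bounds are illustrative, not real
# ids; assumes NO_POTCARS and the adapters above are importable here):
if __name__ == '__main__':
    # submit every eligible SNL group, or pass both bounds to restrict the
    # run to a contiguous snlgroup_id window:
    submit_all_snl(min=1000, max=2000)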
def run_task(self, fw_spec):
    sma = SNLMongoAdapter.auto_load()
    snl = fw_spec['snl']
    mpsnl, snlgroup_id, spec_group = sma.add_snl(snl)
    mod_spec = [{"_push": {"run_tags": "species_group={}".format(spec_group)}}] \
        if spec_group else None
    return FWAction(update_spec={'mpsnl': mpsnl.as_dict(),
                                 'snlgroup_id': snlgroup_id},
                    mod_spec=mod_spec)
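# A hand-rolled illustration of what the FWAction above does to a child
# Firework's spec (FireWorks applies these mods itself; values are made up):
child_spec = {'run_tags': ['PBE']}
child_spec.update({'mpsnl': {'about': '...'}, 'snlgroup_id': 12345})  # update_spec merge
child_spec['run_tags'].append('species_group=7')                      # the "_push" mod
assert child_spec['run_tags'] == ['PBE', 'species_group=7']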
def clear_env():
    sma = SubmissionMongoAdapter.auto_load()
    lp = LaunchPad.auto_load()
    snl = SNLMongoAdapter.auto_load()

    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')
    with open(db_path) as f:
        db_creds = json.load(f)

    sma._reset()
    lp.reset('', require_password=False)
    snl._reset()

    conn = MongoClient(db_creds['host'], db_creds['port'])
    db = conn[db_creds['database']]
    db.authenticate(db_creds['admin_user'], db_creds['admin_password'])
    db.tasks.remove()
    db.boltztrap.remove()
    db.counter.remove()
    db['dos_fs.chunks'].remove()
    db['dos_fs.files'].remove()
    db['band_structure_fs.chunks'].remove()  # original repeated '.files'; the chunks collection must be cleared too
    db['band_structure_fs.files'].remove()
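# A cautious usage sketch (the environment-variable guard is hypothetical,
# not part of mpworks): clear_env() irreversibly wipes the submission,
# LaunchPad, SNL, and task databases, so gate it behind an explicit opt-in.
if __name__ == '__main__':
    if os.environ.get('MP_CONFIRM_RESET') == 'yes':
        clear_env()
    else:
        print('Refusing to reset databases; set MP_CONFIRM_RESET=yes to proceed.')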
def run_task(self, fw_spec):
    # pass-through option for when we start with an mpsnl and don't actually want to add
    if 'force_mpsnl' in fw_spec and 'force_snlgroup_id' in fw_spec:
        print('USING FORCED MPSNL')
        return FWAction(update_spec={'mpsnl': fw_spec['force_mpsnl'],
                                     'snlgroup_id': fw_spec['force_snlgroup_id']})

    sma = SNLMongoAdapter.auto_load()
    snl = StructureNL.from_dict(fw_spec['snl'])
    mpsnl, snlgroup_id = sma.add_snl(snl)
    return FWAction(update_spec={'mpsnl': mpsnl.as_dict(),
                                 'snlgroup_id': snlgroup_id})
def run_task(self, fw_spec):
    # get the SNL mongo adapter
    sma = SNLMongoAdapter.auto_load()
    # rebuild the SNL from its serialized form in the spec
    snl = StructureNL.from_dict(fw_spec['snl'])
    # add the SNL to the database and pass its canonical form downstream
    mpsnl, snlgroup_id = sma.add_snl(snl)
    return FWAction(update_spec={'mpsnl': mpsnl.as_dict(),
                                 'snlgroup_id': snlgroup_id})
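# A sketch of how a task like the one above is typically wired into a
# FireWorks workflow (AddSNLTask is an illustrative name standing in for
# whichever FireTask subclass defines run_task; `snl` is an existing
# StructureNL):
from fireworks import Firework, Workflow

fw = Firework([AddSNLTask()], spec={'snl': snl.as_dict()}, name='add snl')
wf = Workflow([fw])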
def run_task(self, fw_spec):
    sma = SNLMongoAdapter.auto_load()
    snl = StructureNL.from_dict(fw_spec['snl'])
    mpsnl, snlgroup_id, spec_group = sma.add_snl(snl)
    mod_spec = [{"_push": {"run_tags": "species_group={}".format(spec_group)}}] \
        if spec_group else None
    return FWAction(update_spec={'mpsnl': mpsnl.as_dict(),
                                 'snlgroup_id': snlgroup_id},
                    mod_spec=mod_spec)
def resubmit(self, submission_id, snl_db=None):
    # see if an SNL object has already been created
    if not snl_db:
        snl_db = SNLMongoAdapter.auto_load()
    mpsnl = None
    snlgroup_id = None
    snl_dict = snl_db.snl.find_one(
        {'about._materialsproject.submission_id': submission_id})
    if snl_dict:
        mpsnl = MPStructureNL.from_dict(snl_dict)
        snlgroup_id = snl_db.snlgroups.find_one(
            {'all_snl_ids': snl_dict['snl_id']},
            {'snlgroup_id': 1})['snlgroup_id']

    # now reset the current submission parameters
    updates = {'state': 'SUBMITTED', 'state_details': {}, 'task_dict': {}}
    if mpsnl:
        updates['parameters'] = self.jobs.find_one(
            {'submission_id': submission_id}, {'parameters': 1})['parameters']
        updates['parameters'].update({'mpsnl': mpsnl.as_dict(),
                                      'snlgroup_id': snlgroup_id})
    self.jobs.find_and_modify({'submission_id': submission_id},
                              {'$set': updates})
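# A minimal usage sketch (the submission id is made up): resubmit() resets
# the stored state to 'SUBMITTED' and, when an SNL was already created for
# the submission, re-attaches its mpsnl/snlgroup_id to the job parameters.
sma = SubmissionMongoAdapter.auto_load()
sma.resubmit(12345)                                      # adapter loads the SNL db itself
sma.resubmit(12345, snl_db=SNLMongoAdapter.auto_load())  # or reuse an open handle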
def process_fw(self, old_task, d):
    # AJ - this whole section is different
    sma = SNLMongoAdapter.auto_load()
    d['old_engine'] = old_task.get('engine')
    if 'fw_id' in old_task:
        d['old_fw_id'] = old_task['fw_id']
    d['fw_id'] = None
    d['task_type'] = 'GGA+U optimize structure (2x)' if old_task['is_hubbard'] \
        else 'GGA optimize structure (2x)'
    d['submission_id'] = None
    d['vaspinputset_name'] = None

    snl_d = sma.snl.find_one(
        {'about._materialsproject.deprecated.mps_ids': old_task['mps_id']})
    if old_task.get('mps_id', -1) > 0 and snl_d:
        # grab the SNL from the SNL db
        del snl_d['_id']
        d['snl'] = snl_d
        d['snlgroup_id'] = sma.snlgroups.find_one(
            {'all_snl_ids': d['snl']['snl_id']},
            {'snlgroup_id': 1})['snlgroup_id']
    elif 'mps' in old_task and old_task['mps']:
        snl = mps_dict_to_snl(old_task['mps'])
        mpsnl, snlgroup_id = sma.add_snl(snl)
        d['snl'] = mpsnl.as_dict()
        d['snlgroup_id'] = snlgroup_id
    else:
        s = Structure.from_dict(old_task['input']['crystal'])
        snl = StructureNL(s, 'Anubhav Jain <*****@*****.**>',
                          remarks=['origin unknown'])
        mpsnl, snlgroup_id = sma.add_snl(snl)
        d['snl'] = mpsnl.as_dict()
        d['snlgroup_id'] = snlgroup_id

    if 'optimize structure' in d['task_type'] and 'output' in d:
        # create a new SNL based on optimized structure
        new_s = Structure.from_dict(d['output']['crystal'])
        old_snl = StructureNL.from_dict(d['snl'])
        history = old_snl.history
        history.append(
            {'name': 'Materials Project structure optimization',
             'url': 'http://www.materialsproject.org',
             'description': {'task_type': d['task_type'],
                             'fw_id': d['fw_id'],
                             'task_id': d['task_id']}})
        new_snl = StructureNL(new_s, old_snl.authors, old_snl.projects,
                              old_snl.references, old_snl.remarks,
                              old_snl.data, history)

        # add the new SNL, guessing it belongs to the same group
        mpsnl, snlgroup_id = sma.add_snl(new_snl, snlgroup_guess=d['snlgroup_id'])
        d['snl_final'] = mpsnl.as_dict()
        d['snlgroup_id_final'] = snlgroup_id
        d['snlgroup_changed'] = (d['snlgroup_id'] != d['snlgroup_id_final'])

    # custom processing for detecting errors
    dir_name = old_task['dir_name']
    new_style = os.path.exists(os.path.join(dir_name, 'FW.json'))
    vasp_signals = {}
    critical_errors = ["INPUTS_DONT_EXIST", "OUTPUTS_DONT_EXIST",
                       "INCOHERENT_POTCARS", "VASP_HASNT_STARTED",
                       "VASP_HASNT_COMPLETED", "CHARGE_UNCONVERGED",
                       "NETWORK_QUIESCED", "HARD_KILLED", "WALLTIME_EXCEEDED",
                       "ATOMS_TOO_CLOSE", "DISK_SPACE_EXCEEDED"]

    last_relax_dir = dir_name
    if not new_style:
        # get the last relaxation dir
        # the order is relax2, current dir, then relax1. This is because
        # after completing relax1, the job happens in the current dir.
        # Finally, it gets moved to relax2.
        # There are some weird cases where both the current dir and relax2
        # contain data. The relax2 is good, but the current dir is bad.
        if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
            last_relax_dir = os.path.join(dir_name, "relax2")
        elif is_valid_vasp_dir(dir_name):
            pass
        elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
            last_relax_dir = os.path.join(dir_name, "relax1")

    vasp_signals['last_relax_dir'] = last_relax_dir
    # see what error signals are present
    print('getting signals for dir: {}'.format(last_relax_dir))
    sl = SignalDetectorList()
    sl.append(VASPInputsExistSignal())
    sl.append(VASPOutputsExistSignal())
    sl.append(VASPOutSignal())
    sl.append(HitAMemberSignal())
    sl.append(SegFaultSignal())
    sl.append(VASPStartedCompletedSignal())

    signals = sl.detect_all(last_relax_dir)
    signals = signals.union(WallTimeSignal().detect(dir_name))
    if not new_style:
        root_dir = os.path.dirname(dir_name)  # one level above dir_name
        signals = signals.union(WallTimeSignal().detect(root_dir))
    signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
    if not new_style:
        root_dir = os.path.dirname(dir_name)  # one level above dir_name
        signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))
    signals = list(signals)

    critical_signals = [val for val in signals if val in critical_errors]
    vasp_signals['signals'] = signals
    vasp_signals['critical_signals'] = critical_signals
    vasp_signals['num_signals'] = len(signals)
    vasp_signals['num_critical'] = len(critical_signals)

    if len(critical_signals) > 0 and d['state'] == "successful":
        d["state"] = "error"

    d['analysis'] = d.get('analysis', {})
    d['analysis']['errors_MP'] = vasp_signals

    d['run_tags'] = ['PBE']
    d['run_tags'].extend(d['pseudo_potential']['labels'])
    d['run_tags'].extend([e + "=" + str(d['hubbards'].get(e, 0))
                          for e in d['elements']])
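# A hypothetical refactor sketch (not in the original mpworks code): the
# relax2 -> current dir -> relax1 fallback above recurs in several
# process_fw variants, so it could live in one helper. It reuses the same
# is_valid_vasp_dir() predicate and preserves the preference order the
# comment above describes.
def get_last_relax_dir(dir_name):
    """Return the directory holding the most trustworthy VASP output."""
    for sub in ('relax2', '', 'relax1'):
        candidate = os.path.join(dir_name, sub) if sub else dir_name
        if is_valid_vasp_dir(candidate):
            return candidate
    return dir_name  # nothing validated; fall back to the task dir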
def process_fw(self, dir_name, d):
    d["task_id_deprecated"] = int(d["task_id"].split('-')[-1])  # useful for WC and AJ

    # update the run fields to give species group in root, if it exists
    for r in d['run_tags']:
        if "species_group=" in r:
            d["species_group"] = int(r.split("=")[-1])
            break

    # custom Materials Project post-processing for FireWorks
    with zopen(zpath(os.path.join(dir_name, 'FW.json'))) as f:
        fw_dict = json.load(f)
        d['fw_id'] = fw_dict['fw_id']
        d['snl'] = fw_dict['spec']['mpsnl']
        d['snlgroup_id'] = fw_dict['spec']['snlgroup_id']
        d['vaspinputset_name'] = fw_dict['spec'].get('vaspinputset_name')
        d['task_type'] = fw_dict['spec']['task_type']

        # process data for deformed structures
        if 'deformed' in d['task_type']:
            d['deformation_matrix'] = fw_dict['spec']['deformation_matrix']
            d['original_task_id'] = fw_dict['spec']['original_task_id']

    if not self.update_duplicates:
        if 'optimize structure' in d['task_type'] and 'output' in d:
            # create a new SNL based on optimized structure
            new_s = Structure.from_dict(d['output']['crystal'])
            old_snl = StructureNL.from_dict(d['snl'])
            history = old_snl.history
            history.append(
                {'name': 'Materials Project structure optimization',
                 'url': 'http://www.materialsproject.org',
                 'description': {'task_type': d['task_type'],
                                 'fw_id': d['fw_id'],
                                 'task_id': d['task_id']}})
            new_snl = StructureNL(new_s, old_snl.authors, old_snl.projects,
                                  old_snl.references, old_snl.remarks,
                                  old_snl.data, history)

            # enter new SNL into SNL db via the SNL mongo adapter
            sma = SNLMongoAdapter.auto_load()
            mpsnl, snlgroup_id, spec_group = sma.add_snl(
                new_snl, snlgroup_guess=d['snlgroup_id'])
            d['snl_final'] = mpsnl.as_dict()
            d['snlgroup_id_final'] = snlgroup_id
            d['snlgroup_changed'] = (d['snlgroup_id'] != d['snlgroup_id_final'])
        else:
            d['snl_final'] = d['snl']
            d['snlgroup_id_final'] = d['snlgroup_id']
            d['snlgroup_changed'] = False

        # custom processing for detecting errors
        new_style = os.path.exists(zpath(os.path.join(dir_name, 'FW.json')))
        vasp_signals = {}
        critical_errors = ["INPUTS_DONT_EXIST", "OUTPUTS_DONT_EXIST",
                           "INCOHERENT_POTCARS", "VASP_HASNT_STARTED",
                           "VASP_HASNT_COMPLETED", "CHARGE_UNCONVERGED",
                           "NETWORK_QUIESCED", "HARD_KILLED",
                           "WALLTIME_EXCEEDED", "ATOMS_TOO_CLOSE",
                           "DISK_SPACE_EXCEEDED", "NO_RELAX2",
                           "POSITIVE_ENERGY"]

        last_relax_dir = dir_name
        if not new_style:
            # get the last relaxation dir
            # the order is relax2, current dir, then relax1. This is because
            # after completing relax1, the job happens in the current dir.
            # Finally, it gets moved to relax2.
            # There are some weird cases where both the current dir and relax2
            # contain data. The relax2 is good, but the current dir is bad.
            if is_valid_vasp_dir(os.path.join(dir_name, "relax2")):
                last_relax_dir = os.path.join(dir_name, "relax2")
            elif is_valid_vasp_dir(dir_name):
                pass
            elif is_valid_vasp_dir(os.path.join(dir_name, "relax1")):
                last_relax_dir = os.path.join(dir_name, "relax1")

        vasp_signals['last_relax_dir'] = last_relax_dir
        # see what error signals are present
        print('getting signals for dir: {}'.format(last_relax_dir))
        sl = SignalDetectorList()
        sl.append(VASPInputsExistSignal())
        sl.append(VASPOutputsExistSignal())
        sl.append(VASPOutSignal())
        sl.append(HitAMemberSignal())
        sl.append(SegFaultSignal())
        sl.append(VASPStartedCompletedSignal())
        if d['state'] == 'successful' and 'optimize structure' in d['task_type']:
            sl.append(Relax2ExistsSignal())

        signals = sl.detect_all(last_relax_dir)
        signals = signals.union(WallTimeSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(WallTimeSignal().detect(root_dir))
        signals = signals.union(DiskSpaceExceededSignal().detect(dir_name))
        if not new_style:
            root_dir = os.path.dirname(dir_name)  # one level above dir_name
            signals = signals.union(DiskSpaceExceededSignal().detect(root_dir))

        # guard against a missing energy before comparing; `None > 0` raises on Python 3
        final_energy = d.get('output', {}).get('final_energy')
        if final_energy is not None and final_energy > 0:
            signals.add('POSITIVE_ENERGY')
        signals = list(signals)

        critical_signals = [val for val in signals if val in critical_errors]
        vasp_signals['signals'] = signals
        vasp_signals['critical_signals'] = critical_signals
        vasp_signals['num_signals'] = len(signals)
        vasp_signals['num_critical'] = len(critical_signals)

        if len(critical_signals) > 0 and d['state'] == "successful":
            d["state"] = "error"

        d['analysis'] = d.get('analysis', {})
        d['analysis']['errors_MP'] = vasp_signals
from mpworks.snl_utils.snl_mongo import SNLMongoAdapter
from mpworks.snl_utils.mpsnl import MPStructureNL, SNLGroup
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.analysis.structure_matcher import StructureMatcher, \
    ElementComparator, SpeciesComparator
import plotly.plotly as py
import plotly.tools as tls
from plotly.graph_objs import *
from mpworks.check_snl.utils import div_plus_mod, sleep
from ast import literal_eval as make_tuple
from itertools import chain

creds = tls.get_credentials_file()
stream_ids = creds['stream_ids']

min_sleep = 0.052

sma = SNLMongoAdapter.auto_load()
matcher = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                           primitive_cell=True, scale=True,
                           attempt_supercell=False,
                           comparator=ElementComparator())

num_ids_per_stream = 20000
num_ids_per_stream_k = num_ids_per_stream // 1000  # stream width in thousands (explicit integer division)
num_snls = sma.snl.count()
num_snlgroups = sma.snlgroups.count()
num_pairs_per_job = 1000 * num_ids_per_stream
num_pairs_max = num_snlgroups * (num_snlgroups - 1) // 2  # n-choose-2 distinct group pairs
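# A small worked example of the sharding arithmetic above (the snlgroup
# count is hypothetical): with N groups there are N*(N-1)//2 distinct pairs
# to cross-check, and each job is sized to cover num_pairs_per_job of them.
N = 100000                          # hypothetical num_snlgroups
pairs = N * (N - 1) // 2            # 4,999,950,000 candidate pairs
jobs = -(-pairs // (1000 * 20000))  # ceiling division -> 250 jobs
print(pairs, jobs)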
import csv

from mpworks.snl_utils.snl_mongo import SNLMongoAdapter

sma = SNLMongoAdapter.auto_load()

with open('mpworks/check_snl/results/zero_occu_sites.csv', 'wb') as f:
    writer = csv.writer(f)
    writer.writerow(['snl_id', 'num_zero_occu_sites', 'icsd_id',
                     'is_valid', 'formula'])
    for doc in sma.snl.aggregate([
        #{'$match': {'about._icsd.icsd_id': {'$exists': True}}},
        {'$unwind': '$sites'},
        {'$unwind': '$sites.species'},
        {'$project': {'snl_id': 1, 'sites.species.occu': 1, '_id': 0,
                      'about._icsd.icsd_id': 1, 'is_valid': 1,
                      'reduced_cell_formula_abc': 1}},
        {'$match': {'sites.species.occu': 0.0}},
        {'$group': {'_id': '$snl_id',
                    'num_zero_occu_sites': {'$sum': 1},
                    'icsd_ids': {'$addToSet': '$about._icsd.icsd_id'},
                    'is_valid': {'$addToSet': '$is_valid'},
                    'formula': {'$addToSet': '$reduced_cell_formula_abc'}}},
    ], cursor={}):
        icsd_id = doc['icsd_ids'][0] if len(doc['icsd_ids']) > 0 else ''
        row = [doc['_id'], doc['num_zero_occu_sites'], icsd_id,
               doc['is_valid'][0], doc['formula'][0]]
        writer.writerow(row)  # the original built `row` but never wrote it out
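# What the pipeline above computes, traced in plain Python on one
# hypothetical SNL document with a single zero-occupancy species entry:
snl_doc = {'snl_id': 1, 'is_valid': True, 'reduced_cell_formula_abc': 'AB',
           'sites': [{'species': [{'occu': 0.0}, {'occu': 1.0}]}]}
# the two $unwind stages flatten to one row per (site, species); $match
# keeps occu == 0; $group then counts the survivors per snl_id
zero_rows = [sp for site in snl_doc['sites'] for sp in site['species']
             if sp['occu'] == 0.0]
print(len(zero_rows))  # -> 1 zero-occupancy site entry for snl_id 1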