def run_task(self, fw_spec):
    """Extract the final structure and history from the workflow and insert them in MongoDB.

    The FW.json/FW.yaml file is mandatory to obtain the fw_id; the whole
    Firework does not need to be deserialized.
    """
    try:
        fw_dict = loadfn('FW.json')
    except IOError:
        try:
            fw_dict = loadfn('FW.yaml')
        except IOError:
            raise RuntimeError("No FW.json nor FW.yaml file present: impossible to determine fw_id")

    current_fw_id = fw_dict['fw_id']
    launchpad = LaunchPad.auto_load()
    workflow = launchpad.get_wf_by_fw_id_lzyfw(current_fw_id)

    # Resolve the workflow class declared in the workflow metadata
    module = importlib.import_module(workflow.metadata['workflow_module'])
    workflow_cls = getattr(module, workflow.metadata['workflow_class'])

    #TODO: make this more general ... just to test right now ...
    results_fn = getattr(workflow_cls, 'get_final_structure_and_history')
    results = results_fn(workflow)

    database = MongoDatabase.from_dict(fw_spec['mongo_database'])
    database.insert_entry({'structure': results['structure'],
                           'history': results['history']})

    logging.info("Inserted data:\n something")
    return FWAction()
def clear_env():
    """Reset the submission, launchpad and SNL adapters and wipe the task collections.

    DB credentials are read from the tasks_db.json file located in the
    directory pointed to by the DB_LOC environment variable.
    """
    sma = SubmissionMongoAdapter.auto_load()
    lp = LaunchPad.auto_load()
    snl = SNLMongoAdapter.auto_load()

    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')
    with open(db_path) as f:
        db_creds = json.load(f)

    sma._reset()
    lp.reset('', require_password=False)
    snl._reset()

    conn = MongoClient(db_creds['host'], db_creds['port'])
    db = conn[db_creds['database']]
    db.authenticate(db_creds['admin_user'], db_creds['admin_password'])
    db.tasks.remove()
    db.boltztrap.remove()
    db.counter.remove()
    db['dos_fs.chunks'].remove()
    db['dos_fs.files'].remove()
    # bugfix: the original removed 'band_structure_fs.files' twice and never
    # cleared 'band_structure_fs.chunks', leaving orphaned GridFS chunks behind
    db['band_structure_fs.chunks'].remove()
    db['band_structure_fs.files'].remove()
def run_task(self, fw_spec):
    """Delete temporary/input/output files for every launch of the current workflow.

    The launchpad and fw_id are taken from the spec when
    '_add_launchpad_and_fw_id' is set; otherwise they are recovered from the
    mandatory FW.json/FW.yaml file (the whole FW is not deserialized).
    """
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            fw_dict = loadfn('FW.json')
        except IOError:
            try:
                fw_dict = loadfn('FW.yaml')
            except IOError:
                raise RuntimeError("Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                                   "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    wf = lp.get_wf_by_fw_id_lzyfw(fw_id)

    removed = []
    # walk every firework in the workflow and all of its (archived) launches
    for node_id, fw in wf.id_fw.items():
        for launch in fw.launches + fw.archived_launches:
            base_dir = launch.launch_dir
            removed.extend(self.delete_files(os.path.join(base_dir, TMPDIR_NAME)))
            removed.extend(self.delete_files(os.path.join(base_dir, INDIR_NAME)))
            removed.extend(self.delete_files(os.path.join(base_dir, OUTDIR_NAME), self.out_exts))

    logging.info("Deleted files:\n {}".format("\n".join(removed)))
    return FWAction(stored_data={'deleted_files': removed})
def run_task(self, fw_spec):
    """Fetch the workflow's mongoengine results document and save it to the configured DB."""
    self.db_data.connect_mongoengine()

    # the FW.json/yaml file is mandatory to get the fw_id;
    # no need to deserialize the whole FW
    try:
        fw_dict = loadfn('FW.json')
    except IOError:
        try:
            fw_dict = loadfn('FW.yaml')
        except IOError:
            raise RuntimeError(
                "No FW.json nor FW.yaml file present: impossible to determine fw_id"
            )

    current_fw_id = fw_dict['fw_id']
    launchpad = LaunchPad.auto_load()
    workflow = launchpad.get_wf_by_fw_id_lzyfw(current_fw_id)

    module = importlib.import_module(workflow.metadata['workflow_module'])
    workflow_cls = getattr(module, workflow.metadata['workflow_class'])
    results_fn = getattr(workflow_cls, 'get_mongoengine_results')

    #TODO extend for multiple documents?
    document = results_fn(workflow)

    with self.db_data.switch_collection(document.__class__) as document.__class__:
        #TODO it would be better to try to remove automatically the FileFields already saved if the save of
        # the document fails.
        document.save()
def clear_env():
    """Reset the submission, launchpad and SNL adapters and wipe the task collections.

    DB credentials are read from the tasks_db.json file located in the
    directory pointed to by the DB_LOC environment variable.
    """
    sma = SubmissionMongoAdapter.auto_load()
    lp = LaunchPad.auto_load()
    snl = SNLMongoAdapter.auto_load()

    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')
    with open(db_path) as f:
        db_creds = json.load(f)

    sma._reset()
    lp.reset('', require_password=False)
    snl._reset()

    conn = MongoClient(db_creds['host'], db_creds['port'])
    db = conn[db_creds['database']]
    db.authenticate(db_creds['admin_user'], db_creds['admin_password'])
    db.tasks.remove()
    db.boltztrap.remove()
    db.counter.remove()
    db['dos_fs.chunks'].remove()
    db['dos_fs.files'].remove()
    # bugfix: the original removed 'band_structure_fs.files' twice and never
    # cleared 'band_structure_fs.chunks', leaving orphaned GridFS chunks behind
    db['band_structure_fs.chunks'].remove()
    db['band_structure_fs.files'].remove()
def get_lp_and_fw_id_from_task(task, fw_spec):
    """
    Given an instance of a running task and its spec, tries to load the LaunchPad and the current fw_id.
    It will first check for "_add_launchpad_and_fw_id", then try to load from FW.json/FW.yaml file.

    Should be used inside tasks that require access to the LaunchPad and to the whole workflow.

    Args:
        task: An instance of a running task
        fw_spec: The spec of the task

    Returns:
        an instance of LaunchPad and the fw_id of the current task

    Raises:
        RuntimeError: if the LaunchPad/fw_id cannot be determined, or if the
            firework found in the LaunchPad does not match the local FW file.
    """
    # fw_dict is only available when loading from the FW.json/FW.yaml file;
    # the consistency checks at the end are skipped when it is missing.
    fw_dict = None
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = task.launchpad
        fw_id = task.fw_id

        # lp may be None in offline mode
        if lp is None:
            raise RuntimeError("The LaunchPad in spec is None.")
    else:
        try:
            with open('FW.json', "rt") as fh:
                fw_dict = json.load(fh, cls=MontyDecoder)
        except IOError:
            try:
                with open('FW.yaml', "rt") as fh:
                    # bugfix: yaml.load without an explicit Loader is unsafe and
                    # deprecated; the FW file only needs plain YAML parsing
                    fw_dict = yaml.safe_load(fh)
            except IOError:
                raise RuntimeError("Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                                   "impossible to determine fw_id")
        logger.warning("LaunchPad not available from spec. Generated with auto_load.")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    # since it is not given that the LaunchPad is the correct one, try to verify if the workflow
    # and the fw_id are being accessed correctly
    try:
        fw = lp.get_fw_by_id(fw_id)
    except ValueError:
        traceback.print_exc()
        raise RuntimeError("The firework with id {} is not present in the LaunchPad {}. The LaunchPad is "
                           "probably incorrect.".format(fw_id, lp))

    if fw.state != "RUNNING":
        raise RuntimeError("The firework with id {} from LaunchPad {} is {}. There might be an error in the "
                           "selection of the LaunchPad".format(fw_id, lp, fw.state))

    # bugfix: these checks referenced fw_dict unconditionally, raising a
    # NameError whenever the LaunchPad/fw_id came from the spec rather than
    # from the FW file
    if fw_dict is not None:
        if len(fw.tasks) != len(fw_dict['spec']['_tasks']):
            raise RuntimeError("The firework with id {} from LaunchPad {} is has different number of tasks "
                               "from the current.".format(fw_id, lp))

        for db_t, dict_t in zip(fw.tasks, fw_dict['spec']['_tasks']):
            if db_t.fw_name != dict_t['_fw_name']:
                # bugfix: the message formatted dict_t['fw_name'] (missing the
                # leading underscore), which itself raised a KeyError
                raise RuntimeError("The firework with id {} from LaunchPad {} has task that don't match: "
                                   "{} and {}.".format(fw_id, lp, db_t.fw_name, dict_t['_fw_name']))

    return lp, fw_id
def run_task(self, fw_spec):
    """Extract results from the workflow via the configured methods and insert them in MongoDB.

    The launchpad and fw_id are taken from the spec when
    '_add_launchpad_and_fw_id' is set; otherwise they are recovered from the
    mandatory FW.json/FW.yaml file (no need to deserialize the whole FW).
    """
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            fw_dict = loadfn('FW.json')
        except IOError:
            try:
                fw_dict = loadfn('FW.yaml')
            except IOError:
                raise RuntimeError("Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                                   "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    wf = lp.get_wf_by_fw_id(fw_id)
    wf_module = importlib.import_module(wf.metadata['workflow_module'])
    wf_class = getattr(wf_module, wf.metadata['workflow_class'])

    database = fw_spec['mongo_database']
    # with a criteria the existing entry is updated, otherwise a new one is created
    if self.criteria is not None:
        entry = database.get_entry(criteria=self.criteria)
    else:
        entry = {}

    inserted = []
    for root_key, method_name in self.insertion_data.items():
        get_results_method = getattr(wf_class, method_name)
        results = get_results_method(wf)
        for key, val in results.items():
            entry[key] = jsanitize(val)
            inserted.append(key)

    if self.criteria is not None:
        database.save_entry(entry=entry)
    else:
        database.insert_entry(entry=entry)

    # bugfix: the original used '- {}\n'.join(inserted), which interleaves the
    # keys with the literal separator instead of producing one bullet per key
    logging.info("Inserted data:\n{}".format(''.join('- {}\n'.format(k) for k in inserted)))

    return FWAction()
def run_task(self, fw_spec):
    """Extract results from the workflow via the configured methods and insert them in MongoDB.

    The launchpad and fw_id are taken from the spec when
    '_add_launchpad_and_fw_id' is set; otherwise they are recovered from the
    mandatory FW.json/FW.yaml file (no need to deserialize the whole FW).
    """
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            fw_dict = loadfn('FW.json')
        except IOError:
            try:
                fw_dict = loadfn('FW.yaml')
            except IOError:
                raise RuntimeError("Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                                   "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    wf = lp.get_wf_by_fw_id(fw_id)
    wf_module = importlib.import_module(wf.metadata['workflow_module'])
    wf_class = getattr(wf_module, wf.metadata['workflow_class'])

    database = fw_spec['mongo_database']
    # with a criteria the existing entry is updated, otherwise a new one is created
    if self.criteria is not None:
        entry = database.get_entry(criteria=self.criteria)
    else:
        entry = {}

    inserted = []
    for root_key, method_name in self.insertion_data.items():
        get_results_method = getattr(wf_class, method_name)
        results = get_results_method(wf)
        for key, val in results.items():
            entry[key] = jsanitize(val)
            inserted.append(key)

    if self.criteria is not None:
        database.save_entry(entry=entry)
    else:
        database.insert_entry(entry=entry)

    # bugfix: the original used '- {}\n'.join(inserted), which interleaves the
    # keys with the literal separator instead of producing one bullet per key
    logging.info("Inserted data:\n{}".format(''.join('- {}\n'.format(k) for k in inserted)))

    return FWAction()
def run_task(self, fw_spec):
    """Delete temporary/input/output files for every launch of the current workflow.

    The launchpad and fw_id come from the spec when '_add_launchpad_and_fw_id'
    is present, otherwise from the mandatory FW.json/FW.yaml file.
    """
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            fw_dict = loadfn('FW.json')
        except IOError:
            try:
                fw_dict = loadfn('FW.yaml')
            except IOError:
                raise RuntimeError(
                    "Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                    "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    wf = lp.get_wf_by_fw_id_lzyfw(fw_id)

    removed = []
    # every firework of the workflow, including archived launches
    for node_id, fw in wf.id_fw.items():
        for launch in fw.launches + fw.archived_launches:
            base_dir = launch.launch_dir
            for subdir, exts in ((TMPDIR_NAME, None),
                                 (INDIR_NAME, None),
                                 (OUTDIR_NAME, self.out_exts)):
                target = os.path.join(base_dir, subdir)
                if exts is None:
                    removed.extend(self.delete_files(target))
                else:
                    removed.extend(self.delete_files(target, exts))

    logging.info("Deleted files:\n {}".format("\n".join(removed)))
    return FWAction(stored_data={'deleted_files': removed})
def process_task(self, data): try: dir_name = data[0] parse_dos = data[1] prev_info = self.tasks.find_one({'dir_name_full': dir_name}, {'task_type': 1, 'snl_final': 1, 'snlgroup_id_final': 1, 'snlgroup_changed': 1}) drone = MPVaspDrone( host=self.host, port=self.port, database=self.database, user=self.admin_user, password=self.admin_password, collection=self.collection, parse_dos=parse_dos, additional_fields={}, update_duplicates=True) t_id, d = drone.assimilate(dir_name, launches_coll=LaunchPad.auto_load().launches) self.tasks.update({"task_id": t_id}, {"$set": {"snl_final": prev_info['snl_final'], "snlgroup_id_final": prev_info['snlgroup_id_final'], "snlgroup_changed": prev_info['snlgroup_changed']}}) print 'FINISHED', t_id except: print '-----' print 'ENCOUNTERED AN EXCEPTION!!!', data[0] traceback.print_exc() print '-----'
def run_task(self, fw_spec):
    """Check the parent firework: apply handler corrections, or validate and forward its outputs."""
    # Get the fw_id and launchpad
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            fw_dict = loadfn('FW.json')
        except IOError:
            try:
                fw_dict = loadfn('FW.yaml')
            except IOError:
                raise RuntimeError("Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                                   "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    if '_fizzled_parents' in fw_spec:
        # Some error led to a fizzled state: run the handlers that allow fizzled fws
        if len(fw_spec['_fizzled_parents']) != 1:
            raise ValueError('CheckTask\'s Firework should have exactly one parent firework')

        fizzled_fw_id = fw_spec['_fizzled_parents'][0]['fw_id']
        fizzled_fw = lp.get_fw_by_id(fizzled_fw_id)

        # Handlers are applied in order of priority
        sorted_handlers = sorted([h for h in self.handlers if h.allow_fizzled],
                                 key=lambda x: x.handler_priority)

        corrections = []
        for handler in sorted_handlers:
            # Give the handler the spec of this check task/fw and the fw to be checked
            handler.src_setup(fw_spec=fw_spec, fw_to_check=fizzled_fw)
            if handler.check():
                corrections.append(handler.correct())
            if handler.skip_remaining_handlers:
                break

        # In case of a fizzled parent, at least one correction is needed !
        if len(corrections) == 0:
            raise RuntimeError('No corrections found for fizzled firework ...')

        return self.apply_corrections(fw_to_correct=fizzled_fw, corrections=corrections)
    else:
        # No fizzled parent: check the completed previous fw and forward its
        # "needed" outputs to the next firework
        this_lzy_wf = lp.get_wf_by_fw_id_lzyfw(fw_id)
        parents_fw_ids = this_lzy_wf.links.parent_links[fw_id]
        if len(parents_fw_ids) != 1:
            raise ValueError('CheckTask\'s Firework should have exactly one parent firework')
        run_fw = lp.get_fw_by_id(parents_fw_ids[0])

        # Handlers allowing completed fws, in order of priority
        if self.handlers is not None:
            sorted_handlers = sorted([h for h in self.handlers if h.allow_completed],
                                     key=lambda x: x.handler_priority)
        else:
            sorted_handlers = []

        corrections = []
        for handler in sorted_handlers:
            handler.src_setup(fw_spec=fw_spec, fw_to_check=run_fw)
            if handler.check():
                corrections.append(handler.correct())
            if handler.skip_remaining_handlers:
                break

        # If some corrections are found, apply and return the FWAction
        if len(corrections) > 0:
            return self.apply_corrections(fw_to_correct=run_fw, corrections=corrections)

        # Validate the results if no error was found
        validators = self.validators if self.validators is not None else []
        for validator in validators:
            if not validator.check():
                raise RuntimeError('Validator invalidate results ...')

        stored_data = {}
        update_spec = {}
        mod_spec = []
        for task_type, task_info in fw_spec['previous_fws'].items():
            mod_spec.append({'_push_all': {'previous_fws->' + task_type: task_info}})
        return FWAction(stored_data=stored_data, update_spec=update_spec, mod_spec=mod_spec)
def run_task(self, fw_spec):
    """Check the parent firework, apply handler corrections or validate and forward outputs.

    Two cases are handled:
      * a fizzled parent: handlers allowing fizzled fws are run and at least
        one correction must be found and applied;
      * a completed parent: handlers allowing completed fws are run; if no
        correction applies, validators run and the 'previous_fws' info is
        forwarded to the next firework.
    """
    # Get the fw_id and launchpad
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = self.launchpad
        fw_id = self.fw_id
    else:
        try:
            with open('FW.json', "rt") as fh:
                fw_dict = json.load(fh, cls=MontyDecoder)
        except IOError:
            try:
                with open('FW.yaml', "rt") as fh:
                    # bugfix: yaml.load without an explicit Loader is unsafe and
                    # deprecated; the FW file only needs plain YAML parsing
                    fw_dict = yaml.safe_load(fh)
            except IOError:
                raise RuntimeError(
                    "Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                    "impossible to determine fw_id")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    # Treat the case where there was some error that led to a fizzled state
    if '_fizzled_parents' in fw_spec:
        if len(fw_spec['_fizzled_parents']) != 1:
            raise ValueError(
                'CheckTask\'s Firework should have exactly one parent firework')
        # Get the fizzled fw
        fizzled_fw_id = fw_spec['_fizzled_parents'][0]['fw_id']
        fizzled_fw = lp.get_fw_by_id(fizzled_fw_id)
        # Sort handlers by their priority
        sorted_handlers = sorted([h for h in self.handlers if h.allow_fizzled],
                                 key=lambda x: x.handler_priority)
        # Get the corrections for all the handlers
        corrections = []
        for handler in sorted_handlers:
            # Set needed data for the handlers (the spec of this check task/fw
            # and the fw that has to be checked)
            handler.src_setup(fw_spec=fw_spec, fw_to_check=fizzled_fw)
            if handler.check():
                corrections.append(handler.correct())
            if handler.skip_remaining_handlers:
                break
        # In case of a fizzled parent, at least one correction is needed !
        if len(corrections) == 0:
            raise RuntimeError('No corrections found for fizzled firework ...')
        # Apply the corrections
        fw_action = self.apply_corrections(fw_to_correct=fizzled_fw,
                                           corrections=corrections)
        return fw_action
    # Treat the case where there was no fizzled parents => forward "needed"
    # outputs of the previous firework to the next one.
    else:
        # Get the previous fw
        this_lzy_wf = lp.get_wf_by_fw_id_lzyfw(fw_id)
        parents_fw_ids = this_lzy_wf.links.parent_links[fw_id]
        if len(parents_fw_ids) != 1:
            raise ValueError(
                'CheckTask\'s Firework should have exactly one parent firework')
        run_fw = lp.get_fw_by_id(parents_fw_ids[0])
        # Sort handlers by their priority
        if self.handlers is not None:
            sorted_handlers = sorted([h for h in self.handlers if h.allow_completed],
                                     key=lambda x: x.handler_priority)
        else:
            sorted_handlers = []
        # Get the corrections for all the handlers
        corrections = []
        for handler in sorted_handlers:
            handler.src_setup(fw_spec=fw_spec, fw_to_check=run_fw)
            if handler.check():
                corrections.append(handler.correct())
            if handler.skip_remaining_handlers:
                break
        # If some corrections are found, apply and return the FWAction
        if len(corrections) > 0:
            fw_action = self.apply_corrections(fw_to_correct=run_fw,
                                               corrections=corrections)
            return fw_action
        # Validate the results if no error was found
        validators = self.validators if self.validators is not None else []
        for validator in validators:
            if not validator.check():
                raise RuntimeError('Validator invalidate results ...')
        stored_data = {}
        update_spec = {}
        mod_spec = []
        for task_type, task_info in fw_spec['previous_fws'].items():
            mod_spec.append({'_push_all': {'previous_fws->' + task_type: task_info}})
        return FWAction(stored_data=stored_data, update_spec=update_spec,
                        mod_spec=mod_spec)
# coding: utf-8

from __future__ import division, print_function, unicode_literals, absolute_import

from atomate.qchem.workflows.base.FF_then_fragment import get_wf_FF_then_fragment
from fireworks.core.launchpad import LaunchPad
from pymatgen.core import Molecule

# Build the FF-then-fragment workflow for the BF4- anion and submit it to the
# default LaunchPad.
molecule = Molecule.from_file("BF4-.xyz")
workflow = get_wf_FF_then_fragment(molecule=molecule, max_cores=32)
launchpad = LaunchPad.auto_load()
launchpad.add_wf(workflow)
def auto_load(cls):
    """Build a SubmissionProcessorEG from an auto-loaded adapter and launchpad."""
    submissions_adapter = SubmissionMongoAdapterEG.auto_load()
    launchpad = LaunchPad.auto_load()
    return SubmissionProcessorEG(submissions_adapter, launchpad)
def add_to_db(self, lpad=None):
    """Add the workflow to the database, auto-loading a LaunchPad when none is given."""
    lpad = lpad or LaunchPad.auto_load()
    return lpad.add_wf(self.wf)
def add_to_db(self, lpad=None):
    """Add the workflow to the database, auto-loading a LaunchPad when none is given."""
    lpad = lpad or LaunchPad.auto_load()
    return lpad.add_wf(self.wf)
def add_to_db(self, lpad=None):
    """Add the workflow to the LaunchPad database.

    Generalized for consistency with the other add_to_db implementations in
    this codebase: an explicit LaunchPad can now be passed; existing callers
    with no argument keep the auto-load behavior.

    Args:
        lpad: an optional LaunchPad instance; auto-loaded when None.

    Returns:
        the value returned by lpad.add_wf (previously discarded).
    """
    if lpad is None:
        lpad = LaunchPad.auto_load()
    return lpad.add_wf(self.wf)
def run_task(self, fw_spec): if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec: prev_dir = get_loc( fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir']) update_spec = {} # add this later when creating new FW fizzled_parent = True parse_dos = False else: prev_dir = get_loc(fw_spec['prev_vasp_dir']) update_spec = { 'prev_vasp_dir': prev_dir, 'prev_task_type': fw_spec['prev_task_type'], 'run_tags': fw_spec['run_tags'], 'parameters': fw_spec.get('parameters') } fizzled_parent = False parse_dos = 'Uniform' in fw_spec['prev_task_type'] if 'run_tags' in fw_spec: self.additional_fields['run_tags'] = fw_spec['run_tags'] else: self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][ 0]['spec']['run_tags'] if MOVE_TO_GARDEN_DEV: prev_dir = move_to_garden(prev_dir, prod=False) elif MOVE_TO_GARDEN_PROD: prev_dir = move_to_garden(prev_dir, prod=True) # get the directory containing the db file db_dir = os.environ['DB_LOC'] db_path = os.path.join(db_dir, 'tasks_db.json') logging.basicConfig(level=logging.INFO) logger = logging.getLogger('MPVaspDrone') logger.setLevel(logging.INFO) sh = logging.StreamHandler(stream=sys.stdout) sh.setLevel(getattr(logging, 'INFO')) logger.addHandler(sh) with open(db_path) as f: db_creds = json.load(f) drone = MPVaspDrone(host=db_creds['host'], port=db_creds['port'], database=db_creds['database'], user=db_creds['admin_user'], password=db_creds['admin_password'], collection=db_creds['collection'], parse_dos=parse_dos, additional_fields=self.additional_fields, update_duplicates=self.update_duplicates) t_id, d = drone.assimilate( prev_dir, launches_coll=LaunchPad.auto_load().launches) mpsnl = d['snl_final'] if 'snl_final' in d else d['snl'] snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d[ 'snlgroup_id'] update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id}) print 'ENTERED task id:', t_id stored_data = {'task_id': t_id} if d['state'] == 'successful': update_spec['analysis'] = 
d['analysis'] update_spec['output'] = d['output'] update_spec['vasp'] = { 'incar': d['calculations'][-1]['input']['incar'], 'kpoints': d['calculations'][-1]['input']['kpoints'] } update_spec["task_id"] = t_id return FWAction(stored_data=stored_data, update_spec=update_spec) # not successful - first test to see if UnconvergedHandler is needed if not fizzled_parent: unconverged_tag = 'unconverged_handler--{}'.format( fw_spec['prev_task_type']) output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml')) ueh = UnconvergedErrorHandler(output_filename=output_dir) # TODO: make this a little more flexible if ueh.check() and unconverged_tag not in fw_spec['run_tags']: print 'Unconverged run! Creating dynamic FW...' spec = { 'prev_vasp_dir': prev_dir, 'prev_task_type': fw_spec['task_type'], 'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id, 'task_type': fw_spec['prev_task_type'], 'run_tags': list(fw_spec['run_tags']), 'parameters': fw_spec.get('parameters'), '_dupefinder': DupeFinderVasp().to_dict(), '_priority': fw_spec['_priority'] } # Pass elastic tensor spec if 'deformation_matrix' in fw_spec.keys(): spec['deformation_matrix'] = fw_spec['deformation_matrix'] spec['original_task_id'] = fw_spec['original_task_id'] snl = StructureNL.from_dict(spec['mpsnl']) spec['run_tags'].append(unconverged_tag) spec['_queueadapter'] = QA_VASP fws = [] connections = {} f = Composition(snl.structure.composition.reduced_formula ).alphabetical_formula fws.append( Firework([ VaspCopyTask({ 'files': [ 'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR' ], 'use_CONTCAR': False }), SetupUnconvergedHandlerTask(), get_custodian_task(spec) ], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-2)) spec = { 'task_type': 'VASP db insertion', '_allow_fizzled_parents': True, '_priority': fw_spec['_priority'], '_queueadapter': QA_DB, 'run_tags': list(fw_spec['run_tags']) } if 'deformation_matrix' in fw_spec.keys(): spec['deformation_matrix'] = fw_spec['deformation_matrix'] spec['original_task_id'] = 
fw_spec['original_task_id'] spec['run_tags'].append(unconverged_tag) fws.append( Firework([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-1)) connections[-2] = -1 wf = Workflow(fws, connections) return FWAction(detours=wf) # not successful and not due to convergence problem - FIZZLE raise ValueError("DB insertion successful, but don't know how to \ fix this Firework! Can't continue with workflow...")
def auto_load(cls):
    """Build an SPSubmissionProcessor from an auto-loaded adapter and launchpad."""
    submissions_adapter = SPSubmissionsMongoAdapter.auto_load()
    launchpad = LaunchPad.auto_load()
    return SPSubmissionProcessor(submissions_adapter, launchpad)
def auto_load(cls):
    """Build an SPSubmissionProcessor from an auto-loaded adapter and launchpad."""
    submissions_adapter = SPSubmissionsMongoAdapter.auto_load()
    launchpad = LaunchPad.auto_load()
    return SPSubmissionProcessor(submissions_adapter, launchpad)
def get_lp_and_fw_id_from_task(task, fw_spec):
    """
    Given an instance of a running task and its spec, tries to load the LaunchPad and the current fw_id.
    It will first check for "_add_launchpad_and_fw_id", then try to load from FW.json/FW.yaml file.

    Should be used inside tasks that require access to the LaunchPad and to the whole workflow.

    Args:
        task: An instance of a running task
        fw_spec: The spec of the task

    Returns:
        an instance of LaunchPad and the fw_id of the current task

    Raises:
        RuntimeError: if the LaunchPad/fw_id cannot be determined, or if the
            firework found in the LaunchPad does not match the local FW file.
    """
    # fw_dict is only available when loading from the FW.json/FW.yaml file;
    # the consistency checks at the end are skipped when it is missing.
    fw_dict = None
    if '_add_launchpad_and_fw_id' in fw_spec:
        lp = task.launchpad
        fw_id = task.fw_id

        # lp may be None in offline mode
        if lp is None:
            raise RuntimeError("The LaunchPad in spec is None.")
    else:
        try:
            with open('FW.json', "rt") as fh:
                fw_dict = json.load(fh, cls=MontyDecoder)
        except IOError:
            try:
                with open('FW.yaml', "rt") as fh:
                    # bugfix: yaml.load without an explicit Loader is unsafe and
                    # deprecated; the FW file only needs plain YAML parsing
                    fw_dict = yaml.safe_load(fh)
            except IOError:
                raise RuntimeError(
                    "Launchpad/fw_id not present in spec and No FW.json nor FW.yaml file present: "
                    "impossible to determine fw_id")
        logger.warning(
            "LaunchPad not available from spec. Generated with auto_load.")
        lp = LaunchPad.auto_load()
        fw_id = fw_dict['fw_id']

    # since it is not given that the LaunchPad is the correct one, try to verify if the workflow
    # and the fw_id are being accessed correctly
    try:
        fw = lp.get_fw_by_id(fw_id)
    except ValueError:
        traceback.print_exc()
        raise RuntimeError(
            "The firework with id {} is not present in the LaunchPad {}. The LaunchPad is "
            "probably incorrect.".format(fw_id, lp))

    if fw.state != "RUNNING":
        raise RuntimeError(
            "The firework with id {} from LaunchPad {} is {}. There might be an error in the "
            "selection of the LaunchPad".format(fw_id, lp, fw.state))

    # bugfix: these checks referenced fw_dict unconditionally, raising a
    # NameError whenever the LaunchPad/fw_id came from the spec rather than
    # from the FW file
    if fw_dict is not None:
        if len(fw.tasks) != len(fw_dict['spec']['_tasks']):
            raise RuntimeError(
                "The firework with id {} from LaunchPad {} is has different number of tasks "
                "from the current.".format(fw_id, lp))

        for db_t, dict_t in zip(fw.tasks, fw_dict['spec']['_tasks']):
            if db_t.fw_name != dict_t['_fw_name']:
                # bugfix: the message formatted dict_t['fw_name'] (missing the
                # leading underscore), which itself raised a KeyError
                raise RuntimeError(
                    "The firework with id {} from LaunchPad {} has task that don't match: "
                    "{} and {}.".format(fw_id, lp, db_t.fw_name, dict_t['_fw_name']))

    return lp, fw_id
def add_to_db(self, lpad=None):
    """Add the workflow to the LaunchPad database.

    Generalized for consistency with the other add_to_db implementations in
    this codebase: an explicit LaunchPad can now be passed; existing callers
    with no argument keep the auto-load behavior.

    Args:
        lpad: an optional LaunchPad instance; auto-loaded when None.

    Returns:
        the value returned by lpad.add_wf (previously discarded).
    """
    if lpad is None:
        lpad = LaunchPad.auto_load()
    return lpad.add_wf(self.wf)
from atomate.qchem.workflows.base.FF_then_fragment import get_wf_FF_then_fragment
from fireworks.core.launchpad import LaunchPad
from pymatgen.core import Molecule

# Build the FF-then-fragment workflow for the BF4- anion and submit it to the
# default LaunchPad.
molecule = Molecule.from_file("BF4-.xyz")
workflow = get_wf_FF_then_fragment(molecule=molecule, max_cores=32)
launchpad = LaunchPad.auto_load()
launchpad.add_wf(workflow)
def run_task(self, fw_spec): if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec: prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir']) update_spec = {} # add this later when creating new FW fizzled_parent = True parse_dos = False else: prev_dir = get_loc(fw_spec['prev_vasp_dir']) update_spec = {'prev_vasp_dir': prev_dir, 'prev_task_type': fw_spec['prev_task_type'], 'run_tags': fw_spec['run_tags'], 'parameters': fw_spec.get('parameters')} fizzled_parent = False parse_dos = 'Uniform' in fw_spec['prev_task_type'] if 'run_tags' in fw_spec: self.additional_fields['run_tags'] = fw_spec['run_tags'] else: self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][0]['spec']['run_tags'] if MOVE_TO_GARDEN_DEV: prev_dir = move_to_garden(prev_dir, prod=False) elif MOVE_TO_GARDEN_PROD: prev_dir = move_to_garden(prev_dir, prod=True) # get the directory containing the db file db_dir = os.environ['DB_LOC'] db_path = os.path.join(db_dir, 'tasks_db.json') logging.basicConfig(level=logging.INFO) logger = logging.getLogger('MPVaspDrone') logger.setLevel(logging.INFO) sh = logging.StreamHandler(stream=sys.stdout) sh.setLevel(getattr(logging, 'INFO')) logger.addHandler(sh) with open(db_path) as f: db_creds = json.load(f) drone = MPVaspDrone( host=db_creds['host'], port=db_creds['port'], database=db_creds['database'], user=db_creds['admin_user'], password=db_creds['admin_password'], collection=db_creds['collection'], parse_dos=parse_dos, additional_fields=self.additional_fields, update_duplicates=self.update_duplicates) t_id, d = drone.assimilate(prev_dir, launches_coll=LaunchPad.auto_load().launches) mpsnl = d['snl_final'] if 'snl_final' in d else d['snl'] snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id'] update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id}) print 'ENTERED task id:', t_id stored_data = {'task_id': t_id} if d['state'] == 'successful': update_spec['analysis'] = d['analysis'] 
update_spec['output'] = d['output'] return FWAction(stored_data=stored_data, update_spec=update_spec) # not successful - first test to see if UnconvergedHandler is needed if not fizzled_parent: unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type']) output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml')) ueh = UnconvergedErrorHandler(output_filename=output_dir) if ueh.check() and unconverged_tag not in fw_spec['run_tags']: print 'Unconverged run! Creating dynamic FW...' spec = {'prev_vasp_dir': prev_dir, 'prev_task_type': fw_spec['task_type'], 'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id, 'task_type': fw_spec['prev_task_type'], 'run_tags': list(fw_spec['run_tags']), 'parameters': fw_spec.get('parameters'), '_dupefinder': DupeFinderVasp().to_dict(), '_priority': fw_spec['_priority']} snl = StructureNL.from_dict(spec['mpsnl']) spec['run_tags'].append(unconverged_tag) spec['_queueadapter'] = QA_VASP fws = [] connections = {} f = Composition.from_formula( snl.structure.composition.reduced_formula).alphabetical_formula fws.append(FireWork( [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR'], 'use_CONTCAR': False}), SetupUnconvergedHandlerTask(), get_custodian_task(spec)], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-2)) spec = {'task_type': 'VASP db insertion', '_allow_fizzled_parents': True, '_priority': fw_spec['_priority'], '_queueadapter': QA_DB, 'run_tags': list(fw_spec['run_tags'])} spec['run_tags'].append(unconverged_tag) fws.append( FireWork([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-1)) connections[-2] = -1 wf = Workflow(fws, connections) return FWAction(detours=wf) # not successful and not due to convergence problem - FIZZLE raise ValueError("DB insertion successful, but don't know how to fix this FireWork! Can't continue with workflow...")