def snl_to_wf_elastic(snl, parameters):
    # parameters["user_vasp_settings"] specifies user defined incar/kpoints parameters
    fws = []
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
            '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.as_dict()
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]
    parameters["exact_structure"] = True

    # run GGA structure optimization for force convergence
    spec = snl_to_wf._snl_to_spec(snl, parameters=parameters)
    user_vasp_settings = parameters.get("user_vasp_settings")
    spec = update_spec_force_convergence(spec, user_vasp_settings)
    spec['run_tags'].append("origin")
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    del spec['_dupefinder']
    spec['task_type'] = "Vasp force convergence optimize structure (2x)"
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
            'clean_task_doc': True, 'elastic_constant': "force_convergence"}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    spec = {'task_type': 'Setup Deformed Struct Task', '_priority': priority,
            '_queueadapter': QA_CONTROL}
    fws.append(Firework([SetupDeformedStructTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=3))
    connections[2] = [3]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections,
                    name=Composition(snl.structure.composition.reduced_formula).alphabetical_formula,
                    metadata=wf_meta)
def snl_to_wf_phonon(snl, parameters):
    # parameters["user_vasp_settings"] specifies user defined incar/kpoints parameters
    fws = []
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
            '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.as_dict()
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]
    parameters["exact_structure"] = True

    # run GGA structure optimization for force convergence
    spec = snl_to_wf._snl_to_spec(snl, parameters=parameters)
    user_vasp_settings = parameters.get("user_vasp_settings")
    spec = update_spec_force_convergence(spec, user_vasp_settings)
    spec['run_tags'].append("origin")
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    del spec['_dupefinder']
    spec['task_type'] = "Vasp force convergence optimize structure (2x)"
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
            'clean_task_doc': True, 'elastic_constant': "force_convergence"}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    spec = {'task_type': 'Setup Deformed Struct Task', '_priority': priority,
            '_queueadapter': QA_CONTROL}
    fws.append(Firework([SetupDeformedStructTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=3))
    connections[2] = [3]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections,
                    name=Composition(snl.structure.composition.reduced_formula).alphabetical_formula,
                    metadata=wf_meta)
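# Hedged usage sketch for snl_to_wf_elastic: wrap a structure in a StructureNL
# and build the elastic workflow. The structure, author, priority value, and the
# shape of user_vasp_settings (passed through to update_spec_force_convergence
# above) are illustrative assumptions; the StructureNL import path varies
# across pymatgen versions.
from pymatgen.core import Lattice, Structure
from pymatgen.util.provenance import StructureNL

struct = Structure(Lattice.cubic(3.62), ["Cu"], [[0, 0, 0]])
snl = StructureNL(struct, 'Jane Doe <jane@example.com>', projects=["Elasticity"])
wf = snl_to_wf_elastic(snl, {'priority': 2,
                             'user_vasp_settings': {'incar': {'EDIFF': 1e-6}}})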
def run_task(self, fw_spec):
    # Read structure from previous relaxation
    relaxed_struct = fw_spec['output']['crystal']
    # Generate deformed structures
    d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
    wf = []
    for i, d_struct in enumerate(d_struct_set.def_structs):
        fws = []
        connections = {}
        f = Composition(d_struct.formula).alphabetical_formula
        snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>',
                          projects=["Elasticity"])
        tasks = [AddSNLTask()]
        snl_priority = fw_spec.get('priority', 1)
        spec = {'task_type': 'Add Deformed Struct to SNL database',
                'snl': snl.as_dict(), '_queueadapter': QA_DB,
                '_priority': snl_priority}
        if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
            spec['force_mpsnl'] = snl.as_dict()
            spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
            del spec['snl']
        fws.append(Firework(tasks, spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-1000 + i * 10))
        connections[-1000 + i * 10] = [-999 + i * 10]

        spec = snl_to_wf._snl_to_spec(snl, parameters={'exact_structure': True})
        spec = update_spec_force_convergence(spec)
        spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
        spec['original_task_id'] = fw_spec["task_id"]
        spec['_priority'] = fw_spec['_priority'] * 2
        # Turn off dupefinder for deformed structure
        del spec['_dupefinder']
        spec['task_type'] = "Optimize deformed structure"
        fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                             get_custodian_task(spec)], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-999 + i * 10))

        priority = fw_spec['_priority'] * 3
        spec = {'task_type': 'VASP db insertion', '_priority': priority,
                '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
                'elastic_constant': "deformed_structure", 'clean_task_doc': True,
                'deformation_matrix': d_struct_set.deformations[i].tolist(),
                'original_task_id': fw_spec["task_id"]}
        fws.append(Firework([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-998 + i * 10))
        connections[-999 + i * 10] = [-998 + i * 10]
        wf.append(Workflow(fws, connections))
    return FWAction(additions=wf)
def run_task(self, fw_spec):
    # Read structure from previous relaxation
    relaxed_struct = fw_spec['output']['crystal']
    # Generate deformed structures
    d_struct_set = DeformedStructureSet(relaxed_struct, ns=0.06)
    wf = []
    for i, d_struct in enumerate(d_struct_set.def_structs):
        fws = []
        connections = {}
        f = Composition(d_struct.formula).alphabetical_formula
        snl = StructureNL(d_struct, 'Joseph Montoya <*****@*****.**>',
                          projects=["Elasticity"])
        tasks = [AddSNLTask()]
        snl_priority = fw_spec.get('priority', 1)
        spec = {'task_type': 'Add Deformed Struct to SNL database',
                'snl': snl.as_dict(), '_queueadapter': QA_DB,
                '_priority': snl_priority}
        if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL):
            spec['force_mpsnl'] = snl.as_dict()
            spec['force_snlgroup_id'] = fw_spec['snlgroup_id']
            del spec['snl']
        fws.append(Firework(tasks, spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-1000 + i * 10))
        connections[-1000 + i * 10] = [-999 + i * 10]

        spec = snl_to_wf._snl_to_spec(snl, parameters={'exact_structure': True})
        spec = update_spec_force_convergence(spec)
        spec['deformation_matrix'] = d_struct_set.deformations[i].tolist()
        spec['original_task_id'] = fw_spec["task_id"]
        spec['_priority'] = fw_spec['_priority'] * 2
        # Turn off dupefinder for deformed structure
        del spec['_dupefinder']
        spec['task_type'] = "Optimize deformed structure"
        fws.append(Firework([VaspWriterTask(), SetupElastConstTask(),
                             get_custodian_task(spec)], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-999 + i * 10))

        priority = fw_spec['_priority'] * 3
        spec = {'task_type': 'VASP db insertion', '_priority': priority,
                '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
                'elastic_constant': "deformed_structure", 'clean_task_doc': True,
                'deformation_matrix': d_struct_set.deformations[i].tolist(),
                'original_task_id': fw_spec["task_id"]}
        fws.append(Firework([VaspToDBTask(), AddElasticDataToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-998 + i * 10))
        connections[-999 + i * 10] = [-998 + i * 10]
        wf.append(Workflow(fws, connections))
    return FWAction(additions=wf)
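# Hedged standalone sketch of the deformation step used above, assuming the
# DeformedStructureSet API exactly as this code calls it (the `ns` shear
# magnitude keyword and the `def_structs`/`deformations` attributes); the
# import path and signature vary across pymatgen versions.
from pymatgen.analysis.elasticity.strain import DeformedStructureSet
from pymatgen.core import Lattice, Structure

struct = Structure(Lattice.cubic(3.62), ["Cu"], [[0, 0, 0]])
d_struct_set = DeformedStructureSet(struct, ns=0.06)
for deformation, d_struct in zip(d_struct_set.deformations, d_struct_set.def_structs):
    # each deformation gradient matrix pairs with its deformed structure
    print deformation.tolist(), d_struct.formula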
def append_wf(fw_id, parent_fw_id=None):
    wf = lpdb.workflows.find_one({'nodes': fw_id},
                                 {'parent_links': 1, 'links': 1, 'name': 1})
    try:
        if parent_fw_id is None:
            parent_fw_id = wf['parent_links'][str(fw_id)][-1]
        # non-defused AddEStructureTask v2 already in children?
        for child_fw_id in wf['links'][str(parent_fw_id)]:
            if child_fw_id == parent_fw_id:
                continue
            child_fw = lpdb.fireworks.find_one({'fw_id': child_fw_id},
                                               {'spec.task_type': 1, 'state': 1})
            if child_fw['spec']['task_type'] == 'Controller: add Electronic Structure v2':
                if child_fw['state'] == 'DEFUSED':
                    lpdb.reignite_fw(child_fw_id)
                    print 'AddEStructureTask v2', child_fw_id, 'reignited for', fw_id
                elif child_fw['state'] == 'FIZZLED':
                    lpdb.rerun_fw(child_fw_id)
                    print 'AddEStructureTask v2', child_fw_id, 'marked for rerun for', fw_id
                elif child_fw['state'] == 'COMPLETED':
                    print 'AddEStructureTask v2 already successfully run for', fw_id
                    sec_child_fw_id = wf['links'][str(child_fw_id)][0]
                    sec_child_fw = lpdb.fireworks.find_one({'fw_id': sec_child_fw_id},
                                                           {'spec.task_type': 1, 'state': 1})
                    if sec_child_fw['state'] == 'FIZZLED':
                        lpdb.rerun_fw(sec_child_fw_id)
                        print 'FIZZLED -> marked for rerun:', sec_child_fw_id, sec_child_fw['spec']['task_type']
                else:
                    print 'AddEStructureTask v2 added but neither DEFUSED, FIZZLED, nor COMPLETED for', fw_id
                return
        f = lpdb.get_wf_summary_dict(fw_id)['name'].replace(' ', '_')
        # NOTE: `spec` is not defined in this function; it is assumed to be a
        # module-level dict for the controller task (e.g. with 'task_type' set
        # to 'Controller: add Electronic Structure v2').
        name = get_slug(f + '--' + spec['task_type'])
        fw = Firework([AddEStructureTask()], spec, name=name)
        lpdb.append_wf(Workflow([fw]), [parent_fw_id])
        print name, 'added for', fw_id
    except ValueError:
        raise ValueError('could not append controller task to wf', wf['name'])
def task_dict_to_wf(task_dict, launchpad):
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {'task_type': task_dict['task_type'],
            'run_tags': task_dict['run_tags'],
            'vaspinputset_name': None,
            'vasp': None,
            'mpsnl': task_dict['snl'],
            'snlgroup_id': task_dict['snlgroup_id']}
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict['dir_name_full']

    stored_data = {'error_list': []}
    update_spec = {'prev_vasp_dir': task_dict['dir_name'],
                   'prev_task_type': spec['task_type'],
                   'mpsnl': spec['mpsnl'],
                   'snlgroup_id': spec['snlgroup_id'],
                   'run_tags': spec['run_tags']}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    if task_dict['completed_at']:
        complete_date = datetime.datetime.strptime(task_dict['completed_at'],
                                                   "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
    else:
        state_history = []

    launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None,
                       action=fwaction, state_history=state_history,
                       launch_id=l_id, fw_id=fw_id)]

    f = Composition(task_dict['pretty_formula']).alphabetical_formula
    fw = Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']),
                  launches=launches, state='COMPLETED', created_on=None,
                  fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
    wf_meta['run_version'] = 'preproduction (0)'

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print 'ADDED', fw_id
    # return fw_id
    return fw_id
def task_dict_to_wf(task_dict, launchpad):
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {'task_type': task_dict['task_type'],
            'run_tags': task_dict['run_tags'],
            'vaspinputset_name': None,
            'vasp': None,
            'mpsnl': task_dict['snl'],
            'snlgroup_id': task_dict['snlgroup_id']}
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict['dir_name_full']

    stored_data = {'error_list': []}
    update_spec = {'prev_vasp_dir': task_dict['dir_name'],
                   'prev_task_type': spec['task_type'],
                   'mpsnl': spec['mpsnl'],
                   'snlgroup_id': spec['snlgroup_id'],
                   'run_tags': spec['run_tags']}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    if task_dict['completed_at']:
        complete_date = datetime.datetime.strptime(task_dict['completed_at'],
                                                   "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, 'state': 'COMPLETED'}]
    else:
        state_history = []

    launches = [Launch('COMPLETED', launch_dir, fworker=None, host=None, ip=None,
                       action=fwaction, state_history=state_history,
                       launch_id=l_id, fw_id=fw_id)]

    f = Composition.from_formula(task_dict['pretty_formula']).alphabetical_formula
    fw = FireWork(tasks, spec, name=get_slug(f + '--' + spec['task_type']),
                  launches=launches, state='COMPLETED', created_on=None,
                  fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict['snl']))
    wf_meta['run_version'] = 'preproduction (0)'

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print 'ADDED', fw_id
    # return fw_id
    return fw_id
def add_namefile(original_wf, use_slug=True):
    """
    Every FireWork begins by writing an empty file with the name
    "FW--<fw.name>". This makes it easy to figure out what jobs are in what
    launcher directories, e.g. "ls -l launch*/FW--*" from within a "block" dir.

    Args:
        original_wf (Workflow)
        use_slug (bool): whether to replace whitespace-type chars with a slug
    """
    wf_dict = original_wf.to_dict()
    for idx, fw in enumerate(wf_dict["fws"]):
        fname = "FW--{}".format(fw["name"])
        if use_slug:
            fname = get_slug(fname)
        wf_dict["fws"][idx]["spec"]["_tasks"].insert(0, FileWriteTask(
            files_to_write=[{"filename": fname, "contents": ""}]).to_dict())
    return Workflow.from_dict(wf_dict)
def add_namefile(original_wf, use_slug=True):
    """
    Every FireWork begins by writing an empty file with the name
    "FW--<fw.name>". This makes it easy to figure out what jobs are in what
    launcher directories, e.g. "ls -l launch*/FW--*" from within a "block" dir.

    Args:
        original_wf (Workflow)
        use_slug (bool): whether to replace whitespace-type chars with a slug

    Returns:
        Workflow
    """
    for idx, fw in enumerate(original_wf.fws):
        fname = "FW--{}".format(fw.name)
        if use_slug:
            fname = get_slug(fname)
        original_wf.fws[idx].tasks.insert(0, FileWriteTask(
            files_to_write=[{"filename": fname, "contents": ""}]))
    return original_wf
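# Hedged usage sketch for add_namefile: wrap an existing FireWorks Workflow so
# every Firework first drops an "FW--<name>" marker file. The ScriptTask and
# the one-Firework workflow here are illustrative only.
from fireworks import Firework, Workflow
from fireworks.user_objects.firetasks.script_task import ScriptTask

fw = Firework([ScriptTask.from_str('echo "hello"')], name='hello task')
wf = add_namefile(Workflow([fw], name='hello wf'))
# the first task of each Firework is now a FileWriteTask writing "FW--hello_task"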
def run_task(self, fw_spec):
    # import here to prevent import errors in bigger MPCollab
    # get the band structure and nelect from files
    """
    prev_dir = get_loc(fw_spec['prev_vasp_dir'])
    vasprun_loc = zpath(os.path.join(prev_dir, 'vasprun.xml'))
    kpoints_loc = zpath(os.path.join(prev_dir, 'KPOINTS'))

    vr = Vasprun(vasprun_loc)
    bs = vr.get_band_structure(kpoints_filename=kpoints_loc)
    """
    filename = get_slug(
        'JOB--' + fw_spec['mpsnl'].structure.composition.reduced_formula +
        '--' + fw_spec['task_type'])
    with open(filename, 'w+') as f:
        f.write('')

    # get the band structure and nelect from DB
    block_part = get_block_part(fw_spec['prev_vasp_dir'])

    db_dir = os.environ['DB_LOC']
    assert isinstance(db_dir, object)
    db_path = os.path.join(db_dir, 'tasks_db.json')
    with open(db_path) as f:
        creds = json.load(f)

    connection = MongoClient(creds['host'], creds['port'])
    tdb = connection[creds['database']]
    tdb.authenticate(creds['admin_user'], creds['admin_password'])

    props = {"calculations": 1, "task_id": 1, "state": 1, "pseudo_potential": 1,
             "run_type": 1, "is_hubbard": 1, "hubbards": 1, "unit_cell_formula": 1}
    m_task = tdb.tasks.find_one({"dir_name": block_part}, props)
    if not m_task:
        time.sleep(60)  # only thing to think of is wait for DB insertion(?)
        m_task = tdb.tasks.find_one({"dir_name": block_part}, props)

    if not m_task:
        raise ValueError("Could not find task with dir_name: {}".format(block_part))

    if m_task['state'] != 'successful':
        raise ValueError("Cannot run Boltztrap; parent job unsuccessful")

    nelect = m_task['calculations'][0]['input']['parameters']['NELECT']
    bs_id = m_task['calculations'][0]['band_structure_fs_id']
    print bs_id, type(bs_id)
    fs = gridfs.GridFS(tdb, 'band_structure_fs')
    bs_dict = json.loads(fs.get(bs_id).read())
    bs_dict['structure'] = m_task['calculations'][0]['output']['crystal']
    bs = BandStructure.from_dict(bs_dict)
    print 'Band Structure found:', bool(bs)
    print nelect

    # run Boltztrap
    runner = BoltztrapRunner(bs, nelect)
    dir = runner.run(path_dir=os.getcwd())

    # put the data in the database
    bta = BoltztrapAnalyzer.from_files(dir)

    # 8/21/15 - Anubhav removed fs_id (also see line further below,
    # ted['boltztrap_full_fs_id'] ...)
    # 8/21/15 - this is to save space in MongoDB, as well as non-use of full
    # Boltztrap output (vs rerun)
    """
    data = bta.as_dict()
    data.update(get_meta_from_structure(bs._structure))
    data['snlgroup_id'] = fw_spec['snlgroup_id']
    data['run_tags'] = fw_spec['run_tags']
    data['snl'] = fw_spec['mpsnl']
    data['dir_name_full'] = dir
    data['dir_name'] = get_block_part(dir)
    data['task_id'] = m_task['task_id']
    del data['hall']  # remove because it is too large and not useful
    fs = gridfs.GridFS(tdb, "boltztrap_full_fs")
    btid = fs.put(json.dumps(jsanitize(data)))
    """

    # now for the "sanitized" data
    ted = bta.as_dict()
    del ted['seebeck']
    del ted['hall']
    del ted['kappa']
    del ted['cond']
    # ted['boltztrap_full_fs_id'] = btid
    ted['snlgroup_id'] = fw_spec['snlgroup_id']
    ted['run_tags'] = fw_spec['run_tags']
    ted['snl'] = fw_spec['mpsnl'].as_dict()
    ted['dir_name_full'] = dir
    ted['dir_name'] = get_block_part(dir)
    ted['task_id'] = m_task['task_id']

    ted['pf_doping'] = bta.get_power_factor(output='tensor', relaxation_time=self.TAU)
    ted['zt_doping'] = bta.get_zt(output='tensor', relaxation_time=self.TAU, kl=self.KAPPAL)

    ted['pf_eigs'] = self.get_eigs(ted, 'pf_doping')
    ted['pf_best'] = self.get_extreme(ted, 'pf_eigs')
    ted['pf_best_dope18'] = self.get_extreme(ted, 'pf_eigs', max_didx=3)
    ted['pf_best_dope19'] = self.get_extreme(ted, 'pf_eigs', max_didx=4)
    ted['zt_eigs'] = self.get_eigs(ted, 'zt_doping')
    ted['zt_best'] = self.get_extreme(ted, 'zt_eigs')
    ted['zt_best_dope18'] = self.get_extreme(ted, 'zt_eigs', max_didx=3)
    ted['zt_best_dope19'] = self.get_extreme(ted, 'zt_eigs', max_didx=4)
    ted['seebeck_eigs'] = self.get_eigs(ted, 'seebeck_doping')
    ted['seebeck_best'] = self.get_extreme(ted, 'seebeck_eigs')
    ted['seebeck_best_dope18'] = self.get_extreme(ted, 'seebeck_eigs', max_didx=3)
    ted['seebeck_best_dope19'] = self.get_extreme(ted, 'seebeck_eigs', max_didx=4)
    ted['cond_eigs'] = self.get_eigs(ted, 'cond_doping')
    ted['cond_best'] = self.get_extreme(ted, 'cond_eigs')
    ted['cond_best_dope18'] = self.get_extreme(ted, 'cond_eigs', max_didx=3)
    ted['cond_best_dope19'] = self.get_extreme(ted, 'cond_eigs', max_didx=4)
    ted['kappa_eigs'] = self.get_eigs(ted, 'kappa_doping')
    ted['kappa_best'] = self.get_extreme(ted, 'kappa_eigs', maximize=False)
    ted['kappa_best_dope18'] = self.get_extreme(ted, 'kappa_eigs', maximize=False, max_didx=3)
    ted['kappa_best_dope19'] = self.get_extreme(ted, 'kappa_eigs', maximize=False, max_didx=4)

    try:
        from mpcollab.thermoelectrics.boltztrap_TE import BoltzSPB
        bzspb = BoltzSPB(ted)
        maxpf_p = bzspb.get_maximum_power_factor(
            'p', temperature=0, tau=1E-14, ZT=False, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxpf_n = bzspb.get_maximum_power_factor(
            'n', temperature=0, tau=1E-14, ZT=False, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxzt_p = bzspb.get_maximum_power_factor(
            'p', temperature=0, tau=1E-14, ZT=True, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxzt_n = bzspb.get_maximum_power_factor(
            'n', temperature=0, tau=1E-14, ZT=True, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        ted['zt_best_finemesh'] = {'p': maxzt_p, 'n': maxzt_n}
        ted['pf_best_finemesh'] = {'p': maxpf_p, 'n': maxpf_n}
    except:
        import traceback
        traceback.print_exc()
        print 'COULD NOT GET FINE MESH DATA'

    # add is_compatible
    mpc = MaterialsProjectCompatibility("Advanced")
    try:
        func = m_task["pseudo_potential"]["functional"]
        labels = m_task["pseudo_potential"]["labels"]
        symbols = ["{} {}".format(func, label) for label in labels]
        parameters = {"run_type": m_task["run_type"],
                      "is_hubbard": m_task["is_hubbard"],
                      "hubbards": m_task["hubbards"],
                      "potcar_symbols": symbols}
        entry = ComputedEntry(Composition(m_task["unit_cell_formula"]), 0.0, 0.0,
                              parameters=parameters, entry_id=m_task["task_id"])
        ted["is_compatible"] = bool(mpc.process_entry(entry))
    except:
        # import locally: the import in the except block above only runs if the
        # fine-mesh step failed, so traceback may not be bound yet
        import traceback
        traceback.print_exc()
        print 'ERROR in getting compatibility, task_id: {}'.format(m_task["task_id"])
        ted["is_compatible"] = None

    tdb.boltztrap.insert(jsanitize(ted))

    update_spec = {'prev_vasp_dir': fw_spec['prev_vasp_dir'],
                   'boltztrap_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'].as_dict(),
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags'],
                   'parameters': fw_spec.get('parameters')}

    return FWAction(update_spec=update_spec)
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object.
    The workflow has two steps - a structure relaxation and a static run.

    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('', gzipped=False)
    for j in jobs:
        # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(),
                MeshSymmetryErrorHandler(), NonConvergingErrorHandler()]
    c_params = {'jobs': [j.as_dict() for j in jobs],
                'handlers': [h.as_dict() for h in handlers], 'max_errors': 5}
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POTCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(Firework(tasks, spec,
                        name=get_name(structure, spec['task_type']), fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(Firework([VaspToDBTaskEx()], spec,
                        name=get_name(structure, spec['task_type']), fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx({'jobs': [VaspJob('', auto_npar=False).as_dict()],
                                         'handlers': [h.as_dict() for h in handlers],
                                         'max_errors': 5})
    fws.append(Firework([copytask, setuptask, custodiantask], spec,
                        name=get_name(structure, spec['task_type']), fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(Firework([VaspToDBTaskEx()], spec,
                        name=get_name(structure, spec['task_type']), fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
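# Hedged usage sketch for structure_to_wf: build the relax + static workflow
# for a simple structure and add it to a LaunchPad. The NaCl structure and the
# auto-loaded LaunchPad are illustrative assumptions; the pymatgen import path
# varies across versions.
from fireworks import LaunchPad
from pymatgen.core import Lattice, Structure

nacl = Structure.from_spacegroup("Fm-3m", Lattice.cubic(5.69),
                                 ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
wf = structure_to_wf(nacl)
launchpad = LaunchPad.auto_load()  # reads the configured my_launchpad.yaml
launchpad.add_wf(wf)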
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.',
                           reserve=False, strm_lvl='INFO',
                           create_launcher_dir=False, fill_mode=False,
                           fw_id=None):
    """
    Submit a single job to the queue.

    Args:
        launchpad (LaunchPad)
        fworker (FWorker)
        qadapter (QueueAdapterBase)
        launcher_dir (str): The directory where to submit the job
        reserve (bool): Whether to queue in reservation mode
        strm_lvl (str): level at which to stream log messages
        create_launcher_dir (bool): Whether to create a subfolder launcher+timestamp, if needed
        fill_mode (bool): whether to submit jobs even when there is nothing to run
            (only in non-reservation mode)
        fw_id (int): specific fw_id to reserve (reservation mode only)
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir,
                             stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    fw, launch_id = None, None  # only needed in reservation mode

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch "
                         "when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if fill_mode and reserve:
        raise ValueError("Fill_mode cannot be used in conjunction with reserve mode!")

    if fw_id and not reserve:
        raise ValueError("qlaunch for specific fireworks may only be used in reservation mode.")

    if fill_mode or launchpad.run_exists(fworker):
        launch_id = None
        try:
            if reserve:
                if fw_id:
                    l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir, fw_id=fw_id)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # update qadapter job_name based on FW name
                job_name = get_slug(fw.name)[0:QUEUE_JOBNAME_MAXLEN]
                qadapter.update({'job_name': job_name})

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using Firework spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # reservation mode includes --fw_id in rocket launch
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                # update launcher_dir if _launch_dir is selected in reserved fw
                if '_launch_dir' in fw.spec:
                    fw_launch_dir = os.path.expandvars(fw.spec['_launch_dir'])
                    if not os.path.isabs(fw_launch_dir):
                        fw_launch_dir = os.path.join(launcher_dir, fw_launch_dir)
                    launcher_dir = fw_launch_dir
                    makedirs_p(launcher_dir)
                    launchpad.change_launch_dir(launch_id, launcher_dir)
                elif create_launcher_dir:
                    # create launcher_dir
                    launcher_dir = create_datestamp_dir(launcher_dir, l_logger,
                                                        prefix='launcher_')
                    launchpad.change_launch_dir(launch_id, launcher_dir)
            elif create_launcher_dir:
                # create launcher_dir
                launcher_dir = create_datestamp_dir(launcher_dir, l_logger,
                                                    prefix='launcher_')

            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if '--offline' in qadapter['rocket_launch']:
                    setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)

                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    raise RuntimeError('queue script could not be submitted, check queue '
                                       'script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            if reserve and launch_id is not None:
                try:
                    l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(
                        fw.fw_id, launch_id))
                    launchpad.cancel_reservation(launch_id)
                    launchpad.forget_offline(launch_id)
                except:
                    log_exception(l_logger,
                                  'Error unreserving FW with fw_id {}'.format(fw.fw_id))
            return False

    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return None  # note: this is a hack (rather than False) to indicate a soft failure to rapidfire()
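# Hedged usage sketch for launch_rocket_to_queue: reserve and submit one job.
# CommonAdapter is FireWorks' stock queue adapter; the PBS q_type and the
# singleshot rocket_launch string are illustrative and must match the cluster.
from fireworks import LaunchPad
from fireworks.core.fworker import FWorker
from fireworks.user_objects.queue_adapters.common_adapter import CommonAdapter

launchpad = LaunchPad.auto_load()
qadapter = CommonAdapter(q_type="PBS", rocket_launch="rlaunch singleshot")
launch_rocket_to_queue(launchpad, FWorker(), qadapter, launcher_dir='.',
                       reserve=True, create_launcher_dir=True)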
def run_task(self, fw_spec): print "sleeping 10s for Mongo" time.sleep(10) print "done sleeping" print "the gap is {}, the cutoff is {}".format(fw_spec["analysis"]["bandgap"], self.gap_cutoff) if fw_spec["analysis"]["bandgap"] >= self.gap_cutoff: static_dens = 90 uniform_dens = 1000 line_dens = 20 else: static_dens = 450 uniform_dens = 1500 line_dens = 30 if fw_spec["analysis"]["bandgap"] <= self.metal_cutoff: user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2} else: user_incar_settings = {} print "Adding more runs..." type_name = "GGA+U" if "GGA+U" in fw_spec["prev_task_type"] else "GGA" snl = StructureNL.from_dict(fw_spec["mpsnl"]) f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula fws = [] connections = {} priority = fw_spec["_priority"] trackers = [ Tracker("FW_job.out"), Tracker("FW_job.error"), Tracker("vasp.out"), Tracker("OUTCAR"), Tracker("OSZICAR"), ] trackers_db = [Tracker("FW_job.out"), Tracker("FW_job.error")] # run GGA static spec = fw_spec # pass all the items from the current spec to the new spec.update( { "task_type": "{} static v2".format(type_name), "_queueadapter": QA_VASP_SMALL, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, "_trackers": trackers, } ) fws.append( Firework( [ VaspCopyTask({"use_CONTCAR": True, "skip_CHGCAR": True}), SetupStaticRunTask({"kpoints_density": static_dens, "user_incar_settings": user_incar_settings}), get_custodian_task(spec), ], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-10, ) ) # insert into DB - GGA static spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority * 2, "_dupefinder": DupeFinderDB().to_dict(), "_trackers": trackers_db, } fws.append(Firework([VaspToDBTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-9)) connections[-10] = -9 # run GGA Uniform spec = { "task_type": "{} Uniform v2".format(type_name), "_queueadapter": QA_VASP, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, "_trackers": trackers, } fws.append( Firework( [ VaspCopyTask({"use_CONTCAR": False}), SetupNonSCFTask({"mode": "uniform", "kpoints_density": uniform_dens}), get_custodian_task(spec), ], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-8, ) ) connections[-9] = -8 # insert into DB - GGA Uniform spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority * 2, "_dupefinder": DupeFinderDB().to_dict(), "_trackers": trackers_db, } fws.append( Firework( [VaspToDBTask({"parse_uniform": True})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-7 ) ) connections[-8] = -7 # run GGA Band structure spec = { "task_type": "{} band structure v2".format(type_name), "_queueadapter": QA_VASP, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, "_trackers": trackers, } fws.append( Firework( [ VaspCopyTask({"use_CONTCAR": False}), SetupNonSCFTask({"mode": "line", "kpoints_line_density": line_dens}), get_custodian_task(spec), ], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-6, ) ) connections[-7] = [-6] # insert into DB - GGA Band structure spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority * 2, "_dupefinder": DupeFinderDB().to_dict(), "_trackers": trackers_db, } fws.append(Firework([VaspToDBTask({})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-5)) connections[-6] = -5 if fw_spec.get("parameters") and 
fw_spec["parameters"].get("boltztrap"): # run Boltztrap from mpworks.firetasks.boltztrap_tasks import BoltztrapRunTask spec = { "task_type": "{} Boltztrap".format(type_name), "_queueadapter": QA_DB, "_dupefinder": DupeFinderDB().to_dict(), "_priority": priority, } fws.append(Firework([BoltztrapRunTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-4)) connections[-7].append(-4) wf = Workflow(fws, connections) print "Done adding more runs..." return FWAction(additions=wf)
def snl_to_wf(snl, parameters=None):
    fws = []
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    snl_spec = {}
    if 'snlgroup_id' in parameters:
        if 'mpsnl' in parameters:
            snl_spec['mpsnl'] = parameters['mpsnl']
        elif isinstance(snl, MPStructureNL):
            snl_spec['mpsnl'] = snl.as_dict()
        else:
            raise ValueError("improper use of force SNL")
        snl_spec['snlgroup_id'] = parameters['snlgroup_id']
    else:
        # add the SNL to the SNL DB and figure out duplicate group
        tasks = [AddSNLTask()]
        spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
                '_queueadapter': QA_DB, '_priority': snl_priority}
        fws.append(Firework(tasks, spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=0))
        connections[0] = [1]

    trackers = [Tracker('FW_job.out'), Tracker('FW_job.error'), Tracker('vasp.out'),
                Tracker('OUTCAR'), Tracker('OSZICAR'), Tracker('OUTCAR.relax1'),
                Tracker('OUTCAR.relax2')]
    trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True, parameters=parameters)
    spec.update(snl_spec)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    spec['_trackers'] = trackers
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority * 2,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).as_dict()
    ggau_compound = ('LDAU' in incar and incar['LDAU'])

    if not parameters.get('skip_bandstructure', False) and \
            (not ggau_compound or parameters.get('force_gga_bandstructure', False)):
        spec = {'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority, '_queueadapter': QA_CONTROL}
        fws.append(Firework([AddEStructureTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=3))
        connections[2] = [3]

    if ggau_compound:
        spec = _snl_to_spec(snl, enforce_gga=False, parameters=parameters)
        del spec['vasp']  # we are stealing all VASP params and such from previous run
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        spec['_trackers'] = trackers
        fws.append(Firework([VaspCopyTask(), SetupGGAUTask(), get_custodian_task(spec)],
                            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=10))
        connections[2].append(10)

        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
        fws.append(Firework([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=11))
        connections[10] = [11]

        if not parameters.get('skip_bandstructure', False):
            spec = {'task_type': 'Controller: add Electronic Structure v2',
                    '_priority': priority, '_queueadapter': QA_CONTROL}
            fws.append(Firework([AddEStructureTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']), fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'  # not maintained

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections,
                    name=Composition(snl.structure.composition.reduced_formula).alphabetical_formula,
                    metadata=wf_meta)
def get_name(structure, task_type):
    return get_slug(structure.formula + '--' + task_type)
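# Illustrative example of get_name: get_slug makes the combined string
# filesystem-safe (whitespace and similar characters replaced), so a Cu
# structure would yield something like "Cu1--GGA_static_example"; the exact
# form depends on the get_slug implementation.
from pymatgen.core import Lattice, Structure

struct = Structure(Lattice.cubic(3.62), ["Cu"], [[0, 0, 0]])
print get_name(struct, 'GGA static example')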
def run_task(self, fw_spec):
    # import here to prevent import errors in bigger MPCollab
    # get the band structure and nelect from files
    """
    prev_dir = get_loc(fw_spec['prev_vasp_dir'])
    vasprun_loc = zpath(os.path.join(prev_dir, 'vasprun.xml'))
    kpoints_loc = zpath(os.path.join(prev_dir, 'KPOINTS'))

    vr = Vasprun(vasprun_loc)
    bs = vr.get_band_structure(kpoints_filename=kpoints_loc)
    """
    filename = get_slug(
        'JOB--' + fw_spec['mpsnl'].structure.composition.reduced_formula +
        '--' + fw_spec['task_type'])
    with open(filename, 'w+') as f:
        f.write('')

    # get the band structure and nelect from DB
    block_part = get_block_part(fw_spec['prev_vasp_dir'])

    db_dir = os.environ['DB_LOC']
    assert isinstance(db_dir, object)
    db_path = os.path.join(db_dir, 'tasks_db.json')
    with open(db_path) as f:
        creds = json.load(f)

    connection = MongoClient(creds['host'], creds['port'])
    tdb = connection[creds['database']]
    tdb.authenticate(creds['admin_user'], creds['admin_password'])

    props = {"calculations": 1, "task_id": 1, "state": 1, "pseudo_potential": 1,
             "run_type": 1, "is_hubbard": 1, "hubbards": 1, "unit_cell_formula": 1}
    m_task = tdb.tasks.find_one({"dir_name": block_part}, props)
    if not m_task:
        time.sleep(60)  # only thing to think of is wait for DB insertion(?)
        m_task = tdb.tasks.find_one({"dir_name": block_part}, props)

    if not m_task:
        raise ValueError("Could not find task with dir_name: {}".format(block_part))

    if m_task['state'] != 'successful':
        raise ValueError("Cannot run Boltztrap; parent job unsuccessful")

    nelect = m_task['calculations'][0]['input']['parameters']['NELECT']
    bs_id = m_task['calculations'][0]['band_structure_fs_id']
    print bs_id, type(bs_id)
    fs = gridfs.GridFS(tdb, 'band_structure_fs')
    bs_dict = json.loads(fs.get(bs_id).read())
    bs_dict['structure'] = m_task['calculations'][0]['output']['crystal']
    bs = BandStructure.from_dict(bs_dict)
    print("find previous run with block_part {}".format(block_part))
    print 'Band Structure found:', bool(bs)
    print(bs.as_dict())
    print("nelect: {}".format(nelect))

    # run Boltztrap
    doping = []
    for d in [1e16, 1e17, 1e18, 1e19, 1e20]:
        doping.extend([1 * d, 2.5 * d, 5 * d, 7.5 * d])
    doping.append(1e21)
    runner = BoltztrapRunner(bs, nelect, doping=doping)
    dir = runner.run(path_dir=os.getcwd())

    # put the data in the database
    bta = BoltztrapAnalyzer.from_files(dir)

    # 8/21/15 - Anubhav removed fs_id (also see line further below,
    # ted['boltztrap_full_fs_id'] ...)
    # 8/21/15 - this is to save space in MongoDB, as well as non-use of full
    # Boltztrap output (vs rerun)
    """
    data = bta.as_dict()
    data.update(get_meta_from_structure(bs._structure))
    data['snlgroup_id'] = fw_spec['snlgroup_id']
    data['run_tags'] = fw_spec['run_tags']
    data['snl'] = fw_spec['mpsnl']
    data['dir_name_full'] = dir
    data['dir_name'] = get_block_part(dir)
    data['task_id'] = m_task['task_id']
    del data['hall']  # remove because it is too large and not useful
    fs = gridfs.GridFS(tdb, "boltztrap_full_fs")
    btid = fs.put(json.dumps(jsanitize(data)))
    """

    # now for the "sanitized" data
    ted = bta.as_dict()
    del ted['seebeck']
    del ted['hall']
    del ted['kappa']
    del ted['cond']
    # ted['boltztrap_full_fs_id'] = btid
    ted['snlgroup_id'] = fw_spec['snlgroup_id']
    ted['run_tags'] = fw_spec['run_tags']
    ted['snl'] = fw_spec['mpsnl'].as_dict()
    ted['dir_name_full'] = dir
    ted['dir_name'] = get_block_part(dir)
    ted['task_id'] = m_task['task_id']

    ted['pf_doping'] = bta.get_power_factor(output='tensor', relaxation_time=self.TAU)
    ted['zt_doping'] = bta.get_zt(output='tensor', relaxation_time=self.TAU, kl=self.KAPPAL)

    ted['pf_eigs'] = self.get_eigs(ted, 'pf_doping')
    ted['pf_best'] = self.get_extreme(ted, 'pf_eigs')
    ted['pf_best_dope18'] = self.get_extreme(ted, 'pf_eigs', max_didx=3)
    ted['pf_best_dope19'] = self.get_extreme(ted, 'pf_eigs', max_didx=4)
    ted['zt_eigs'] = self.get_eigs(ted, 'zt_doping')
    ted['zt_best'] = self.get_extreme(ted, 'zt_eigs')
    ted['zt_best_dope18'] = self.get_extreme(ted, 'zt_eigs', max_didx=3)
    ted['zt_best_dope19'] = self.get_extreme(ted, 'zt_eigs', max_didx=4)
    ted['seebeck_eigs'] = self.get_eigs(ted, 'seebeck_doping')
    ted['seebeck_best'] = self.get_extreme(ted, 'seebeck_eigs')
    ted['seebeck_best_dope18'] = self.get_extreme(ted, 'seebeck_eigs', max_didx=3)
    ted['seebeck_best_dope19'] = self.get_extreme(ted, 'seebeck_eigs', max_didx=4)
    ted['cond_eigs'] = self.get_eigs(ted, 'cond_doping')
    ted['cond_best'] = self.get_extreme(ted, 'cond_eigs')
    ted['cond_best_dope18'] = self.get_extreme(ted, 'cond_eigs', max_didx=3)
    ted['cond_best_dope19'] = self.get_extreme(ted, 'cond_eigs', max_didx=4)
    ted['kappa_eigs'] = self.get_eigs(ted, 'kappa_doping')
    ted['kappa_best'] = self.get_extreme(ted, 'kappa_eigs', maximize=False)
    ted['kappa_best_dope18'] = self.get_extreme(ted, 'kappa_eigs', maximize=False, max_didx=3)
    ted['kappa_best_dope19'] = self.get_extreme(ted, 'kappa_eigs', maximize=False, max_didx=4)

    try:
        from mpcollab.thermoelectrics.boltztrap_TE import BoltzSPB
        bzspb = BoltzSPB(ted)
        maxpf_p = bzspb.get_maximum_power_factor(
            'p', temperature=0, tau=1E-14, ZT=False, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxpf_n = bzspb.get_maximum_power_factor(
            'n', temperature=0, tau=1E-14, ZT=False, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxzt_p = bzspb.get_maximum_power_factor(
            'p', temperature=0, tau=1E-14, ZT=True, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        maxzt_n = bzspb.get_maximum_power_factor(
            'n', temperature=0, tau=1E-14, ZT=True, kappal=0.5,
            otherprops=('get_seebeck_mu_eig', 'get_conductivity_mu_eig',
                        'get_thermal_conductivity_mu_eig',
                        'get_average_eff_mass_tensor_mu'))
        ted['zt_best_finemesh'] = {'p': maxzt_p, 'n': maxzt_n}
        ted['pf_best_finemesh'] = {'p': maxpf_p, 'n': maxpf_n}
    except:
        import traceback
        traceback.print_exc()
        print 'COULD NOT GET FINE MESH DATA'

    # add is_compatible
    mpc = MaterialsProjectCompatibility("Advanced")
    try:
        func = m_task["pseudo_potential"]["functional"]
        labels = m_task["pseudo_potential"]["labels"]
        symbols = ["{} {}".format(func, label) for label in labels]
        parameters = {"run_type": m_task["run_type"],
                      "is_hubbard": m_task["is_hubbard"],
                      "hubbards": m_task["hubbards"],
                      "potcar_symbols": symbols}
        entry = ComputedEntry(Composition(m_task["unit_cell_formula"]), 0.0, 0.0,
                              parameters=parameters, entry_id=m_task["task_id"])
        ted["is_compatible"] = bool(mpc.process_entry(entry))
    except:
        # import locally: the import in the except block above only runs if the
        # fine-mesh step failed, so traceback may not be bound yet
        import traceback
        traceback.print_exc()
        print 'ERROR in getting compatibility, task_id: {}'.format(m_task["task_id"])
        ted["is_compatible"] = None

    tdb.boltztrap.insert(jsanitize(ted))

    update_spec = {'prev_vasp_dir': fw_spec['prev_vasp_dir'],
                   'boltztrap_dir': os.getcwd(),
                   'prev_task_type': fw_spec['task_type'],
                   'mpsnl': fw_spec['mpsnl'].as_dict(),
                   'snlgroup_id': fw_spec['snlgroup_id'],
                   'run_tags': fw_spec['run_tags'],
                   'parameters': fw_spec.get('parameters')}

    return FWAction(update_spec=update_spec)
def run_task(self, fw_spec):
    if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
        prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
        update_spec = {}  # add this later when creating new FW
        fizzled_parent = True
        parse_dos = False
    else:
        prev_dir = get_loc(fw_spec['prev_vasp_dir'])
        update_spec = {'prev_vasp_dir': prev_dir,
                       'prev_task_type': fw_spec['prev_task_type'],
                       'run_tags': fw_spec['run_tags'],
                       'parameters': fw_spec.get('parameters')}
        fizzled_parent = False
        parse_dos = 'Uniform' in fw_spec['prev_task_type']

    if 'run_tags' in fw_spec:
        self.additional_fields['run_tags'] = fw_spec['run_tags']
    else:
        self.additional_fields['run_tags'] = fw_spec['_fizzled_parents'][0]['spec']['run_tags']

    if MOVE_TO_GARDEN_DEV:
        prev_dir = move_to_garden(prev_dir, prod=False)
    elif MOVE_TO_GARDEN_PROD:
        prev_dir = move_to_garden(prev_dir, prod=True)

    # get the directory containing the db file
    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('MPVaspDrone')
    logger.setLevel(logging.INFO)
    sh = logging.StreamHandler(stream=sys.stdout)
    sh.setLevel(getattr(logging, 'INFO'))
    logger.addHandler(sh)

    with open(db_path) as f:
        db_creds = json.load(f)

    drone = MPVaspDrone(host=db_creds['host'], port=db_creds['port'],
                        database=db_creds['database'],
                        user=db_creds['admin_user'],
                        password=db_creds['admin_password'],
                        collection=db_creds['collection'],
                        parse_dos=parse_dos,
                        additional_fields=self.additional_fields,
                        update_duplicates=self.update_duplicates)
    t_id, d = drone.assimilate(prev_dir, launches_coll=LaunchPad.auto_load().launches)

    mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
    snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d else d['snlgroup_id']
    update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

    print 'ENTERED task id:', t_id
    stored_data = {'task_id': t_id}
    if d['state'] == 'successful':
        update_spec['analysis'] = d['analysis']
        update_spec['output'] = d['output']
        return FWAction(stored_data=stored_data, update_spec=update_spec)

    # not successful - first test to see if UnconvergedHandler is needed
    if not fizzled_parent:
        unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type'])
        output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
        ueh = UnconvergedErrorHandler(output_filename=output_dir)
        if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
            print 'Unconverged run! Creating dynamic FW...'

            spec = {'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']}
            snl = StructureNL.from_dict(spec['mpsnl'])
            spec['run_tags'].append(unconverged_tag)
            spec['_queueadapter'] = QA_VASP

            fws = []
            connections = {}

            f = Composition.from_formula(
                snl.structure.composition.reduced_formula).alphabetical_formula

            fws.append(FireWork(
                [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CONTCAR'],
                               'use_CONTCAR': False}),
                 SetupUnconvergedHandlerTask(),
                 get_custodian_task(spec)],
                spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-2))

            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'], '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])}
            spec['run_tags'].append(unconverged_tag)
            fws.append(FireWork([VaspToDBTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']), fw_id=-1))

            connections[-2] = -1
            wf = Workflow(fws, connections)
            return FWAction(detours=wf)

    # not successful and not due to convergence problem - FIZZLE
    raise ValueError("DB insertion successful, but don't know how to fix this "
                     "FireWork! Can't continue with workflow...")
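# Minimal sketch contrasting the two dynamic-workflow FWAction modes used in
# this file (standard FireWorks semantics): `additions` attaches a new,
# independent workflow, while `detours` (as in the unconverged-handler branch
# above) splices the new workflow in before the current Firework's children.
from fireworks import Firework, FWAction, Workflow
from fireworks.user_objects.firetasks.script_task import ScriptTask

fixup = Workflow([Firework([ScriptTask.from_str('echo "retry"')])])
FWAction(additions=fixup)  # schedule fixup alongside existing children
FWAction(detours=fixup)    # run fixup before existing children continue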
def snl_to_wf(snl, parameters=None):
    fws = []
    connections = {}
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.to_dict,
            '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.to_dict
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(FireWork(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]

    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(FireWork(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB}
    fws.append(FireWork([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    if not parameters.get('skip_bandstructure', False):
        spec = {'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority, '_queueadapter': QA_CONTROL}
        fws.append(FireWork([AddEStructureTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=3))
        connections[2] = [3]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).to_dict
    if 'LDAU' in incar and incar['LDAU']:
        spec = _snl_to_spec(snl, enforce_gga=False)
        del spec['vasp']  # we are stealing all VASP params and such from previous run
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        fws.append(FireWork([VaspCopyTask(), SetupGGAUTask(), get_custodian_task(spec)],
                            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=10))
        # NOTE: connections is a plain dict here; if skip_bandstructure is set,
        # connections[2] was never created and this append raises KeyError
        # (later versions of this function use defaultdict(list) instead)
        connections[2].append(10)

        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority}
        fws.append(FireWork([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=11))
        connections[10] = [11]

        if not parameters.get('skip_bandstructure', False):
            spec = {'task_type': 'Controller: add Electronic Structure v2',
                    '_priority': priority, '_queueadapter': QA_CONTROL}
            fws.append(FireWork([AddEStructureTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']), fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections,
                    name=Composition.from_formula(
                        snl.structure.composition.reduced_formula).alphabetical_formula,
                    metadata=wf_meta)
def snl_to_wf(snl, parameters=None):
    fws = []
    connections = defaultdict(list)
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    snl_spec = {}
    if 'snlgroup_id' in parameters:
        if 'mpsnl' in parameters:
            snl_spec['mpsnl'] = parameters['mpsnl']
        elif isinstance(snl, MPStructureNL):
            snl_spec['mpsnl'] = snl.as_dict()
        else:
            raise ValueError("improper use of force SNL")
        snl_spec['snlgroup_id'] = parameters['snlgroup_id']
    else:
        # add the SNL to the SNL DB and figure out duplicate group
        tasks = [AddSNLTask()]
        spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
                '_queueadapter': QA_DB, '_priority': snl_priority}
        fws.append(Firework(tasks, spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=0))
        connections[0] = [1]

    trackers = [Tracker('FW_job.out'), Tracker('FW_job.error'),
                Tracker('vasp.out'), Tracker('OUTCAR'), Tracker('OSZICAR'),
                Tracker('OUTCAR.relax1'), Tracker('OUTCAR.relax2')]
    trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True, parameters=parameters)
    spec.update(snl_spec)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    spec['_trackers'] = trackers
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(Firework(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority * 2,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).as_dict()
    ggau_compound = ('LDAU' in incar and incar['LDAU'])

    if not parameters.get('skip_bandstructure', False) and \
            (not ggau_compound or
             parameters.get('force_gga_bandstructure', False)):
        spec = {'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority, '_queueadapter': QA_CONTROL}
        fws.append(Firework([AddEStructureTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=3))
        connections[2] = [3]

    if ggau_compound:
        spec = _snl_to_spec(snl, enforce_gga=False, parameters=parameters)
        del spec['vasp']  # we are stealing all VASP params and such from previous run
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        spec['_trackers'] = trackers
        fws.append(Firework(
            [VaspCopyTask(), SetupGGAUTask(), get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=10))
        connections[2].append(10)

        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict(),
                '_trackers': trackers_db}
        fws.append(Firework([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']), fw_id=11))
        connections[10] = [11]

        if not parameters.get('skip_bandstructure', False):
            spec = {'task_type': 'Controller: add Electronic Structure v2',
                    '_priority': priority, '_queueadapter': QA_CONTROL}
            fws.append(Firework([AddEStructureTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']),
                                fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and \
            'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections,
                    name=Composition(snl.structure.composition.reduced_formula).alphabetical_formula,
                    metadata=wf_meta)
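# A hedged usage sketch for the snl_to_wf variants above: build a Structure,
# wrap it in a StructureNL with provenance, and submit the resulting Workflow
# to the LaunchPad. The import paths (especially the StructureNL module) vary
# across pymatgen versions, so treat them as assumptions.
from fireworks import LaunchPad
from pymatgen.core import Lattice, Structure
from pymatgen.util.provenance import StructureNL  # older releases: pymatgen.matproj.snl

structure = Structure(Lattice.cubic(2.87), ['Fe', 'Fe'],
                      [[0, 0, 0], [0.5, 0.5, 0.5]])
snl = StructureNL(structure, authors='A. Person <a.person@example.com>')
wf = snl_to_wf(snl, parameters={'priority': 2, 'skip_bandstructure': True})
LaunchPad.auto_load().add_wf(wf)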
def equilibrium_constant_fws(mission, solvent, solvent_method, use_vdw_surface,
                             qm_method, reaction_id, dupefinder=None,
                             priority=1, parent_fwid=None,
                             additional_user_tags=None, depend_on_parent=False):
    energy_method, sol_qm_method, geom_method = qm_method.split("//")
    if '||' in energy_method:
        sp_qm_method, bsse_qm_method = energy_method.split("||")
        qm_method = "//".join([sp_qm_method, sol_qm_method, geom_method])
    else:
        bsse_qm_method = energy_method

    coll = get_reactions_collection()
    reaction_doc = coll.find_one(filter={"reaction_id": reaction_id})
    reactant_snls = [StructureNL.from_dict(s)
                     for s in reaction_doc["reactant_snls"]]
    product_snls = [StructureNL.from_dict(s)
                    for s in reaction_doc["product_snls"]]
    reactant_nicknames = reaction_doc['reactant_nicknames']
    product_nicknames = reaction_doc['product_nicknames']
    reactant_charges = reaction_doc['reactant_charges']
    product_charges = reaction_doc['product_charges']
    reactant_spin_multiplicities = reaction_doc['reactant_spin_multiplicities']
    product_spin_multiplicities = reaction_doc['product_spin_multiplicities']
    reactant_fragments = reaction_doc['reactant_fragments']
    product_fragments = reaction_doc['product_fragments']

    fwid_base = 1
    if parent_fwid:
        if not (isinstance(parent_fwid, int) or isinstance(parent_fwid, list)):
            raise ValueError("Parent FireWork ID must be integer or list")
        parent_fwid = parent_fwid if isinstance(parent_fwid, list) \
            else [parent_fwid]
        fwid_base = max(parent_fwid) + 1

    current_fwid = fwid_base
    fws = []
    links_dict = dict()

    for snl, nick_name, charge, spin, fragments in \
            zip(reactant_snls + product_snls,
                reactant_nicknames + product_nicknames,
                reactant_charges + product_charges,
                reactant_spin_multiplicities + product_spin_multiplicities,
                reactant_fragments + product_fragments):
        mol = snl.structure
        mol.set_charge_and_spin(charge, spin)

        snl_tasks = [AddEGSNLTask()]
        snl_spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
                    '_priority': priority}
        priority *= 2  # once we start a job, keep going!
        snl_fw = Firework(snl_tasks, snl_spec,
                          name=get_slug(nick_name + ' -- Add to SNL database'),
                          fw_id=current_fwid)
        fws.append(snl_fw)

        sp_fws, sp_links_dict = single_point_energy_fws(
            mol, name=nick_name, mission=mission, solvent=solvent,
            solvent_method=solvent_method, use_vdW_surface=use_vdw_surface,
            qm_method=qm_method, pop_method=None, dupefinder=dupefinder,
            priority=priority, parent_fwid=snl_fw.fw_id,
            additional_user_tags=additional_user_tags,
            depend_on_parent_fw=True, large=True)
        fws.extend(sp_fws)

        sp_children = set()
        sp_parents = set()
        for k, v2 in sp_links_dict.items():
            v1 = links_dict.get(k, [])
            links_dict[k] = list((set(v1) if isinstance(v1, list) else {v1}) |
                                 (set(v2) if isinstance(v2, list) else {v2}))
            if isinstance(k, list):
                sp_parents |= set(k)
            else:
                sp_parents.add(k)
            if isinstance(v2, list):
                sp_children |= set(v2)
            else:
                sp_children.add(v2)
        sp_last_fwids = list(sp_children - sp_parents)

        bsse_fws, bsse_links_dict = counterpoise_correction_generation_fw(
            molname=nick_name, charge=charge, spin_multiplicity=spin,
            qm_method=bsse_qm_method, fragments=fragments, mission=mission,
            priority=priority, parent_fwid=sp_last_fwids,
            additional_user_tags=additional_user_tags, large=True)
        fws.extend(bsse_fws)
        for k, v2 in bsse_links_dict.items():
            v1 = links_dict.get(k, [])
            links_dict[k] = list((set(v1) if isinstance(v1, list) else {v1}) |
                                 (set(v2) if isinstance(v2, list) else {v2}))

        current_fwid = max([fw.fw_id for fw in bsse_fws]) + 1

    if depend_on_parent:
        all_fwids = [fw.fw_id for fw in fws]
        for p_fwid in parent_fwid:
            links_dict[p_fwid] = all_fwids

    return fws, links_dict
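# The helper above returns (fireworks, links_dict) rather than a finished
# Workflow, so callers can splice the steps into a larger graph. A hedged
# sketch of direct use follows; reaction_id=1 and the triple-slash qm_method
# string are placeholders, and the reactions collection must already contain
# a matching document:
from fireworks import Workflow

fws, links = equilibrium_constant_fws(
    mission='Electron Genome Production', solvent='water',
    solvent_method='ief-pcm', use_vdw_surface=False,
    qm_method='B3LYP/6-31+G*//B3LYP/6-31+G*//B3LYP/6-31G*', reaction_id=1)
wf = Workflow(fws, links, name='equilibrium constant example')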
def snl_to_wf(snl, parameters=None):
    fws = []
    connections = {}
    parameters = parameters if parameters else {}

    snl_priority = parameters.get('priority', 1)
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula

    # add the SNL to the SNL DB and figure out duplicate group
    tasks = [AddSNLTask()]
    spec = {'task_type': 'Add to SNL database', 'snl': snl.to_dict,
            '_queueadapter': QA_DB, '_priority': snl_priority}
    if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL):
        spec['force_mpsnl'] = snl.to_dict
        spec['force_snlgroup_id'] = parameters['snlgroup_id']
        del spec['snl']
    fws.append(FireWork(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=0))
    connections[0] = [1]

    # run GGA structure optimization
    spec = _snl_to_spec(snl, enforce_gga=True)
    spec['_priority'] = priority
    spec['_queueadapter'] = QA_VASP
    tasks = [VaspWriterTask(), get_custodian_task(spec)]
    fws.append(FireWork(tasks, spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=1))

    # insert into DB - GGA structure optimization
    spec = {'task_type': 'VASP db insertion', '_priority': priority,
            '_allow_fizzled_parents': True, '_queueadapter': QA_DB}
    fws.append(FireWork([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=2))
    connections[1] = [2]

    if not parameters.get('skip_bandstructure', False):
        spec = {'task_type': 'Controller: add Electronic Structure v2',
                '_priority': priority, '_queueadapter': QA_CONTROL}
        fws.append(FireWork([AddEStructureTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=3))
        connections[2] = [3]

    # determine if GGA+U FW is needed
    incar = MPVaspInputSet().get_incar(snl.structure).to_dict
    if 'LDAU' in incar and incar['LDAU']:
        spec = _snl_to_spec(snl, enforce_gga=False)
        del spec['vasp']  # we are stealing all VASP params and such from previous run
        spec['_priority'] = priority
        spec['_queueadapter'] = QA_VASP
        fws.append(FireWork(
            [VaspCopyTask(), SetupGGAUTask(), get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=10))
        # fw 2 has no children yet if the band-structure controller was skipped
        connections.setdefault(2, []).append(10)

        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority}
        fws.append(FireWork([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=11))
        connections[10] = [11]

        if not parameters.get('skip_bandstructure', False):
            spec = {'task_type': 'Controller: add Electronic Structure v2',
                    '_priority': priority, '_queueadapter': QA_CONTROL}
            fws.append(FireWork([AddEStructureTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']),
                                fw_id=12))
            connections[11] = [12]

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'May 2013 (1)'

    if '_materialsproject' in snl.data and \
            'submission_id' in snl.data['_materialsproject']:
        wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id']

    return Workflow(fws, connections, name=Composition.from_formula(
        snl.structure.composition.reduced_formula).alphabetical_formula,
        metadata=wf_meta)
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.',
                           reserve=False, strm_lvl='INFO'):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir,
                             stream_level=strm_lvl)

    # get the queue adapter
    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    # make sure launch_dir exists:
    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    # initialize before the try block so the finally clause below cannot hit an unbound name
    oldlaunch_dir = None

    if launchpad.run_exists(fworker):
        try:
            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            os.chdir(launcher_dir)

            if '--offline' in qadapter['rocket_launch'] and not reserve:
                raise ValueError("Must use reservation mode (-r option) of qlaunch when using offline mode (--offline option) of rlaunch!!")
            elif reserve:
                l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad._reserve_fw(fworker, launcher_dir)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # set job name to the FW name
                job_name = get_slug(fw.name)
                job_name = job_name[0:20] if len(job_name) > 20 else job_name
                qadapter.update({'job_name': job_name})  # set the job name to FW name

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using FireWork spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # update the exe to include the FW_id
                if 'singleshot' not in qadapter.get('rocket_launch', ''):
                    raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                if '--offline' in qadapter['rocket_launch']:
                    # handle _launch_dir parameter early...
                    if '_launch_dir' in fw.spec:
                        os.chdir(fw.spec['_launch_dir'])
                        oldlaunch_dir = launcher_dir
                        launcher_dir = os.path.abspath(os.getcwd())
                        launchpad._change_launch_dir(launch_id, launcher_dir)

                    # write FW.json
                    fw.to_file("FW.json")
                    # write Launchid
                    with open('FW_offline.json', 'w') as f:
                        f.write('{"launch_id":%s}' % launch_id)
                    launchpad.add_offline_run(launch_id, fw.fw_id, fw.name)

            # write and submit the queue script using the queue adapter
            l_logger.debug('writing queue script')
            with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                queue_script = qadapter.get_script_str(launcher_dir)
                f.write(queue_script)
            l_logger.info('submitting queue script')
            reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
            if not reservation_id:
                raise RuntimeError('queue script could not be submitted, check queue adapter and queue server status!')
            elif reserve:
                launchpad.set_reservation_id(launch_id, reservation_id)
            return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False
        finally:
            if oldlaunch_dir:
                os.chdir(oldlaunch_dir)  # this only matters in --offline mode with _launch_dir!
    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.',
                           reserve=False, strm_lvl='INFO'):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir,
                             stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    fw, launch_id = None, None  # only needed in reservation mode
    oldlaunch_dir = None  # only needed in --offline mode with _launch_dir option

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if launchpad.run_exists(fworker):
        try:
            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if reserve:
                    l_logger.debug('finding a FW to reserve...')
                    fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir)
                    if not fw:
                        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                        return False
                    l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                    # update qadapter job_name based on FW name
                    job_name = get_slug(fw.name)[0:20]
                    qadapter.update({'job_name': job_name})

                    if '_queueadapter' in fw.spec:
                        l_logger.debug('updating queue params using Firework spec..')
                        qadapter.update(fw.spec['_queueadapter'])

                    # reservation mode includes --fw_id in rocket launch
                    qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                    if '--offline' in qadapter['rocket_launch']:
                        # handle _launch_dir parameter now b/c we can't call
                        # launchpad.change_launch_dir() later on in offline mode
                        if '_launch_dir' in fw.spec:
                            os.chdir(fw.spec['_launch_dir'])
                            oldlaunch_dir = launcher_dir
                            launcher_dir = os.path.abspath(os.getcwd())
                            launchpad.change_launch_dir(launch_id, launcher_dir)
                        setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)
                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    if reserve:
                        l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(fw.fw_id, launch_id))
                        launchpad.cancel_reservation(launch_id)
                    raise RuntimeError('queue script could not be submitted, check queue script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
                return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False
        finally:
            if oldlaunch_dir:
                os.chdir(oldlaunch_dir)  # this only matters in --offline mode with _launch_dir!
    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
def launch_rocket_to_queue(launchpad, fworker, qadapter, launcher_dir='.',
                           reserve=False, strm_lvl='INFO',
                           create_launcher_dir=False):
    """
    Submit a single job to the queue.

    :param launchpad: (LaunchPad)
    :param fworker: (FWorker)
    :param qadapter: (QueueAdapterBase)
    :param launcher_dir: (str) The directory where to submit the job
    :param reserve: (bool) Whether to queue in reservation mode
    :param strm_lvl: (str) level at which to stream log messages
    :param create_launcher_dir: (bool) Whether to create a subfolder launcher+timestamp, if needed
    """
    fworker = fworker if fworker else FWorker()
    launcher_dir = os.path.abspath(launcher_dir)
    l_logger = get_fw_logger('queue.launcher', l_dir=launchpad.logdir,
                             stream_level=strm_lvl)

    l_logger.debug('getting queue adapter')
    qadapter = load_object(qadapter.to_dict())  # make a defensive copy, mainly for reservation mode

    fw, launch_id = None, None  # only needed in reservation mode

    if not os.path.exists(launcher_dir):
        raise ValueError('Desired launch directory {} does not exist!'.format(launcher_dir))

    if '--offline' in qadapter['rocket_launch'] and not reserve:
        raise ValueError("Must use reservation mode (-r option) of qlaunch when using offline option of rlaunch!!")

    if reserve and 'singleshot' not in qadapter.get('rocket_launch', ''):
        raise ValueError('Reservation mode of queue launcher only works for singleshot Rocket Launcher!')

    if launchpad.run_exists(fworker):
        try:
            if reserve:
                l_logger.debug('finding a FW to reserve...')
                fw, launch_id = launchpad.reserve_fw(fworker, launcher_dir)
                if not fw:
                    l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
                    return False
                l_logger.info('reserved FW with fw_id: {}'.format(fw.fw_id))

                # update qadapter job_name based on FW name
                job_name = get_slug(fw.name)[0:QUEUE_JOBNAME_MAXLEN]
                qadapter.update({'job_name': job_name})

                if '_queueadapter' in fw.spec:
                    l_logger.debug('updating queue params using Firework spec..')
                    qadapter.update(fw.spec['_queueadapter'])

                # reservation mode includes --fw_id in rocket launch
                qadapter['rocket_launch'] += ' --fw_id {}'.format(fw.fw_id)

                # update launcher_dir if _launch_dir is selected in reserved fw
                if '_launch_dir' in fw.spec:
                    fw_launch_dir = os.path.expandvars(fw.spec['_launch_dir'])
                    if not os.path.isabs(fw_launch_dir):
                        fw_launch_dir = os.path.join(launcher_dir, fw_launch_dir)
                    launcher_dir = fw_launch_dir
                    try:
                        os.makedirs(launcher_dir)
                    except OSError as exception:
                        if exception.errno != errno.EEXIST:
                            raise
                    launchpad.change_launch_dir(launch_id, launcher_dir)
                elif create_launcher_dir:
                    # create launcher_dir
                    launcher_dir = create_datestamp_dir(launcher_dir, l_logger,
                                                        prefix='launcher_')
                    launchpad.change_launch_dir(launch_id, launcher_dir)
            elif create_launcher_dir:
                # create launcher_dir
                launcher_dir = create_datestamp_dir(launcher_dir, l_logger,
                                                    prefix='launcher_')

            # move to the launch directory
            l_logger.info('moving to launch_dir {}'.format(launcher_dir))
            with cd(launcher_dir):
                if '--offline' in qadapter['rocket_launch']:
                    setup_offline_job(launchpad, fw, launch_id)

                l_logger.debug('writing queue script')
                with open(SUBMIT_SCRIPT_NAME, 'w') as f:
                    queue_script = qadapter.get_script_str(launcher_dir)
                    f.write(queue_script)
                l_logger.info('submitting queue script')
                reservation_id = qadapter.submit_to_queue(SUBMIT_SCRIPT_NAME)
                if not reservation_id:
                    if reserve:
                        l_logger.info('Un-reserving FW with fw_id, launch_id: {}, {}'.format(fw.fw_id, launch_id))
                        launchpad.cancel_reservation(launch_id)
                    raise RuntimeError('queue script could not be submitted, check queue script/queue adapter/queue server status!')
                elif reserve:
                    launchpad.set_reservation_id(launch_id, reservation_id)
                return reservation_id

        except:
            log_exception(l_logger, 'Error writing/submitting queue script!')
            return False
    else:
        l_logger.info('No jobs exist in the LaunchPad for submission to queue!')
        return False
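# A hedged sketch of driving launch_rocket_to_queue by hand. In practice the
# adapter is usually loaded from a my_qadapter.yaml file; the PBS template
# keys below (queue, nnodes, walltime) and their values are assumptions for
# illustration.
from fireworks import LaunchPad, FWorker
from fireworks.user_objects.queue_adapters.common_adapter import CommonAdapter

launchpad = LaunchPad.auto_load()
qadapter = CommonAdapter('PBS', queue='regular', nnodes=1,
                         walltime='01:00:00',
                         rocket_launch='rlaunch singleshot')
launch_rocket_to_queue(launchpad, FWorker(), qadapter, launcher_dir='.',
                       reserve=True, create_launcher_dir=True)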
def run_task(self, fw_spec):
    print('sleeping 10s for Mongo')
    time.sleep(10)
    print('done sleeping')
    print('the gap is {}, the cutoff is {}'.format(
        fw_spec['analysis']['bandgap'], self.gap_cutoff))

    if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
        static_dens = 90
        uniform_dens = 1000
        line_dens = 20
    else:
        static_dens = 450
        uniform_dens = 1500
        line_dens = 30

    if fw_spec['analysis']['bandgap'] <= self.metal_cutoff:
        user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
    else:
        user_incar_settings = {}

    print('Adding more runs...')

    type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'
    snl = StructureNL.from_dict(fw_spec['mpsnl'])
    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    fws = []
    connections = {}
    priority = fw_spec['_priority']
    trackers = [Tracker('FW_job.out'), Tracker('FW_job.error'),
                Tracker('vasp.out'), Tracker('OUTCAR'), Tracker('OSZICAR')]
    trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

    # run GGA static
    spec = fw_spec  # pass all the items from the current spec to the new one
    spec.update({'task_type': '{} static v2'.format(type_name),
                 '_queueadapter': QA_VASP_SMALL,
                 '_dupefinder': DupeFinderVasp().to_dict(),
                 '_priority': priority, '_trackers': trackers})
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True}),
         SetupStaticRunTask({"kpoints_density": static_dens,
                             'user_incar_settings': user_incar_settings}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-10))

    # insert into DB - GGA static
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-9))
    connections[-10] = -9

    # run GGA Uniform
    spec = {'task_type': '{} Uniform v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority, '_trackers': trackers}
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': False}),
         SetupNonSCFTask({'mode': 'uniform', "kpoints_density": uniform_dens}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-8))
    connections[-9] = -8

    # insert into DB - GGA Uniform
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask({'parse_uniform': True})], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-7))
    connections[-8] = -7

    # run GGA Band structure
    spec = {'task_type': '{} band structure v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority, '_trackers': trackers}
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': False}),
         SetupNonSCFTask({'mode': 'line', "kpoints_line_density": line_dens}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-6))
    connections[-7] = [-6]

    # insert into DB - GGA Band structure
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask({})], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-5))
    connections[-6] = -5

    if fw_spec.get('parameters') and fw_spec['parameters'].get('boltztrap'):
        # run Boltztrap
        spec = {'task_type': '{} Boltztrap'.format(type_name),
                '_queueadapter': QA_DB,
                '_dupefinder': DupeFinderDB().to_dict(),
                '_priority': priority}
        fws.append(Firework([BoltztrapRunTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-4))
        connections[-7].append(-4)

    wf = Workflow(fws, connections)
    print('Done adding more runs...')
    return FWAction(additions=wf)
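# Unlike the detour in the unconverged-handler fragment earlier, this task
# returns FWAction(additions=wf): the new Fireworks are appended to the
# workflow as children of the current step, rather than being spliced in
# between the current step and its existing children. A minimal sketch of the
# difference, with ScriptTask as a stand-in for the real tasks:
from fireworks import Firework, Workflow, FWAction, ScriptTask

extra = Workflow([Firework(ScriptTask.from_str('echo "post-processing"'))])
action_append = FWAction(additions=extra)  # runs after this FW; existing children unchanged
action_splice = FWAction(detours=extra)    # runs between this FW and its children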
def run_task(self, fw_spec):
    if '_fizzled_parents' in fw_spec and not 'prev_vasp_dir' in fw_spec:
        prev_dir = get_loc(fw_spec['_fizzled_parents'][0]['launches'][0]['launch_dir'])
        update_spec = {}  # add this later when creating new FW
        fizzled_parent = True
        parse_dos = False
    else:
        prev_dir = get_loc(fw_spec['prev_vasp_dir'])
        update_spec = {'prev_vasp_dir': prev_dir,
                       'prev_task_type': fw_spec['prev_task_type'],
                       'run_tags': fw_spec['run_tags'],
                       'parameters': fw_spec.get('parameters')}
        fizzled_parent = False
        parse_dos = 'Uniform' in fw_spec['prev_task_type']

    if 'run_tags' in fw_spec:
        self.additional_fields['run_tags'] = fw_spec['run_tags']
    else:
        self.additional_fields['run_tags'] = \
            fw_spec['_fizzled_parents'][0]['spec']['run_tags']

    if MOVE_TO_GARDEN_DEV:
        prev_dir = move_to_garden(prev_dir, prod=False)
    elif MOVE_TO_GARDEN_PROD:
        prev_dir = move_to_garden(prev_dir, prod=True)

    # get the directory containing the db file
    db_dir = os.environ['DB_LOC']
    db_path = os.path.join(db_dir, 'tasks_db.json')

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('MPVaspDrone')
    logger.setLevel(logging.INFO)
    sh = logging.StreamHandler(stream=sys.stdout)
    sh.setLevel(getattr(logging, 'INFO'))
    logger.addHandler(sh)

    with open(db_path) as f:
        db_creds = json.load(f)
        drone = MPVaspDrone(host=db_creds['host'], port=db_creds['port'],
                            database=db_creds['database'],
                            user=db_creds['admin_user'],
                            password=db_creds['admin_password'],
                            collection=db_creds['collection'],
                            parse_dos=parse_dos,
                            additional_fields=self.additional_fields,
                            update_duplicates=self.update_duplicates)
        t_id, d = drone.assimilate(
            prev_dir, launches_coll=LaunchPad.auto_load().launches)

    mpsnl = d['snl_final'] if 'snl_final' in d else d['snl']
    snlgroup_id = d['snlgroup_id_final'] if 'snlgroup_id_final' in d \
        else d['snlgroup_id']
    update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id})

    print('ENTERED task id:', t_id)
    stored_data = {'task_id': t_id}
    if d['state'] == 'successful':
        update_spec['analysis'] = d['analysis']
        update_spec['output'] = d['output']
        update_spec['vasp'] = {'incar': d['calculations'][-1]['input']['incar'],
                               'kpoints': d['calculations'][-1]['input']['kpoints']}
        update_spec["task_id"] = t_id
        return FWAction(stored_data=stored_data, update_spec=update_spec)

    # not successful - first test to see if UnconvergedHandler is needed
    if not fizzled_parent:
        unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type'])
        output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml'))
        ueh = UnconvergedErrorHandler(output_filename=output_dir)
        # TODO: make this a little more flexible
        if ueh.check() and unconverged_tag not in fw_spec['run_tags']:
            print('Unconverged run! Creating dynamic FW...')

            spec = {'prev_vasp_dir': prev_dir,
                    'prev_task_type': fw_spec['task_type'],
                    'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id,
                    'task_type': fw_spec['prev_task_type'],
                    'run_tags': list(fw_spec['run_tags']),
                    'parameters': fw_spec.get('parameters'),
                    '_dupefinder': DupeFinderVasp().to_dict(),
                    '_priority': fw_spec['_priority']}
            # Pass elastic tensor spec
            if 'deformation_matrix' in fw_spec.keys():
                spec['deformation_matrix'] = fw_spec['deformation_matrix']
                spec['original_task_id'] = fw_spec['original_task_id']
            snl = StructureNL.from_dict(spec['mpsnl'])
            spec['run_tags'].append(unconverged_tag)
            spec['_queueadapter'] = QA_VASP

            fws = []
            connections = {}
            f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

            fws.append(Firework(
                [VaspCopyTask({'files': ['INCAR', 'KPOINTS', 'POSCAR',
                                         'POTCAR', 'CONTCAR'],
                               'use_CONTCAR': False}),
                 SetupUnconvergedHandlerTask(),
                 get_custodian_task(spec)],
                spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-2))

            spec = {'task_type': 'VASP db insertion',
                    '_allow_fizzled_parents': True,
                    '_priority': fw_spec['_priority'],
                    '_queueadapter': QA_DB,
                    'run_tags': list(fw_spec['run_tags'])}
            if 'deformation_matrix' in fw_spec.keys():
                spec['deformation_matrix'] = fw_spec['deformation_matrix']
                spec['original_task_id'] = fw_spec['original_task_id']
            spec['run_tags'].append(unconverged_tag)
            fws.append(Firework([VaspToDBTask()], spec,
                                name=get_slug(f + '--' + spec['task_type']),
                                fw_id=-1))
            connections[-2] = -1
            wf = Workflow(fws, connections)
            return FWAction(detours=wf)

    # not successful and not due to convergence problem - FIZZLE
    raise ValueError("DB insertion successful, but don't know how to fix this "
                     "Firework! Can't continue with workflow...")
def structure_to_wf(structure):
    """
    This method starts with a Structure object and creates a Workflow object.
    The workflow has two steps - a structure relaxation and a static run.

    :param structure:
    :return:
    """
    fws = []  # list of FireWorks to run
    connections = defaultdict(list)  # dependencies between FireWorks

    # generate VASP input objects for 1st VASP run - this is put in the FW spec
    mpvis = MPGGAVaspInputSet(user_incar_settings={'NPAR': 2})
    incar = mpvis.get_incar(structure)
    poscar = mpvis.get_poscar(structure)
    kpoints = mpvis.get_kpoints(structure)
    potcar = mpvis.get_potcar(structure)

    # serialize the VASP input objects to the FW spec
    spec = {}
    spec['vasp'] = {}
    spec['vasp']['incar'] = incar.as_dict()
    spec['vasp']['poscar'] = poscar.as_dict()
    spec['vasp']['kpoints'] = kpoints.as_dict()
    spec['vasp']['potcar'] = potcar.as_dict()
    spec['vaspinputset_name'] = mpvis.__class__.__name__
    spec['task_type'] = 'GGA optimize structure (2x) example'

    # set up the custodian that we want to run
    jobs = VaspJob.double_relaxation_run('')
    for j in jobs:
        # turn off auto npar, it doesn't work for >1 node
        j.auto_npar = False
    handlers = [VaspErrorHandler(), FrozenJobErrorHandler(),
                MeshSymmetryErrorHandler(), NonConvergingErrorHandler()]
    c_params = {'jobs': [j.as_dict() for j in jobs],
                'handlers': [h.as_dict() for h in handlers], 'max_errors': 5}
    custodiantask = VaspCustodianTaskEx(c_params)

    # 1st Firework - run GGA optimize structure
    # VaspWriterTask - write input files (INCAR, POSCAR, KPOINTS, POTCAR) based on spec
    # CustodianTaskEx - run VASP within a custodian
    tasks = [VaspWriterTask(), custodiantask]
    fws.append(Firework(tasks, spec,
                        name=get_name(structure, spec['task_type']), fw_id=1))

    # 2nd Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(Firework([VaspToDBTaskEx()], spec,
                        name=get_name(structure, spec['task_type']), fw_id=2))
    connections[1] = [2]

    # 3rd Firework - static run.
    # VaspCopyTask - copy output from previous run to this directory
    # SetupStaticRunTask - override old parameters for static run
    # CustodianTaskEx - run VASP within a custodian
    spec = {'task_type': 'GGA static example'}
    copytask = VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True})
    setuptask = SetupStaticRunTask()
    custodiantask = VaspCustodianTaskEx(
        {'jobs': [VaspJob('', auto_npar=False).as_dict()],
         'handlers': [h.as_dict() for h in handlers], 'max_errors': 5})
    fws.append(Firework([copytask, setuptask, custodiantask], spec,
                        name=get_name(structure, spec['task_type']), fw_id=3))
    connections[2] = [3]

    # 4th Firework - insert previous run into DB
    spec = {'task_type': 'VASP db insertion example'}
    fws.append(Firework([VaspToDBTaskEx()], spec,
                        name=get_name(structure, spec['task_type']), fw_id=4))
    connections[3] = [4]

    return Workflow(fws, connections, name=get_slug(structure.formula))
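# A hedged usage sketch for structure_to_wf: build a simple fcc Cu structure
# with pymatgen and submit the relax + static workflow. The pymatgen import
# path is version-dependent (older releases used "from pymatgen import ...").
from fireworks import LaunchPad
from pymatgen.core import Lattice, Structure

cu = Structure(Lattice.cubic(3.6), ['Cu'] * 4,
               [[0, 0, 0], [0.5, 0.5, 0], [0.5, 0, 0.5], [0, 0.5, 0.5]])
wf = structure_to_wf(cu)
LaunchPad.auto_load().add_wf(wf)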
def _write_formula_file(self, fw_spec):
    filename = get_slug(
        'JOB--' + fw_spec['mpsnl'].structure.composition.reduced_formula +
        '--' + fw_spec['task_type'])
    with open(filename, 'w+') as f:
        f.write('')
def run_task(self, fw_spec): print "sleeping 10s for Mongo" time.sleep(10) print "done sleeping" print "the gap is {}, the cutoff is {}".format(fw_spec["analysis"]["bandgap"], self.gap_cutoff) if fw_spec["analysis"]["bandgap"] >= self.gap_cutoff: print "Adding more runs..." type_name = "GGA+U" if "GGA+U" in fw_spec["prev_task_type"] else "GGA" snl = StructureNL.from_dict(fw_spec["mpsnl"]) f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula fws = [] connections = {} priority = fw_spec["_priority"] # run GGA static spec = fw_spec # pass all the items from the current spec to the new # one spec.update( { "task_type": "{} static".format(type_name), "_queueadapter": QA_VASP, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, } ) fws.append( Firework( [VaspCopyTask({"use_CONTCAR": True}), SetupStaticRunTask(), get_custodian_task(spec)], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-10, ) ) # insert into DB - GGA static spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority, "_dupefinder": DupeFinderDB().to_dict(), } fws.append(Firework([VaspToDBTask()], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-9)) connections[-10] = -9 # run GGA Uniform spec = { "task_type": "{} Uniform".format(type_name), "_queueadapter": QA_VASP, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, } fws.append( Firework( [ VaspCopyTask({"use_CONTCAR": False}), SetupNonSCFTask({"mode": "uniform"}), get_custodian_task(spec), ], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-8, ) ) connections[-9] = -8 # insert into DB - GGA Uniform spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority, "_dupefinder": DupeFinderDB().to_dict(), } fws.append( Firework( [VaspToDBTask({"parse_uniform": True})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-7 ) ) connections[-8] = -7 # run GGA Band structure spec = { "task_type": "{} band structure".format(type_name), "_queueadapter": QA_VASP, "_dupefinder": DupeFinderVasp().to_dict(), "_priority": priority, } fws.append( Firework( [VaspCopyTask({"use_CONTCAR": False}), SetupNonSCFTask({"mode": "line"}), get_custodian_task(spec)], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-6, ) ) connections[-7] = -6 # insert into DB - GGA Band structure spec = { "task_type": "VASP db insertion", "_queueadapter": QA_DB, "_allow_fizzled_parents": True, "_priority": priority, "_dupefinder": DupeFinderDB().to_dict(), } fws.append(Firework([VaspToDBTask({})], spec, name=get_slug(f + "--" + spec["task_type"]), fw_id=-5)) connections[-6] = -5 wf = Workflow(fws, connections) print "Done adding more runs..." return FWAction(additions=wf) return FWAction()
def snl_to_eg_wf(snl, parameters=None):
    fws_all = []
    parameters = parameters if parameters else {}
    snl_priority = parameters.get('priority', 1)
    mission = parameters.get('mission', 'Electron Genome Production')
    priority = snl_priority * 2  # once we start a job, keep going!

    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula
    molname = parameters.get("nick_name", f)

    if "reaction_id" not in parameters:
        # add the SNL to the SNL DB and figure out duplicate group
        tasks = [AddEGSNLTask()]
        spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(),
                '_priority': snl_priority}
        if 'snlgroup_id' in parameters and isinstance(snl, EGStructureNL):
            spec['force_egsnl'] = snl.as_dict()
            spec['force_snlgroup_id'] = parameters['snlgroup_id']
            del spec['snl']
        fws_all.append(Firework(
            tasks, spec, name=get_slug(molname + ' -- Add to SNL database'),
            fw_id=1))

    default_solvents = ['diglym', 'acetonitrile', 'dmso', 'thf',
                        'dimethylamine', 'dimethoxyethane', 'dimethylaniline',
                        'tetraglyme']

    workflow_type = parameters.get('workflow', 'ipea')
    ref_charge = parameters.get('ref_charge', 0)
    spin_multiplicities = parameters.get('spin_multiplicities', (2, 1, 2))
    user_tags = {"initial_charge": ref_charge}
    if 'derivation_name' in parameters:
        user_tags['derivation_name'] = parameters['derivation_name']
    solvent_method = parameters.get("solvent_method", "ief-pcm")
    use_vdw_surface = parameters.get("use_vdW_surface", False)
    qm_method = parameters.get("qm_method", None)
    population_method = parameters.get("population_method", None)
    check_large = parameters.get("check_large", True)

    if workflow_type == 'ipea':
        solvent = parameters.get('solvent', "water")
        fws_tasks, connections = multistep_ipea_fws(
            mol=snl.structure, name=molname, mission=mission, solvent=solvent,
            solvent_method=solvent_method, use_vdW_surface=use_vdw_surface,
            ref_charge=ref_charge, spin_multiplicities=spin_multiplicities,
            dupefinder=DupeFinderEG(), priority=priority, parent_fwid=1,
            additional_user_tags=user_tags, qm_method=qm_method,
            check_large=check_large)
    elif workflow_type == 'multiple solvent ipea':
        solvents = parameters.get('solvents', default_solvents)
        fws_tasks, connections = multi_solvent_ipea_fws(
            mol=snl.structure, name=molname, mission=mission,
            solvents=solvents, solvent_method=solvent_method,
            use_vdW_surface=use_vdw_surface, ref_charge=ref_charge,
            spin_multiplicities=spin_multiplicities,
            dupefinder=DupeFinderEG(), priority=priority, parent_fwid=1,
            additional_user_tags=user_tags, qm_method=qm_method)
    elif workflow_type == 'solvation energy':
        solvents = parameters.get('solvents', default_solvents)
        fws_tasks, connections = solvation_energy_fws(
            mol=snl.structure, name=molname, mission=mission,
            solvents=solvents, solvent_method=solvent_method,
            use_vdW_surface=use_vdw_surface, dupefinder=DupeFinderEG(),
            priority=priority, parent_fwid=1, additional_user_tags=user_tags,
            qm_method=qm_method)
    elif workflow_type == "single point energy":
        solvent = parameters.get('solvent', "water")
        fws_tasks, connections = single_point_energy_fws(
            mol=snl.structure, name=molname, mission=mission, solvent=solvent,
            solvent_method=solvent_method, use_vdW_surface=use_vdw_surface,
            qm_method=qm_method, pop_method=population_method,
            dupefinder=DupeFinderEG(), priority=priority, parent_fwid=1,
            additional_user_tags=user_tags)
    elif workflow_type == "md relax":
        high_temperature = parameters.get("high_temperature", 323.15)
        low_temperature = parameters.get("low_temperature", 273.15)
        md_steps = parameters.get("md_steps", 500)
        time_step = parameters.get("time_step", 1.0)
        md_runs = parameters.get("md_runs", 3)
        normal_basis = parameters.get("normal_basis", "6-31G*")
        diffuse_basis = parameters.get("diffuse_basis", "6-31+G*")
        charge_threshold = parameters.get("charge_threshold", -0.5)
        fws_tasks, connections = md_relax_fws(
            mol=snl.structure, name=molname, mission=mission,
            qm_method=qm_method, high_temperature=high_temperature,
            low_temperature=low_temperature, md_steps=md_steps,
            time_step=time_step, md_runs=md_runs, normal_basis=normal_basis,
            diffuse_basis=diffuse_basis, charge_threshold=charge_threshold,
            dupefinder=DupeFinderEG(), priority=priority, parent_fwid=1,
            additional_user_tags=user_tags)
    elif workflow_type == "equilibrium constant":
        solvent = parameters.get('solvent', "water")
        reaction_id = parameters.get('reaction_id')
        fws_tasks, connections = equilibrium_constant_fws(
            mission=mission, solvent=solvent, solvent_method=solvent_method,
            use_vdw_surface=use_vdw_surface, qm_method=qm_method,
            reaction_id=reaction_id, dupefinder=DupeFinderEG(),
            priority=priority, parent_fwid=None,
            additional_user_tags=user_tags, depend_on_parent=False)
    elif workflow_type == "bsse":
        charge = snl.structure.charge
        spin_multiplicity = snl.structure.spin_multiplicity
        fragments = parameters.get("fragments", None)
        fws_tasks, connections = counterpoise_correction_generation_fw(
            molname=molname, charge=charge,
            spin_multiplicity=spin_multiplicity, qm_method=qm_method,
            fragments=fragments, mission=mission, priority=priority,
            parent_fwid=1, additional_user_tags=user_tags)
    else:
        raise ValueError('Workflow "{}" is not supported yet'.format(workflow_type))

    fws_all.extend(fws_tasks)

    wf_meta = get_meta_from_structure(snl.structure)
    wf_meta['run_version'] = 'Jan 27, 2014'

    if '_electrolytegenome' in snl.data and \
            'submission_id' in snl.data['_electrolytegenome']:
        wf_meta['submission_id'] = snl.data['_electrolytegenome']['submission_id']

    return Workflow(fws_all, connections, name=molname, metadata=wf_meta)
def main():
    import argparse
    parser = argparse.ArgumentParser(
        description="Run A QChem Job for a QChem Input File")
    parser.add_argument("-d", "--directory", dest="directory", type=str,
                        required=True,
                        help="the directory containing all the QChem jobs to be pretended to run again")
    parser.add_argument("-p", "--priority", dest="priority", type=int,
                        default=100, help="the FireWorks priority")
    parser.add_argument("-b", "--batch_size", dest="batch_size", type=int,
                        default=100, help="the number of FireWorks in a Workflow")
    options = parser.parse_args()

    fw_priority = options.priority
    batch_size = options.batch_size

    lp = LaunchPad.auto_load()

    src_dir = os.path.abspath(options.directory)
    src_dir_sub_dirs = glob.glob(os.path.join(src_dir, "*"))
    num_dirs = len(src_dir_sub_dirs)
    current_fwid = 1
    links_dict = dict()
    fws_all = []
    num_fw_in_current_batch = 0
    batch_num = 1
    for i, sd in enumerate(src_dir_sub_dirs):
        if not os.path.isdir(sd):
            continue
        fw_json_filename = os.path.join(sd, "FW.json")
        if not (os.path.exists(fw_json_filename) or
                os.path.exists(fw_json_filename + ".gz")):
            continue
        with zopen(zpath(fw_json_filename), 'rt') as f:
            fw_dict = json.load(f)
        print("{percent:4.2%} completed, processing directory {d:s}, "
              "molecule name {molname:s}, mission {mission:s}".format(
                  percent=i / float(num_dirs), d=sd,
                  molname=fw_dict['spec']['user_tags']['molname'],
                  mission=fw_dict['spec']['user_tags']['mission']))

        molname = fw_dict['spec']['user_tags']['molname']

        egsnl_tasks = [AddEGSNLTask()]
        if 'mol' in fw_dict:
            mol = Molecule.from_dict(fw_dict['spec']['mol'])
        else:
            mol = Molecule.from_dict(fw_dict['spec']['qcinp']['jobs'][0]['molecule'])
        snl = StructureNL(mol, "Xiaohui Qu <*****@*****.**>",
                          "Electrolyte Genome")
        egsnl_task_spec = {'task_type': 'Add to SNL database',
                           'snl': snl.as_dict(),
                           '_category': 'Parse Previous QChem Job',
                           '_priority': fw_priority}
        snl_fw_id = current_fwid
        current_fwid += 1
        fws_all.append(Firework(
            egsnl_tasks, egsnl_task_spec,
            name=get_slug(molname + ' -- Add to SNL database For fake QChem Task'),
            fw_id=snl_fw_id))

        fake_qchem_tasks = [FakeRunQChemTask()]
        src_qchem_dir = sd
        fake_qchem_spec = {'_priority': fw_priority * 2,
                           'src_qchem_dir': src_qchem_dir,
                           '_category': 'Parse Previous QChem Job',
                           'run_tags': fw_dict['spec']['run_tags'],
                           'implicit_solvent': fw_dict['spec']['implicit_solvent'],
                           'task_type': fw_dict['spec']['task_type'],
                           'charge': fw_dict['spec']['charge'],
                           'spin_multiplicity': fw_dict['spec']['spin_multiplicity'],
                           'num_atoms': fw_dict['spec']['num_atoms'],
                           'user_tags': fw_dict['spec']['user_tags'],
                           'mol': mol.as_dict(),
                           'inchi': fw_dict['spec']['inchi'],
                           '_dupefinder': fw_dict['spec']['_dupefinder'],
                           'qcinp': fw_dict['spec']['qcinp'],
                           'qm_method': fw_dict['spec']['qm_method'],
                           'inchi_root': fw_dict['spec']['inchi_root']}
        for k in ['mixed_basis', 'mixed_aux_basis']:
            if k in fw_dict['spec']:
                fake_qchem_spec[k] = fw_dict['spec'][k]
        fake_qchem_fw_id = current_fwid
        current_fwid += 1
        fws_all.append(Firework(fake_qchem_tasks, fake_qchem_spec,
                                name='Fake' + fw_dict['name'],
                                fw_id=fake_qchem_fw_id))
        links_dict[snl_fw_id] = fake_qchem_fw_id

        num_fw_in_current_batch += 1
        # flush a full batch as its own workflow
        if num_fw_in_current_batch >= batch_size:
            wf = Workflow(fws_all, links_dict,
                          "Read Previous QChem Jobs Id-{}".format(batch_num))
            lp.add_wf(wf)
            batch_num += 1
            links_dict = dict()
            fws_all = []
            num_fw_in_current_batch = 0

    if num_fw_in_current_batch > 0:
        wf = Workflow(fws_all, links_dict, "Read Previous QChem Jobs")
        lp.add_wf(wf)
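# Hedged usage: main() is meant to be run from the command line; the script
# name below is hypothetical, and the standard entry-point guard is assumed
# rather than shown in the original snippet.
#
#   python read_prev_qchem_jobs.py -d /path/to/qchem_runs -p 50 -b 200
#
if __name__ == "__main__":
    main()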
def _write_formula_file(self, fw_spec):
    filename = get_slug('JOB--' +
                        fw_spec['mpsnl']['reduced_cell_formula_abc'] +
                        '--' + fw_spec['task_type'])
    with open(filename, 'w+') as f:
        f.write('')
def run_task(self, fw_spec):
    print('sleeping 10s for Mongo')
    time.sleep(10)
    print('done sleeping')
    print('the gap is {}, the cutoff is {}'.format(
        fw_spec['analysis']['bandgap'], self.gap_cutoff))

    if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
        print('Adding more runs...')
        type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'
        snl = StructureNL.from_dict(fw_spec['mpsnl'])
        f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}
        priority = fw_spec['_priority']

        # run GGA static
        spec = fw_spec  # pass all the items from the current spec to the new one
        spec.update({'task_type': '{} static'.format(type_name),
                     '_queueadapter': QA_VASP,
                     '_dupefinder': DupeFinderVasp().to_dict(),
                     '_priority': priority})
        fws.append(Firework(
            [VaspCopyTask({'use_CONTCAR': True}), SetupStaticRunTask(),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-10))

        # insert into DB - GGA static
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()}
        fws.append(Firework([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {'task_type': '{} Uniform'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority}
        fws.append(Firework(
            [VaspCopyTask({'use_CONTCAR': False}),
             SetupNonSCFTask({'mode': 'uniform'}),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-8))
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()}
        fws.append(Firework([VaspToDBTask({'parse_uniform': True})], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-7))
        connections[-8] = -7

        # run GGA Band structure
        spec = {'task_type': '{} band structure'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority}
        fws.append(Firework(
            [VaspCopyTask({'use_CONTCAR': False}),
             SetupNonSCFTask({'mode': 'line'}),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-6))
        connections[-7] = -6

        # insert into DB - GGA Band structure
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority,
                "_dupefinder": DupeFinderDB().to_dict()}
        fws.append(Firework([VaspToDBTask({})], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-5))
        connections[-6] = -5

        wf = Workflow(fws, connections)
        print('Done adding more runs...')
        return FWAction(additions=wf)
    return FWAction()
def _write_formula_file(self, fw_spec):
    filename = get_slug('JOB--' +
                        fw_spec['mpsnl']['reduced_cell_formula_abc'] +
                        '--' + fw_spec['task_type'])
    with open(filename, 'w+') as f:
        f.write('')
def run_task(self, fw_spec):
    print('sleeping 10s for Mongo')
    time.sleep(10)
    print('done sleeping')
    print('the gap is {}, the cutoff is {}'.format(
        fw_spec['analysis']['bandgap'], self.gap_cutoff))

    if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
        static_dens = 90
        uniform_dens = 1000
        line_dens = 20
    else:
        static_dens = 450
        uniform_dens = 1500
        line_dens = 30

    if fw_spec['analysis']['bandgap'] <= self.metal_cutoff:
        user_incar_settings = {"ISMEAR": 1, "SIGMA": 0.2}
    else:
        user_incar_settings = {}

    print('Adding more runs...')

    type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'
    snl = StructureNL.from_dict(fw_spec['mpsnl'])
    f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula

    fws = []
    connections = {}
    priority = fw_spec['_priority']
    trackers = [Tracker('FW_job.out'), Tracker('FW_job.error'),
                Tracker('vasp.out'), Tracker('OUTCAR'), Tracker('OSZICAR')]
    trackers_db = [Tracker('FW_job.out'), Tracker('FW_job.error')]

    # run GGA static
    spec = fw_spec  # pass all the items from the current spec to the new one
    spec.update({'task_type': '{} static v2'.format(type_name),
                 '_queueadapter': QA_VASP_SMALL,
                 '_dupefinder': DupeFinderVasp().to_dict(),
                 '_priority': priority, '_trackers': trackers})
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': True, 'skip_CHGCAR': True}),
         SetupStaticRunTask({"kpoints_density": static_dens,
                             'user_incar_settings': user_incar_settings}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-10))

    # insert into DB - GGA static
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask()], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-9))
    connections[-10] = -9

    # run GGA Uniform
    spec = {'task_type': '{} Uniform v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority, '_trackers': trackers}
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': False}),
         SetupNonSCFTask({'mode': 'uniform', "kpoints_density": uniform_dens}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-8))
    connections[-9] = -8

    # insert into DB - GGA Uniform
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask({'parse_uniform': True})], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-7))
    connections[-8] = -7

    # run GGA Band structure
    spec = {'task_type': '{} band structure v2'.format(type_name),
            '_queueadapter': QA_VASP,
            '_dupefinder': DupeFinderVasp().to_dict(),
            '_priority': priority, '_trackers': trackers}
    fws.append(Firework(
        [VaspCopyTask({'use_CONTCAR': False}),
         SetupNonSCFTask({'mode': 'line', "kpoints_line_density": line_dens}),
         get_custodian_task(spec)],
        spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-6))
    connections[-7] = [-6]

    # insert into DB - GGA Band structure
    spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
            '_allow_fizzled_parents': True, '_priority': priority * 2,
            "_dupefinder": DupeFinderDB().to_dict(), '_trackers': trackers_db}
    fws.append(Firework([VaspToDBTask({})], spec,
                        name=get_slug(f + '--' + spec['task_type']), fw_id=-5))
    connections[-6] = -5

    if fw_spec.get('parameters') and fw_spec['parameters'].get('boltztrap'):
        # run Boltztrap
        from mpworks.firetasks.boltztrap_tasks import BoltztrapRunTask
        spec = {'task_type': '{} Boltztrap'.format(type_name),
                '_queueadapter': QA_DB,
                '_dupefinder': DupeFinderDB().to_dict(),
                '_priority': priority}
        fws.append(Firework([BoltztrapRunTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-4))
        connections[-7].append(-4)

    wf = Workflow(fws, connections)
    print('Done adding more runs...')
    return FWAction(additions=wf)
def task_dict_to_wf(task_dict, launchpad):
    fw_id = launchpad.get_new_fw_id()
    l_id = launchpad.get_new_launch_id()

    spec = {"task_type": task_dict["task_type"],
            "run_tags": task_dict["run_tags"],
            "vaspinputset_name": None, "vasp": None,
            "mpsnl": task_dict["snl"],
            "snlgroup_id": task_dict["snlgroup_id"]}
    tasks = [DummyLegacyTask()]

    launch_dir = task_dict["dir_name_full"]

    stored_data = {"error_list": []}
    update_spec = {"prev_vasp_dir": task_dict["dir_name"],
                   "prev_task_type": spec["task_type"],
                   "mpsnl": spec["mpsnl"],
                   "snlgroup_id": spec["snlgroup_id"],
                   "run_tags": spec["run_tags"]}

    fwaction = FWAction(stored_data=stored_data, update_spec=update_spec)

    if task_dict["completed_at"]:
        complete_date = datetime.datetime.strptime(task_dict["completed_at"],
                                                   "%Y-%m-%d %H:%M:%S")
        state_history = [{"created_on": complete_date, "state": "COMPLETED"}]
    else:
        state_history = []

    launches = [Launch("COMPLETED", launch_dir, fworker=None, host=None,
                       ip=None, action=fwaction, state_history=state_history,
                       launch_id=l_id, fw_id=fw_id)]

    f = Composition(task_dict["pretty_formula"]).alphabetical_formula

    fw = Firework(tasks, spec, name=get_slug(f + "--" + spec["task_type"]),
                  launches=launches, state="COMPLETED", created_on=None,
                  fw_id=fw_id)

    wf_meta = get_meta_from_structure(Structure.from_dict(task_dict["snl"]))
    wf_meta["run_version"] = "preproduction (0)"

    wf = Workflow.from_FireWork(fw, name=f, metadata=wf_meta)

    launchpad.add_wf(wf, reassign_all=False)
    launchpad._upsert_launch(launches[0])

    print("ADDED", fw_id)
    return fw_id
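# A hedged sketch of converting one legacy task document. The field names read
# by task_dict_to_wf (task_type, dir_name, snl, ...) come from the function
# above, but the MongoDB connection details, database/collection names, and
# the task_id value are all placeholders for a particular deployment.
from fireworks import LaunchPad
from pymongo import MongoClient

lp = LaunchPad.auto_load()
tasks = MongoClient('localhost', 27017).mp_prod.tasks  # hypothetical collection
task_dict = tasks.find_one({'task_id': 12345})  # placeholder id
if task_dict:
    new_fw_id = task_dict_to_wf(task_dict, lp)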
def run_task(self, fw_spec):
    print('sleeping 10s for Mongo')
    time.sleep(10)
    print('done sleeping')
    print('the gap is {}, the cutoff is {}'.format(
        fw_spec['analysis']['bandgap'], self.gap_cutoff))

    if fw_spec['analysis']['bandgap'] >= self.gap_cutoff:
        print('Adding more runs...')
        type_name = 'GGA+U' if 'GGA+U' in fw_spec['prev_task_type'] else 'GGA'
        snl = StructureNL.from_dict(fw_spec['mpsnl'])
        f = Composition.from_formula(
            snl.structure.composition.reduced_formula).alphabetical_formula

        fws = []
        connections = {}
        priority = fw_spec['_priority']

        # run GGA static
        spec = fw_spec  # pass all the items from the current spec to the new one
        spec.update({'task_type': '{} static'.format(type_name),
                     '_queueadapter': QA_VASP,
                     '_dupefinder': DupeFinderVasp().to_dict(),
                     '_priority': priority})
        fws.append(FireWork(
            [VaspCopyTask({'use_CONTCAR': True}), SetupStaticRunTask(),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-10))

        # insert into DB - GGA static
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority}
        fws.append(FireWork([VaspToDBTask()], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-9))
        connections[-10] = -9

        # run GGA Uniform
        spec = {'task_type': '{} Uniform'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority}
        fws.append(FireWork(
            [VaspCopyTask({'use_CONTCAR': False}),
             SetupNonSCFTask({'mode': 'uniform'}),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-8))
        connections[-9] = -8

        # insert into DB - GGA Uniform
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority}
        fws.append(FireWork([VaspToDBTask({'parse_uniform': True})], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-7))
        connections[-8] = -7

        # run GGA Band structure
        spec = {'task_type': '{} band structure'.format(type_name),
                '_queueadapter': QA_VASP,
                '_dupefinder': DupeFinderVasp().to_dict(),
                '_priority': priority}
        fws.append(FireWork(
            [VaspCopyTask({'use_CONTCAR': False}),
             SetupNonSCFTask({'mode': 'line'}),
             get_custodian_task(spec)],
            spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-6))
        connections[-7] = -6

        # insert into DB - GGA Band structure
        spec = {'task_type': 'VASP db insertion', '_queueadapter': QA_DB,
                '_allow_fizzled_parents': True, '_priority': priority}
        fws.append(FireWork([VaspToDBTask({})], spec,
                            name=get_slug(f + '--' + spec['task_type']),
                            fw_id=-5))
        connections[-6] = -5

        wf = Workflow(fws, connections)
        print('Done adding more runs...')
        return FWAction(additions=wf)
    return FWAction()