def rpath_ref_idx(ts_dct, scn_vals, coord_name, scn_prefix,
                  ene_info1, ene_info2):
    """ Get the reference energy along a reaction path
    """

    # Set up the filesystem
    zma_fs = autofile.fs.zmatrix(scn_prefix)
    zma_path = zma_fs[-1].path([0])
    scn_fs = autofile.fs.scan(zma_path)

    ene_info1 = ene_info1[1][0][1]
    ene_info2 = ene_info2[0]
    ioprinter.debug_message('mod_eneinf1', ene_info1)
    ioprinter.debug_message('mod_eneinf2', ene_info2)
    # modify_orb_label expects (thy_info, spc_info), as it is called
    # everywhere else in these modules
    mod_ene_info1 = tinfo.modify_orb_label(
        ene_info1, sinfo.from_dct(ts_dct))
    mod_ene_info2 = tinfo.modify_orb_label(
        ene_info2, sinfo.from_dct(ts_dct))

    ene1, ene2, ref_val = None, None, None
    for val in reversed(scn_vals):
        locs = [[coord_name], [val]]
        path = scn_fs[-1].path(locs)
        hs_fs = autofile.fs.high_spin(path)
        if hs_fs[-1].file.energy.exists(mod_ene_info1[1:4]):
            ene1 = hs_fs[-1].file.energy.read(mod_ene_info1[1:4])
        if hs_fs[-1].file.energy.exists(mod_ene_info2[1:4]):
            ene2 = hs_fs[-1].file.energy.read(mod_ene_info2[1:4])
        if ene1 is not None and ene2 is not None:
            ref_val = val
            break

    # Always return a three-element tuple; scn_idx is None if no reference
    # point with both energies was found
    scn_idx = scn_vals.index(ref_val) if ref_val is not None else None

    return scn_idx, ene1, ene2
def set_rpath_filesys(ts_dct, level):
    """ Gets filesystem objects for reading many calculations
    """

    # Set the spc_info
    spc_info = sinfo.from_dct(ts_dct)

    # Set some path stuff
    save_path = ts_dct['rxn_fs'][3]
    run_path = ts_dct['rxn_fs'][2]

    # Set theory filesystem used throughout
    thy_save_fs = autofile.fs.theory(save_path)
    thy_run_fs = autofile.fs.theory(run_path)

    levelp = tinfo.modify_orb_label(level[1], spc_info)

    # Get the save filesystem path
    save_path = thy_save_fs[-1].path(levelp[1:4])
    run_path = thy_run_fs[-1].path(levelp[1:4])

    thy_save_fs[-1].create(levelp[1:4])
    thy_run_fs[-1].create(levelp[1:4])

    thy_save_path = thy_save_fs[-1].path(levelp[1:4])
    thy_run_path = thy_run_fs[-1].path(levelp[1:4])

    ts_save_fs = autofile.fs.transition_state(thy_save_path)
    ts_save_fs[0].create()
    ts_save_path = ts_save_fs[0].path()

    ts_run_fs = autofile.fs.transition_state(thy_run_path)
    ts_run_fs[0].create()
    ts_run_path = ts_run_fs[0].path()

    return ts_run_path, ts_save_path, thy_run_path, thy_save_path
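
# Illustrative sketch (not part of the original module): shows how a caller
# might unpack the four paths returned by set_rpath_filesys().  The ts_dct and
# level arguments are assumed to be the parsed TS dictionary and the theory
# level entry used elsewhere in these drivers; the helper itself is
# hypothetical.
def _example_rpath_paths(ts_dct, level):
    """ Hypothetical sketch showing the return order of set_rpath_filesys() """
    ts_run_path, ts_save_path, thy_run_path, thy_save_path = (
        set_rpath_filesys(ts_dct, level))
    return {'ts': (ts_run_path, ts_save_path),
            'thy': (thy_run_path, thy_save_path)}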
def thermo_paths(spc_dct, spc_queue, spc_mods, run_prefix):
    """ Set up the path for saving the pf input and output.
        Placed in a MESSPF, NASA dirs high in run filesys.
    """

    thm_paths = []
    for spc_name in spc_queue:
        thm_path = {}
        for idx, mod in enumerate(spc_mods):
            spc_info = sinfo.from_dct(spc_dct[spc_name])
            spc_formula = automol.inchi.formula_string(spc_info[0])
            thm_prefix = [spc_formula, automol.inchi.inchi_key(spc_info[0])]
            print('thm_prefix test:', thm_prefix)
            thm_path[mod] = (
                job_path(run_prefix, 'MESS', 'PF', thm_prefix, locs_idx=idx),
                job_path(run_prefix, 'THERM', 'NASA', thm_prefix,
                         locs_idx=idx))
        thm_paths.append(thm_path)

    return thm_paths
def from_dct(reacs, prods, spc_dct, rxn_mul='low', sort=False):
    """ Prepare rxn info and reverse the reactants and products
        if the reaction is endothermic
    """

    # Build the tuples of the reacs+prods infos
    rxn_ichs, rxn_chgs, rxn_muls = tuple(), tuple(), tuple()
    for side in (reacs, prods):
        ichs, chgs, muls = tuple(), tuple(), tuple()
        for rct in side:
            spc_info = spc.from_dct(spc_dct[rct])
            ichs += (spc.value(spc_info, par.SPC.INCHI),)
            chgs += (spc.value(spc_info, par.SPC.CHARGE),)
            muls += (spc.value(spc_info, par.SPC.MULT),)
        rxn_ichs += (ichs,)
        rxn_chgs += (chgs,)
        rxn_muls += (muls,)

    # Determine the multiplicity of the full reaction
    _, ts_mul_low, ts_mul_high = rxn_chg_mult(rxn_muls, rxn_chgs)
    ts_mul = ts_mul_low if rxn_mul == 'low' else ts_mul_high

    rxn_info = (rxn_ichs, rxn_chgs, rxn_muls, ts_mul)
    if sort:
        # NOTE: the boolean `sort` flag shadows the module-level sort()
        # function that is meant to do the sorting, so look that sorter up
        # explicitly (it is assumed to be defined in this module)
        rxn_info = globals()['sort'](rxn_info)

    return rxn_info
def set_etrans_well(rxn_lst, spc_dct):
    """ Build info object for reference well on PES
    """

    well_dct = None

    _, (reacs, prods) = rxn_lst[0]
    if len(reacs) == 1:
        well_dct = spc_dct[reacs[0]]
    elif len(prods) == 1:
        well_dct = spc_dct[prods[0]]
    else:
        rct1_dct = spc_dct[reacs[0]]
        rct2_dct = spc_dct[reacs[1]]
        rct1_count = automol.geom.count(
            automol.inchi.geometry(rct1_dct['inchi']))
        rct2_count = automol.geom.count(
            automol.inchi.geometry(rct2_dct['inchi']))
        if rct1_count > rct2_count:
            well_dct = rct1_dct
        else:
            well_dct = rct2_dct

    well_info = sinfo.from_dct(well_dct)

    return well_info
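
# Illustrative sketch (assumption): set_etrans_well() is typically paired with
# set_bath() (defined later in this section) to assemble the inputs for the
# energy-transfer strings.  rxn_lst, spc_dct, and etrans_dct are placeholders
# for the parsed mechanism objects used throughout these drivers; the helper
# itself is hypothetical.
def _example_etrans_inputs(rxn_lst, spc_dct, etrans_dct):
    """ Hypothetical pairing of the PES reference well with the bath info """
    well_info = set_etrans_well(rxn_lst, spc_dct)
    bath_info = set_bath(spc_dct, etrans_dct)
    return well_info, bath_info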
def from_dct(reacs, prods, spc_dct, rxn_mul='low'):
    """ Construct a full reaction info object using the names of the
        reactants and products, which are used to read a species
        dictionary for the required physical information.

        :param reacs: names of the reactants
        :type reacs: tuple(str)
        :param prods: names of the products
        :type prods: tuple(str)
        :param spc_dct: species information dict[spc_name: spc_information]
        :type spc_dct: dict[str: dict]
        :param rxn_mul: multiplicity of reaction to store in object
        :type rxn_mul: str
    """

    # Build the tuples of the reacs+prods infos
    rxn_ichs, rxn_chgs, rxn_muls = tuple(), tuple(), tuple()
    for side in (reacs, prods):
        ichs, chgs, muls = tuple(), tuple(), tuple()
        for rct in side:
            spc_info = spc.from_dct(spc_dct[rct])
            ichs += (spc.value(spc_info, par.SPC.INCHI),)
            chgs += (spc.value(spc_info, par.SPC.CHARGE),)
            muls += (spc.value(spc_info, par.SPC.MULT),)
        rxn_ichs += (ichs,)
        rxn_chgs += (chgs,)
        rxn_muls += (muls,)

    # Determine the multiplicity of the full reaction
    fake_rxn_info = (rxn_ichs, rxn_chgs, rxn_muls, ())
    ts_mul = ts_mult(fake_rxn_info, rxn_mul=rxn_mul)
    rxn_info = (rxn_ichs, rxn_chgs, rxn_muls, ts_mul)

    return rxn_info
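
# Illustrative sketch (assumption): building a reaction info object for a
# hypothetical abstraction channel.  The species names are placeholders that
# would have to exist in spc_dct; only the call pattern and the shape of the
# returned tuple (ichs, chgs, muls, ts_mul) follow from the function above.
def _example_rxn_info(spc_dct):
    """ Hypothetical usage of from_dct() for a two-reactant channel """
    rxn_info = from_dct(('CH4', 'OH'), ('CH3', 'H2O'), spc_dct, rxn_mul='low')
    rxn_ichs, rxn_chgs, rxn_muls, ts_mul = rxn_info
    return rxn_ichs, rxn_chgs, rxn_muls, ts_mul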
def instability_transformation(spc_dct, spc_name, thy_info, save_prefix,
                               zma_locs=(0, )):
    """ See if a species is unstable and handle task management
    """

    spc_info = sinfo.from_dct(spc_dct[spc_name])
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)

    _, cnf_save_fs = build_fs(
        '', save_prefix, 'CONFORMER',
        spc_locs=spc_info,
        thy_locs=mod_thy_info[1:])

    # Check if any locs exist first?
    ini_loc_info = min_energy_conformer_locators(cnf_save_fs, mod_thy_info)
    _, min_cnf_path = ini_loc_info

    zma_save_fs = autofile.fs.zmatrix(min_cnf_path)

    # Check if the instability files exist
    if zma_save_fs[-1].file.instability.exists(zma_locs):
        instab_trans = zma_save_fs[-1].file.instability.read(zma_locs)
        zma = zma_save_fs[-1].file.zmatrix.read(zma_locs)
        _instab = (instab_trans, zma)
        path = zma_save_fs[-1].file.zmatrix.path(zma_locs)
    else:
        _instab = None
        path = None

    return _instab, path
def conformer_list_from_models(spc_name, thy_dct, print_keyword_dct,
                               save_prefix, run_prefix,
                               spc_dct_i, spc_mod_dct_i):
    """ Create a list of conformers based on the species name
        and model.dat info
    """
    # conformer range
    cnf_range = _set_conf_range(print_keyword_dct)
    hbond_cutoffs = spc_dct_i['hbond_cutoffs']

    # thy_info build
    thy_info = spc_mod_dct_i['vib']['geolvl'][1][1]
    spc_info = sinfo.from_dct(spc_dct_i)
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
    sort_info_lst = _set_sort_info_lst(
        print_keyword_dct['sort'], thy_dct, spc_info)

    zrxn = spc_dct_i.get('zrxn', None)
    _root = filesys.root_locs(spc_dct_i, name=spc_name,
                              saddle=(zrxn is not None))
    _, cnf_save_fs = filesys.build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        thy_locs=mod_thy_info[1:],
        **_root)
    rng_cnf_locs_lst, rng_cnf_locs_path = filesys.mincnf.conformer_locators(
        cnf_save_fs, mod_thy_info,
        cnf_range=cnf_range, sort_info_lst=sort_info_lst,
        hbond_cutoffs=hbond_cutoffs, print_enes=True)

    return cnf_save_fs, rng_cnf_locs_lst, rng_cnf_locs_path, mod_thy_info
def set_model_filesys(spc_dct_i, level, run_prefix, save_prefix, saddle,
                      name=None, rings='all'):
    """ Gets filesystem objects for reading many calculations
    """

    # Set the spc_info
    if saddle:
        rxn_info = spc_dct_i['rxn_info']
        spc_info = rinfo.ts_info(rxn_info)
    else:
        spc_info = sinfo.from_dct(spc_dct_i)

    ioprinter.debug_message('level', level)
    levelp = tinfo.modify_orb_label(level, spc_info)

    _root = root_locs(spc_dct_i, saddle=saddle, name=name)
    cnf_run_fs, cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        thy_locs=levelp[1:],
        **_root)

    if rings == 'min':
        min_rngs_locs, min_rngs_path = mincnf.min_energy_conformer_locators(
            cnf_save_fs, levelp)
        cnf_run_fs[-1].create(min_rngs_locs)
    else:
        min_rngs_locs, min_rngs_path = mincnf.conformer_locators(
            cnf_save_fs, levelp, cnf_range='r100')
        for min_locs in min_rngs_locs:
            cnf_run_fs[-1].create(min_locs)
    ioprinter.debug_message('model filesys', min_rngs_locs, min_rngs_path)

    # Create run fs if that directory has been deleted to run the jobs

    return [cnf_save_fs, min_rngs_path, min_rngs_locs, '', cnf_run_fs]
def build_rotors(spc_dct_i, pf_filesystems, spc_mod_dct_i,
                 read_potentials=True):
    """ Build the hindered-rotor objects for a species from the saved
        z-matrix and torsion files, optionally reading the potentials.
    """

    run_prefix = pf_filesystems['run_prefix']
    spc_info = sinfo.from_dct(spc_dct_i)
    spc_fml = automol.inchi.formula_string(spc_info[0])
    if spc_fml is None:
        spc_fml = 'TS'
    run_path = job_path(run_prefix, 'PROJROT', 'FREQ', spc_fml, locs_idx=None)

    # Set up tors level filesystem and model and level
    tors_model = spc_mod_dct_i['tors']['mod']
    tors_ene_info = spc_mod_dct_i['tors']['enelvl'][1][1]
    mod_tors_ene_info = tinfo.modify_orb_label(
        tors_ene_info, sinfo.from_dct(spc_dct_i))

    rotors = None
    if pf_filesystems['tors'] is not None:
        [cnf_fs, cnf_save_path, min_cnf_locs, _, _] = pf_filesystems['tors']

        # Build the rotors
        ref_ene = filesys.read.energy(cnf_fs, min_cnf_locs, mod_tors_ene_info)
        zma_fs = fs.zmatrix(cnf_fs[-1].path(min_cnf_locs))
        if (
            zma_fs[-1].file.torsions.exists([0]) and
            zma_fs[-1].file.zmatrix.exists([0]) and
            tors_model != 'rigid'
        ):
            rotors = automol.rotor.from_data(
                zma=zma_fs[-1].file.zmatrix.read([0]),
                tors_inf_dct=zma_fs[-1].file.torsions.read([0]),
                tors_names=spc_dct_i.get('tors_names', None),
                multi=bool('1d' in tors_model))

    # Read the potential grids
    if read_potentials and rotors is not None:
        rotors = _read_potentials(
            rotors, spc_dct_i, run_path, cnf_save_path,
            ref_ene, mod_tors_ene_info, tors_model)

    return rotors
def set_model_filesys(spc_dct_i, level, run_prefix, save_prefix, saddle,
                      name=None, cnf_range='min', spc_locs=None, nprocs=1):
    """ Gets filesystem objects for reading many calculations
    """

    # Set the spc_info
    if saddle:
        rxn_info = spc_dct_i['rxn_info']
        spc_info = rinfo.ts_info(rxn_info)
    else:
        spc_info = sinfo.from_dct(spc_dct_i)

    levelp = tinfo.modify_orb_label(level, spc_info)

    _root = root_locs(spc_dct_i, saddle=saddle, name=name)
    cnf_run_fs, cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        thy_locs=levelp[1:],
        **_root)
    hbond_cutoffs = spc_dct_i['hbond_cutoffs']

    if cnf_range == 'specified':
        min_rngs_locs = spc_locs
        min_rngs_path = cnf_save_fs[-1].path(min_rngs_locs)
        cnf_run_fs[-1].create(min_rngs_locs)
    elif cnf_range == 'min':
        min_rngs_locs, min_rngs_path = min_energy_conformer_locators(
            cnf_save_fs, levelp, hbond_cutoffs=hbond_cutoffs)
        cnf_run_fs[-1].create(min_rngs_locs)
    else:
        min_rngs_locs_lst, min_rngs_path_lst = conformer_locators(
            cnf_save_fs, levelp, cnf_range=cnf_range,
            hbond_cutoffs=hbond_cutoffs, nprocs=nprocs)
        for min_locs in min_rngs_locs_lst:
            cnf_run_fs[-1].create(min_locs)
        min_rngs_locs = min_rngs_locs_lst[0]
        min_rngs_path = min_rngs_path_lst[0]
        ioprinter.warning_message(
            'Only returning first location in this list')

    # Create run fs if that directory has been deleted to run the jobs

    return [cnf_save_fs, min_rngs_path, min_rngs_locs, '', cnf_run_fs]
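
# Illustrative sketch (assumption): the five-element list returned by
# set_model_filesys() is unpacked positionally elsewhere in these drivers
# (save filesystem, conformer path, conformer locators, a placeholder string,
# run filesystem).  The wrapper below is hypothetical and only documents that
# return order.
def _example_unpack_model_filesys(spc_dct_i, level, run_prefix, save_prefix):
    """ Hypothetical usage showing the return order of set_model_filesys() """
    [cnf_save_fs, cnf_save_path, min_locs, _, cnf_run_fs] = set_model_filesys(
        spc_dct_i, level, run_prefix, save_prefix,
        saddle=False, cnf_range='min')
    return cnf_save_fs, cnf_save_path, min_locs, cnf_run_fs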
def electronic_energy(spc_dct_i, pf_filesystems, spc_model_dct_i, conf=None):
    """ get high level energy at low level optimized geometry
    """

    ioprinter.info_message('- Calculating electronic energy')

    # spc_dct_i = spc_dct[spc_name]
    rxn_info = spc_dct_i.get('rxn_info', None)
    if rxn_info is not None:
        spc_info = rinfo.ts_info(rxn_info)
    else:
        spc_info = sinfo.from_dct(spc_dct_i)

    # Get the harmonic filesys information
    if conf:
        cnf_path = conf[1]
    else:
        [_, cnf_path, _, _, _] = pf_filesystems['harm']

    # Get the electronic energy levels
    ene_levels = tuple(val[1] for key, val in spc_model_dct_i['ene'].items()
                       if 'lvl' in key)
    ioprinter.debug_message('ene levels', ene_levels)

    # Read the energies from the filesystem
    e_elec = None
    if os.path.exists(cnf_path):

        e_elec = 0.0
        # ioprinter.info_message('lvls', ene_levels)
        for (coeff, level) in ene_levels:
            # Build SP filesys
            mod_thy_info = tinfo.modify_orb_label(level, spc_info)
            sp_save_fs = autofile.fs.single_point(cnf_path)
            sp_save_fs[-1].create(mod_thy_info[1:4])
            # Read the energy
            sp_path = sp_save_fs[-1].path(mod_thy_info[1:4])
            if os.path.exists(sp_path):
                ioprinter.reading('Energy', sp_path)
                ene = sp_save_fs[-1].file.energy.read(mod_thy_info[1:4])
                e_elec += (coeff * ene)
            else:
                ioprinter.warning_message('No energy at path')
                e_elec = None
                break
    else:
        ioprinter.warning_message('No conformer to calculate the energy')

    return e_elec
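
# Illustrative sketch (assumption): each entry of the model's 'ene' block is a
# (coefficient, theory-level) pair, and the composite electronic energy is the
# coefficient-weighted sum of the single-point energies read above, e.g. a
# hypothetical two-level scheme E = 1.0*E[lvl1] + (-0.5)*E[lvl2].  The mapping
# `energies_by_level` is a placeholder standing in for the filesystem reads.
def _example_composite_energy(energies_by_level, ene_levels):
    """ Hypothetical weighted sum mirroring the loop in electronic_energy();
        energies_by_level maps each theory level to an energy in hartree.
    """
    e_elec = 0.0
    for (coeff, level) in ene_levels:
        e_elec += coeff * energies_by_level[level]
    return e_elec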
def thermo_paths(spc_dct, spc_locs_dct, spc_mods, run_prefix):
    """ Set up the path for saving the pf input and output.
        Placed in a MESSPF, NASA dirs high in run filesys.
    """

    thm_path_dct = {}
    for spc_name in spc_locs_dct:
        spc_thm_path_dct = {}
        spc_info = sinfo.from_dct(spc_dct[spc_name])
        spc_formula = automol.inchi.formula_string(spc_info[0])
        thm_prefix = [spc_formula, automol.inchi.inchi_key(spc_info[0])]
        spc_locs_lst = spc_locs_dct[spc_name]
        for sidx, spc_locs in enumerate(spc_locs_lst, start=1):
            spc_mod_thm_path_dct = {}
            for midx, mod in enumerate(spc_mods):
                idx = sidx * 10 + midx
                spc_mod_thm_path_dct[mod] = (
                    job_path(run_prefix, 'MESS', 'PF', thm_prefix,
                             locs_idx=idx),
                    job_path(run_prefix, 'THERM', 'NASA', thm_prefix,
                             locs_idx=idx))
            spc_mod_thm_path_dct['mod_total'] = (
                job_path(run_prefix, 'MESS', 'PF', thm_prefix,
                         locs_idx=sidx),
                job_path(run_prefix, 'THERM', 'NASA', thm_prefix,
                         locs_idx=sidx))
            spc_thm_path_dct[tuple(spc_locs)] = spc_mod_thm_path_dct
        spc_thm_path_dct['spc_total'] = (
            job_path(run_prefix, 'MESS', 'PF', thm_prefix,
                     locs_idx=0),
            job_path(run_prefix, 'THERM', 'NASA', thm_prefix,
                     locs_idx=0))
        thm_path_dct[spc_name] = spc_thm_path_dct

    return thm_path_dct
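
# Illustrative sketch (assumption): the nested dictionary built by
# thermo_paths() is keyed by species name, then conformer locators (plus a
# 'spc_total' entry), then model (plus a 'mod_total' entry); every value is a
# (messpf_path, nasa_path) pair.  The lookup helper is hypothetical.
def _example_lookup_thermo_paths(thm_path_dct, spc_name, spc_locs, spc_mod):
    """ Hypothetical lookup of the MESSPF/NASA paths for one conformer+model """
    messpf_path, nasa_path = thm_path_dct[spc_name][tuple(spc_locs)][spc_mod]
    spc_total_paths = thm_path_dct[spc_name]['spc_total']
    return messpf_path, nasa_path, spc_total_paths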
def geom_init(spc_dct, spc_name, thy_dct, es_keyword_dct,
              run_prefix, save_prefix):
    """ Execute the task for a species used to seed the
        filesystem with a reliable initial conformer.

        :param spc_dct: species information dict[spc_name: spc_information]
        :type spc_dct: dict[str: dict]
        :param spc_name: name of species
        :type spc_name: str
        :param thy_dct: all of the theory information
        :type thy_dct: dict[str: dict]
        :param es_keyword_dct: keyword-val pairs for electronic structure task
        :type es_keyword_dct: dict[str:str]
        :param run_prefix: root-path to the run-filesystem
        :type run_prefix: str
        :param save_prefix: root-path to the save-filesystem
        :type save_prefix: str
    """

    spc_dct_i = spc_dct[spc_name]
    spc_info = sinfo.from_dct(spc_dct_i)

    # Get the theory info
    method_dct = thy_dct.get(es_keyword_dct['runlvl'])
    ini_method_dct = thy_dct.get(es_keyword_dct['inplvl'])
    thy_info = tinfo.from_dct(method_dct)
    ini_thy_info = tinfo.from_dct(ini_method_dct)
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
    mod_ini_thy_info = tinfo.modify_orb_label(ini_thy_info, spc_info)

    # Set the filesystem objects
    _, ini_cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        spc_locs=spc_info, thy_locs=mod_ini_thy_info[1:])
    cnf_run_fs, cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        spc_locs=spc_info, thy_locs=mod_thy_info[1:])

    # Get a reference geometry if one not found
    success = conformer.initial_conformer(
        spc_dct_i, spc_info, ini_method_dct, method_dct,
        ini_cnf_save_fs, cnf_run_fs, cnf_save_fs,
        es_keyword_dct)

    return success
def root_locs(spc_dct_i, saddle=False, name=None):
    """ Set the root locators for the species and TS
    """

    if not saddle:
        spc_info = sinfo.from_dct(spc_dct_i)
        rxn_info = None
        ts_info = None
    else:
        spc_info = None
        rxn_info = rinfo.sort(spc_dct_i['rxn_info'])
        ts_num = int(name.split('_')[-1])
        ts_info = (ts_num, )

    return {'spc_locs': spc_info, 'rxn_locs': rxn_info, 'ts_locs': ts_info}
def set_bath(spc_dct, etrans_dct):
    """ Build info object for the bath
    """

    # Try to obtain bath set by the user, otherwise use Ar
    bath_name = etrans_dct.get('bath', None)
    bath_dct = spc_dct.get(bath_name, None)
    if bath_dct is not None:
        bath_info = sinfo.from_dct(bath_dct)
        ioprinter.info_message(f' - Using bath {bath_name} input by user')
    else:
        bath_info = ['InChI=1S/Ar', 0, 1]
        ioprinter.info_message(' - No bath provided, using Argon as bath')

    return bath_info
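
# Illustrative sketch (assumption): the bath is taken from the 'bath' keyword
# of the energy-transfer block when that species exists in spc_dct; otherwise
# the argon default above is returned.  The dictionaries here are placeholders
# showing only the call pattern.
def _example_bath_selection(spc_dct, etrans_dct):
    """ Hypothetical usage of set_bath() with and without a user bath """
    user_bath_info = set_bath(spc_dct, etrans_dct)   # uses etrans 'bath' entry
    default_bath_info = set_bath(spc_dct, {})        # falls back to Ar
    return user_bath_info, default_bath_info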
def geom_init(spc_dct, spc_name, thy_dct, es_keyword_dct,
              run_prefix, save_prefix):
    """ Find the initial geometry
    """

    spc_dct_i = spc_dct[spc_name]
    spc_info = sinfo.from_dct(spc_dct_i)

    # Get the theory info
    method_dct = thy_dct.get(es_keyword_dct['runlvl'])
    ini_method_dct = thy_dct.get(es_keyword_dct['inplvl'])
    thy_info = tinfo.from_dct(method_dct)
    ini_thy_info = tinfo.from_dct(ini_method_dct)
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
    mod_ini_thy_info = tinfo.modify_orb_label(ini_thy_info, spc_info)

    # Set the filesystem objects
    _, ini_cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        spc_locs=spc_info, thy_locs=mod_ini_thy_info[1:])
    cnf_run_fs, cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        spc_locs=spc_info, thy_locs=mod_thy_info[1:])
    _, instab_save_fs = build_fs(
        run_prefix, save_prefix, 'INSTAB',
        spc_locs=spc_info, thy_locs=mod_thy_info[1:])

    # Get a reference geometry if one not found
    success = conformer.initial_conformer(
        spc_dct_i, spc_info, ini_method_dct, method_dct,
        ini_cnf_save_fs, cnf_run_fs, cnf_save_fs, instab_save_fs,
        es_keyword_dct)

    return success
def conformer_list_from_models(print_keyword_dct, save_prefix, run_prefix,
                               spc_dct_i, spc_mod_dct_i):
    """ Create a list of conformers based on the species name
        and model.dat info
    """
    # conformer range
    cnf_range = _set_conf_range(print_keyword_dct)

    # thy_info build
    thy_info = spc_mod_dct_i['geo'][1]
    spc_info = sinfo.from_dct(spc_dct_i)
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
    _root = filesys.root_locs(spc_dct_i, saddle=False)
    _, cnf_save_fs = filesys.build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        thy_locs=mod_thy_info[1:],
        **_root)
    rng_cnf_locs_lst, rng_cnf_locs_path = filesys.mincnf.conformer_locators(
        cnf_save_fs, mod_thy_info, cnf_range=cnf_range)

    return cnf_save_fs, rng_cnf_locs_lst, rng_cnf_locs_path
def root_locs(spc_dct_i, saddle=False, name=None):
    """ Set the root locators for the species and TS
    """

    if not saddle:
        spc_info = sinfo.from_dct(spc_dct_i)
        rxn_info = None
        ts_info = None
    else:
        spc_info = None
        rxn_info = rinfo.sort(spc_dct_i['rxn_info'])
        ts_num = int(name.split('_')[-1])
        ts_info = (ts_num,)

    return {'spc_locs': spc_info, 'rxn_locs': rxn_info, 'ts_locs': ts_info}
def energy(spc_name, spc_dct_i,
           spc_mod_dct_i, proc_keyword_dct,
           thy_dct, locs, locs_path,
           cnf_fs, run_prefix, save_prefix):
    """ collect energy
    """

    saddle = 'ts_' in spc_name

    _ene = None
    # mod_thy_info is only set on the single-point branch below; default to
    # None so the miss_data record can always be built
    mod_thy_info = None
    if spc_mod_dct_i:
        pf_filesystems = filesys.models.pf_filesys(
            spc_dct_i, spc_mod_dct_i,
            run_prefix, save_prefix,
            name=spc_name, saddle=saddle)
        _ene = ene.electronic_energy(
            spc_dct_i, pf_filesystems, spc_mod_dct_i,
            conf=(locs, locs_path, cnf_fs))
    else:
        spc_info = sinfo.from_dct(spc_dct_i)
        thy_info = tinfo.from_dct(thy_dct.get(proc_keyword_dct['proplvl']))
        mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
        sp_save_fs = autofile.fs.single_point(locs_path)
        # Read the energy
        sp_path = sp_save_fs[-1].path(mod_thy_info[1:4])
        if os.path.exists(sp_path):
            if sp_save_fs[-1].file.energy.exists(mod_thy_info[1:4]):
                ioprinter.reading('Energy', sp_path)
                _ene = sp_save_fs[-1].file.energy.read(mod_thy_info[1:4])

    if _ene is not None:
        miss_data = None
    else:
        miss_data = (spc_name + '_'.join(locs), mod_thy_info, 'energy')

    return [locs_path, _ene], miss_data
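
# Illustrative sketch (assumption): callers of energy() typically keep the
# (path, energy) payload for a CSV row and accumulate the miss_data records
# into a "missing data" report.  Everything here is a placeholder except the
# call signature and return shape of energy() above.
def _example_collect_energy(spc_name, spc_dct_i, spc_mod_dct_i,
                            proc_keyword_dct, thy_dct, locs, locs_path,
                            cnf_fs, run_prefix, save_prefix, miss_lst):
    """ Hypothetical wrapper that records missing-energy entries """
    (path, _ene), miss_data = energy(
        spc_name, spc_dct_i, spc_mod_dct_i, proc_keyword_dct,
        thy_dct, locs, locs_path, cnf_fs, run_prefix, save_prefix)
    if miss_data is not None:
        miss_lst.append(miss_data)
    return path, _ene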
def _get_prop_fs(spc_model_dct_i, spc_dct_i, prop, sort_info_lst,
                 run_prefix, save_prefix, saddle=False, name=None):
    """ Get filesystem info for a property in the spc model dct
    """
    if saddle:
        rxn_info = spc_dct_i['rxn_info']
        spc_info = rinfo.ts_info(rxn_info)
    else:
        spc_info = sinfo.from_dct(spc_dct_i)

    level = spc_model_dct_i[prop]['geolvl'][1][1]
    levelp = tinfo.modify_orb_label(level, spc_info)

    mod_info_lst = []
    if sort_info_lst is not None:
        for info in sort_info_lst[:2]:
            if info is not None:
                mod_info_lst.append(tinfo.modify_orb_label(info, spc_info))
            else:
                mod_info_lst.append(info)
        for info in sort_info_lst[2:]:
            mod_info_lst.append(info)

    _root = root_locs(spc_dct_i, saddle=saddle, name=name)
    cnf_run_fs, cnf_save_fs = build_fs(
        run_prefix, save_prefix, 'CONFORMER',
        thy_locs=levelp[1:],
        **_root)

    return cnf_run_fs, cnf_save_fs, levelp, mod_info_lst
def instability_transformation(spc_dct, spc_name, thy_info, save_prefix,
                               zma_locs=(0,)):
    """ See if a species is unstable and handle task management
    """

    spc_info = sinfo.from_dct(spc_dct[spc_name])
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)

    _, zma_save_fs = build_fs(
        '', save_prefix, 'ZMATRIX',
        spc_locs=spc_info,
        thy_locs=mod_thy_info[1:],
        instab_locs=())

    # Check if the instability files exist
    if zma_save_fs[-1].file.reaction.exists(zma_locs):
        zrxn = zma_save_fs[-1].file.reaction.read(zma_locs)
        zma = zma_save_fs[-1].file.zmatrix.read(zma_locs)
        _instab = (zrxn, zma)
        path = zma_save_fs[-1].file.zmatrix.path(zma_locs)
    else:
        _instab = None
        path = None

    return _instab, path
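
# Illustrative sketch (assumption): a driver might use the return values to
# decide whether to swap an unstable species for its decomposition products.
# Only the call pattern and the (transformation, path) return shape come from
# the function above; the branching and helper name are hypothetical.
def _example_check_instability(spc_dct, spc_name, thy_info, save_prefix):
    """ Hypothetical check for a saved instability transformation """
    instab, path = instability_transformation(
        spc_dct, spc_name, thy_info, save_prefix)
    if instab is not None:
        zrxn, zma = instab
        return zrxn, zma, path
    return None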
def mol_data(spc_name, spc_dct, pes_mod_dct_i, spc_mod_dct_i,
             chn_basis_ene_dct, run_prefix, save_prefix,
             calc_chn_ene=True, zrxn=None, spc_locs=None):
    """ Reads all required data from the SAVE filesystem for a molecule.
        Stores data into an info dictionary.

        All of the data that is read is determined by the models that
        are described in the pes and spc model dictionaries.

        :param spc_dct: species information dict[spc_name: spc_information]
        :type spc_dct: dict[str: dict]
        :param pes_mod_dct_i: keyword dict of specific PES model
        :type pes_mod_dct_i: dict[]
        :param spc_mod_dct_i: keyword dict of specific species model
        :type spc_mod_dct_i: dict[]
        :param run_prefix: root-path to the run-filesystem
        :type run_prefix: str
        :param save_prefix: root-path to the save-filesystem
        :type save_prefix: str
        :rtype: dict[]
    """

    spc_dct_i = spc_dct[spc_name]
    ene_chnlvl = None
    ene_reflvl = None
    zpe = None
    hf0k = None
    hf0k_trs = None

    # Initialize all of the elements of the inf dct
    geom, sym_factor, freqs, imag, elec_levels = None, None, None, None, None
    allr_str, mdhr_dat = '', ''
    xmat, rovib_coups, rot_dists = None, None, None

    # Set up all the filesystem objects using models and levels
    pf_filesystems = filesys.models.pf_filesys(
        spc_dct_i, spc_mod_dct_i,
        run_prefix, save_prefix, zrxn is not None,
        name=spc_name, spc_locs=spc_locs)

    # Obtain rotation partition function information
    ioprinter.info_message(
        'Obtaining info for rotation partition function...', newline=1)
    geom = rot.read_geom(pf_filesystems)

    if typ.nonrigid_rotations(spc_mod_dct_i):
        rovib_coups, rot_dists = rot.read_rotational_values(pf_filesystems)

    # Obtain vibration partition function information
    ioprinter.info_message(
        'Preparing internal rotor info building partition functions...',
        newline=1)
    rotors = tors.build_rotors(spc_dct_i, pf_filesystems, spc_mod_dct_i)

    ioprinter.info_message(
        'Obtaining the vibrational frequencies and zpves...', newline=1)
    freqs, imag, zpe, _, tors_strs, _, _, _ = vib.full_vib_analysis(
        spc_dct_i, pf_filesystems, spc_mod_dct_i,
        run_prefix, zrxn=zrxn)
    allr_str = tors_strs[0]

    # ioprinter.info_message('zpe in mol_data test:', zpe)
    if typ.anharm_vib(spc_mod_dct_i):
        xmat = vib.read_anharmon_matrix(pf_filesystems)

    # Obtain symmetry factor
    ioprinter.info_message('Determining the symmetry factor...', newline=1)

    zma = None
    if zrxn:
        [_, cnf_save_path, _, _, _] = pf_filesystems['harm']
        # Build the rotors
        if cnf_save_path:
            zma_fs = autofile.fs.zmatrix(cnf_save_path)
            zma = zma_fs[-1].file.zmatrix.read([0])

    sym_factor = symm.symmetry_factor(
        pf_filesystems, spc_mod_dct_i, spc_dct_i, rotors,
        grxn=zrxn, zma=zma)

    # Obtain electronic energy levels
    elec_levels = spc_dct_i['elec_levels']

    # Obtain energy levels
    ioprinter.info_message(
        'Obtaining the electronic energy + zpve...', newline=1)
    if calc_chn_ene:
        chn_ene = ene.read_energy(
            spc_dct_i, pf_filesystems, spc_mod_dct_i,
            run_prefix, read_ene=True, read_zpe=False,
            saddle=zrxn is not None)
        ene_chnlvl = chn_ene + zpe

        zma = None

        # Determine info about the basis species used in thermochem calcs
        hf0k, hf0k_trs, chn_basis_ene_dct, _ = basis.enthalpy_calculation(
            spc_dct, spc_name, ene_chnlvl,
            chn_basis_ene_dct, pes_mod_dct_i,
            spc_mod_dct_i, run_prefix, save_prefix,
            zrxn=zrxn)

    ene_reflvl = None

    # Build the energy transfer section strings
    if zrxn is None:
        ioprinter.info_message(
            'Determining energy transfer parameters...', newline=1)
        well_info = sinfo.from_dct(spc_dct_i)
        # ioprinter.debug_message('well_inf', well_info)
        # bath_info = ['InChI=1S/N2/c1-2', 0, 1]  # how to do...
        bath_info = ['InChI=1S/Ar', 0, 1]  # how to do...
        etrans_dct = etrans.build_etrans_dct(spc_dct_i)

        edown_str, collid_freq_str = etrans.make_energy_transfer_strs(
            well_info, bath_info, etrans_dct)
    else:
        edown_str, collid_freq_str = None, None

    # Create info dictionary
    keys = ['geom', 'sym_factor', 'freqs', 'imag', 'elec_levels',
            'mess_hr_str', 'mdhr_dat',
            'xmat', 'rovib_coups', 'rot_dists',
            'ene_chnlvl', 'ene_reflvl', 'zpe_chnlvl', 'ene_tsref',
            'edown_str', 'collid_freq_str']
    vals = [geom, sym_factor, freqs, imag, elec_levels,
            allr_str, mdhr_dat,
            xmat, rovib_coups, rot_dists,
            hf0k, ene_reflvl, zpe, hf0k_trs,
            edown_str, collid_freq_str]
    inf_dct = dict(zip(keys, vals))

    return inf_dct, chn_basis_ene_dct
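
# Illustrative sketch (assumption): downstream writers consume the info
# dictionary returned by mol_data() by key.  The keys used below are exactly
# those zipped into inf_dct above (note that the 0 K heat of formation is
# stored under 'ene_chnlvl'); the consuming helper itself is hypothetical.
def _example_use_mol_data(inf_dct):
    """ Hypothetical consumer pulling a few entries out of mol_data()'s dict """
    geom = inf_dct['geom']
    freqs = inf_dct['freqs']
    sym_factor = inf_dct['sym_factor']
    hf0k = inf_dct['ene_chnlvl']
    return geom, freqs, sym_factor, hf0k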
def tau_tsk(job, spc_dct, spc_name, thy_dct, es_keyword_dct, run_prefix, save_prefix): """ Energies, gradients, and hessians, for set of arbitrarily sampled torsional coordinates with all other coordinates optimized """ spc_dct_i = spc_dct[spc_name] # Set the spc_info spc_info = sinfo.from_dct(spc_dct_i) # Get es options overwrite = es_keyword_dct['overwrite'] retryfail = es_keyword_dct['retryfail'] # scan_increment = spc_dct_i['hind_inc'] nsamp_par = spc_dct_i['tau_nsamp'] # Modify the theory method_dct = thy_dct.get(es_keyword_dct['runlvl']) ini_method_dct = thy_dct.get(es_keyword_dct['inplvl']) thy_info = tinfo.from_dct(method_dct) ini_thy_info = tinfo.from_dct(ini_method_dct) mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info) mod_ini_thy_info = tinfo.modify_orb_label(ini_thy_info, spc_info) # Script script_str, kwargs = qchem_params(method_dct, elstruct.Job.OPTIMIZATION) # Set the filesystem objects for thy info _, ini_cnf_save_fs = build_fs(run_prefix, save_prefix, 'CONFORMER', spc_locs=spc_info, thy_locs=mod_ini_thy_info[1:]) ini_loc_info = filesys.mincnf.min_energy_conformer_locators( ini_cnf_save_fs, mod_ini_thy_info) ini_min_cnf_locs, ini_min_cnf_path = ini_loc_info ini_min_rid, ini_min_cid = ini_min_cnf_locs ini_zma_save_fs = autofile.fs.zmatrix(ini_min_cnf_path) geo = ini_cnf_save_fs[-1].file.geometry.read(ini_min_cnf_locs) zma = ini_zma_save_fs[-1].file.zmatrix.read((0, )) ini_sp_save_fs = autofile.fs.single_point(ini_min_cnf_path) if ini_sp_save_fs[-1].file.energy.exists(mod_ini_thy_info[1:4]): ref_ene = ini_sp_save_fs[-1].file.energy.read(mod_ini_thy_info[1:4]) else: ref_ene = ini_cnf_save_fs[-1].file.energy.read(ini_min_cnf_locs) # Get the tors names ini_zma_save_fs = autofile.fs.zmatrix(ini_min_cnf_path) if ini_zma_save_fs[-1].file.torsions.exists([0]): tors_dct = ini_zma_save_fs[-1].file.torsions.read([0]) torsions = automol.rotor.from_data(zma, tors_dct) else: torsions = () saddle = bool('ts_' in spc_name) # Run the task if any torsions exist if torsions and not saddle: # Set up tau filesystem objects tau_run_fs, tau_save_fs = build_fs(run_prefix, save_prefix, 'TAU', spc_locs=spc_info, thy_locs=mod_thy_info[1:]) # db_style = 'jsondb' db_style = 'directory' if db_style == 'jsondb': tau_save_fs[-1].root.create() tau_save_fs[-1].json_create() for locs in tau_save_fs[-1].existing(): if tau_save_fs[-1].file.geometry.exists(locs): geol = tau_save_fs[-1].file.geometry.read(locs) tau_save_fs[-1].json.geometry.write(geol, locs) if tau_save_fs[-1].file.energy.exists(locs): enel = tau_save_fs[-1].file.energy.read(locs) tau_save_fs[-1].json.energy.write(enel, locs) if tau_save_fs[-1].file.geometry_info.exists(locs): geo_infl = tau_save_fs[-1].file.geometry_info.read(locs) tau_save_fs[-1].json.geometry_info.write(geo_infl, locs) if tau_save_fs[-1].file.geometry_input.exists(locs): inp_strl = tau_save_fs[-1].file.geometry_input.read(locs) tau_save_fs[-1].json.geometry_input.write(inp_strl, locs) if tau_save_fs[-1].file.gradient_input.exists(locs): inp_strl = tau_save_fs[-1].file.gradient_input.read(locs) tau_save_fs[-1].json.gradient_input.write(inp_strl, locs) if tau_save_fs[-1].file.hessian_input.exists(locs): inp_strl = tau_save_fs[-1].file.hessian_input.read(locs) tau_save_fs[-1].json.hessian_input.write(inp_strl, locs) if tau_save_fs[-1].file.gradient_info.exists(locs): inf_objl = tau_save_fs[-1].file.gradient_info.read(locs) tau_save_fs[-1].json.gradient_info.write(inf_objl, locs) if tau_save_fs[-1].file.hessian_info.exists(locs): inf_objl = 
tau_save_fs[-1].file.hessian_info.read(locs) tau_save_fs[-1].json.hessian_info.write(inf_objl, locs) if tau_save_fs[-1].file.gradient.exists(locs): gradl = tau_save_fs[-1].file.gradient.read(locs) tau_save_fs[-1].json.gradient.write(gradl, locs) if tau_save_fs[-1].file.hessian.exists(locs): hessl = tau_save_fs[-1].file.hessian.read(locs) tau_save_fs[-1].json.energy.hessian(hessl, locs) if tau_save_fs[-1].file.zmatrix.exists(locs): zmatl = tau_save_fs[-1].file.zmatrix.read(locs) tau_save_fs[-1].json.zmatrix.write(zmatl, locs) if tau_save_fs[-1].file.harmonic_frequencies.exists(locs): hfreql = tau_save_fs[-1].file.harmonic_frequencies.read( locs) tau_save_fs[-1].json.harmonic_frequencies.write( hfreql, locs) save_path = tau_save_fs[-1].path(locs) sp_save_fs = autofile.fs.single_point(save_path) sp_save_locs = sp_save_fs[-1].existing() save_path = tau_save_fs[-1].root.path() jsp_save_fs = autofile.fs.single_point(save_path, json_layer=locs) for sp_locs in sp_save_locs: if sp_save_fs[-1].file.energy.exists(sp_locs): enel = sp_save_fs[-1].file.energy.read(sp_locs) jsp_save_fs[-1].json.energy.write(enel, sp_locs) if sp_save_fs[-1].file.input.exists(sp_locs): inp_strl = sp_save_fs[-1].file.input.read(sp_locs) jsp_save_fs[-1].json.input.write(inp_strl, sp_locs) if sp_save_fs[-1].file.info.exists(sp_locs): inf_objl = sp_save_fs[-1].file.info.read(sp_locs) jsp_save_fs[-1].json.info.write(inf_objl, sp_locs) if job == 'samp': # Set up the script script_str, kwargs = qchem_params(method_dct, elstruct.Job.OPTIMIZATION) tors_names = automol.rotor.names(torsions, flat=True) # Run sampling tau.tau_sampling(zma, ref_ene, spc_info, tors_names, nsamp_par, mod_ini_thy_info, tau_run_fs, tau_save_fs, script_str, overwrite, saddle=saddle, **kwargs) elif job in ('energy', 'grad'): # Set up the run scripts script_str, kwargs = qchem_params(method_dct) # Run the job over all the conformers requested by the user for locs in tau_save_fs[-1].existing(): geo_run_path = tau_run_fs[-1].path(locs) if db_style == 'jsondb': geo_save_path = tau_save_fs[-1].root.path() geo = tau_save_fs[-1].json.geometry.read(locs) elif db_style == 'directory': geo_save_path = tau_save_fs[-1].path(locs) geo = tau_save_fs[-1].file.geometry.read(locs) tau_run_fs[-1].create(locs) zma = None ES_TSKS[job](zma, geo, spc_info, mod_thy_info, tau_save_fs, geo_run_path, geo_save_path, locs, script_str, overwrite, retryfail=retryfail, **kwargs) ioprinter.obj('vspace') elif job == 'hess': # Add the hessian max hessmax = es_keyword_dct['hessmax'] # Set up the run scripts script_str, kwargs = qchem_params(method_dct) # Run the job over all the conformers requested by the user hess_cnt = 0 for locs in tau_save_fs.existing(): ioprinter.info_message('HESS Number {}'.format(hess_cnt + 1), newline=1) geo_run_path = tau_run_fs[-1].path(locs) if db_style == 'directory': geo_save_path = tau_save_fs[-1].path(locs) if tau_save_fs[-1].file.hessian.exists(locs): ioprinter.existing_path('Hessian', geo_save_path) hess_cnt += 1 continue geo = tau_save_fs[-1].file.geometry.read(locs) elif db_style == 'jsondb': geo_save_path = tau_save_fs[-1].root.path() if tau_save_fs[-1].json.hessian.exists(locs): ioprinter.existing_path('Hessian', geo_save_path) hess_cnt += 1 continue geo = tau_save_fs[-1].json.geometry.read(locs) zma = None tau_run_fs[-1].create(locs) ES_TSKS[job](zma, geo, spc_info, mod_thy_info, tau_save_fs, geo_run_path, geo_save_path, locs, script_str, overwrite, retryfail=retryfail, **kwargs) hess_cnt += 1 if hess_cnt == hessmax: break else: 
ioprinter.info_message('No torsional modes in the species')
def hr_tsk(job, spc_dct, spc_name, thy_dct, es_keyword_dct, run_prefix, save_prefix): """ run a scan over the specified torsional coordinates """ spc_dct_i = spc_dct[spc_name] saddle = bool('ts_' in spc_name) # Set the spc_info if not saddle: spc_info = sinfo.from_dct(spc_dct_i) else: spc_info = rinfo.ts_info(spc_dct_i['rxn_info']) # Modify the theory method_dct = thy_dct.get(es_keyword_dct['runlvl']) ini_method_dct = thy_dct.get(es_keyword_dct['inplvl']) thy_info = tinfo.from_dct(method_dct) ini_thy_info = tinfo.from_dct(ini_method_dct) mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info) mod_ini_thy_info = tinfo.modify_orb_label(ini_thy_info, spc_info) # Set the filesystem objects _root = root_locs(spc_dct_i, saddle=saddle, name=spc_name) ini_cnf_run_fs, ini_cnf_save_fs = build_fs(run_prefix, save_prefix, 'CONFORMER', thy_locs=mod_ini_thy_info[1:], **_root) cnf_run_fs, cnf_save_fs = build_fs(run_prefix, save_prefix, 'CONFORMER', thy_locs=mod_thy_info[1:], **_root) instab_save_fs = () ini_loc_info = filesys.mincnf.min_energy_conformer_locators( ini_cnf_save_fs, mod_ini_thy_info) ini_min_locs, ini_cnf_save_path = ini_loc_info # ini_min_rng_locs, ini_min_cnf_locs = ini_min_cnf_locs # ini_min_rng_path, ini_min_cnf_path = ini_min_cnf_path # Create run fs if that directory has been deleted to run the jobs ini_cnf_run_fs[-1].create(ini_min_locs) ini_cnf_run_path = ini_cnf_run_fs[-1].path(ini_min_locs) # Get options from the dct or es options lst overwrite = es_keyword_dct['overwrite'] retryfail = es_keyword_dct['retryfail'] tors_model = es_keyword_dct['tors_model'] # Read zma, geo, and torsions ini_zma_save_fs = autofile.fs.zmatrix(ini_cnf_save_path) geo = ini_cnf_save_fs[-1].file.geometry.read(ini_min_locs) zma = ini_zma_save_fs[-1].file.zmatrix.read((0, )) if ini_zma_save_fs[-1].file.torsions.exists([0]): tors_dct = ini_zma_save_fs[-1].file.torsions.read([0]) torsions = automol.rotor.from_data( zma, tors_dct, ) else: torsions = () # Run the task if any torsions exist if any(torsions): scn = 'SCAN' if 'fa' not in tors_model else 'CSCAN' ini_scn_run_fs, ini_scn_save_fs = build_fs(ini_cnf_run_path, ini_cnf_save_path, scn, zma_locs=(0, )) if job == 'scan': increment = spc_dct_i.get('hind_inc', 30.0 * phycon.DEG2RAD) hr.hindered_rotor_scans(zma, spc_info, mod_thy_info, instab_save_fs, ini_scn_run_fs, ini_scn_save_fs, torsions, tors_model, method_dct, overwrite, saddle=saddle, increment=increment, retryfail=retryfail) # elif job == 'reopt': # # pull stuff from dcts # two_stage = saddle # rxn_class = spc_dct_i['class'] if saddle else '' # mc_nsamp = spc_dct_i['mc_nsamp'] # ethresh = es_keyword_dct['hrthresh'] # # Read and print the potential # sp_fs = autofile.fs.single_point(ini_cnf_save_path) # ref_ene = sp_fs[-1].file.energy.read(mod_ini_thy_info[1:4]) # # ref_ene = ini_cnf_save_fs[-1].file.energy.read(ini_min_cnf_locs) # tors_pots, tors_zmas, tors_paths = {}, {}, {} # for tors_names, tors_grids in zip(run_tors_names, run_tors_grids): # constraint_dct = automol.zmat.build_constraint_dct( # zma, const_names, tors_names) # pot, _, _, _, zmas, paths = filesys.read.potential( # tors_names, tors_grids, # ini_cnf_save_path, # mod_ini_thy_info, ref_ene, # constraint_dct, # read_zma=True) # tors_pots[tors_names] = pot # tors_zmas[tors_names] = zmas # tors_paths[tors_names] = paths # # Check for new minimum conformer # new_min_zma = __.check_hr_pot( # tors_pots, tors_zmas, tors_paths, emax=ethresh) # if new_min_zma is not None: # ioprinter.info_message( # 'Finding new low energy conformer...', 
newline=1) # conformer.single_conformer( # zma, spc_info, mod_thy_info, # ini_cnf_run_fs, ini_cnf_save_fs, # script_str, overwrite, # retryfail=retryfail, rxn=rxn, **kwargs) elif job in ('energy', 'grad', 'hess', 'vpt2'): # Script (add energy script call) script_str, kwargs = qchem_params(method_dct) run_tors_names = automol.rotor.names(torsions, flat=True) for tors_names in run_tors_names: # Set the constraint dct and filesys for the scan const_names = automol.zmat.set_constraint_names( zma, run_tors_names, tors_model) constraint_dct = automol.zmat.build_constraint_dct( zma, const_names, tors_names) # get the scn_locs, maybe get a function? scn_locs = () for locs in scn_locs: geo_run_path = ini_scn_run_fs[-1].path(locs) geo_save_path = ini_scn_save_fs[-1].path(locs) geo = ini_scn_save_fs[-1].file.geometry.read(locs) zma = ini_scn_save_fs[-1].file.zmatrix.read(locs) ini_scn_run_fs[-1].create(locs) ES_TSKS[job](zma, geo, spc_info, mod_thy_info, ini_scn_save_fs, geo_run_path, geo_save_path, locs, script_str, overwrite, retryfail=retryfail, **kwargs) ioprinter.obj('vspace') else: ioprinter.info_message('No torsional modes in the species')
def rpath_tsk(job, spc_dct, spc_name, thy_dct, es_keyword_dct, run_prefix, save_prefix): """ run a scan over the specified torsional coordinates """ # Get dct for specific species task is run for spc_dct_i = spc_dct[spc_name] # Set up coordinate name rxn_coord = es_keyword_dct.get('rxn_coord') if rxn_coord == 'auto': coord_name = ['Rn'] # grab from zrxn object else: # coord_name = coord_name = ['IRC'] # Set the spc_info spc_info = sinfo.from_dct(spc_dct_i) # Modify the theory method_dct = thy_dct.get(es_keyword_dct['runlvl']) ini_method_dct = thy_dct.get(es_keyword_dct['inplvl']) thy_info = tinfo.from_dct(method_dct) ini_thy_info = tinfo.from_dct(ini_method_dct) mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info) mod_ini_thy_info = tinfo.modify_orb_label(ini_thy_info, spc_info) # Get options from the dct or es options lst overwrite = es_keyword_dct['overwrite'] # retryfail = es_keyword_dct['retryfail'] # Set up the script script_str, kwargs = qchem_params(method_dct, elstruct.Job.OPTIMIZATION) # Set the filesystem objects rxn_info = spc_dct_i['rxn_info'] fs_rxn_info = rinfo.sort(rxn_info) # New filesystem objects if coord_name == 'irc': _root = root_locs(spc_dct_i, saddle=True) # ini_cnf_run_fs, ini_cnf_save_fs = build_fs( # run_prefix, save_prefix, 'CONFORMER', # thy_locs=mod_ini_thy_info[1:], # **_root) # cnf_run_fs, cnf_save_fs = build_fs( # run_prefix, save_prefix, 'CONFORMER', # thy_locs=mod_thy_info[1:], # **_root) ini_cnf_run_fs, ini_cnf_save_fs = build_fs( run_prefix, save_prefix, 'CONFORMER', thy_locs=mod_ini_thy_info[1:], **_root) cnf_run_fs, cnf_save_fs = build_fs(run_prefix, save_prefix, 'CONFORMER', thy_locs=mod_thy_info[1:], **_root) ini_loc_info = filesys.mincnf.min_energy_conformer_locators( ini_cnf_save_fs, mod_ini_thy_info) ini_min_locs, ini_pfx_save_path = ini_loc_info # ini_min_rng_locs, ini_min_cnf_locs = ini_min_cnf_locs # ini_min_rng_path, ini_min_cnf_path = ini_min_cnf_path ini_cnf_run_fs[-1].create(ini_min_locs) ini_pfx_run_path = ini_cnf_run_fs[-1].path(ini_min_locs) else: ts_info = (ts_num, ) ini_ts_run_fs, ini_ts_save_fs = build_fs(run_prefix, save_prefix, 'TS', thy_locs=mod_ini_thy_info[1:], **_root) ini_pfx_run_path = ini_ts_run_fs.path(ts_info) ini_pfx_save_path = ini_ts_save_fs.path(ts_info) # Get options from the dct or es options lst overwrite = es_keyword_dct['overwrite'] ini_scn_run_fs, ini_scn_save_fs = build_fs(ini_pfx_run_path, ini_pfx_save_path, 'SCAN', zma_locs=(0, )) ini_zma_save_fs = autofile.fs.zmatrix(ini_cnf_save_path) geo = ini_cnf_save_fs[-1].file.geometry.read(ini_min_locs) zma = ini_zma_save_fs[-1].file.zmatrix.read((0, )) # Run job if job == 'scan': if rcoord == 'auto': pass elif rcoord == 'irc': rpath.irc_scan(geo, spc_info, coord_name, mod_ini_thy_info, ini_method_dct, ini_scn_save_fs, ini_cnf_run_path, overwrite) elif job in ('energy', 'grad', 'hess'): # Script script_str, kwargs = qchem_params(method_dct) # Need to put in something with the IRC idxs for locs in ini_scn_save_fs[-1].existing(): geo_run_path = ini_scn_run_fs[-1].path(locs) geo_save_path = ini_scn_save_fs[-1].path(locs) geo = ini_scn_save_fs[-1].file.geometry.read(locs) zma = None ini_scn_run_fs[-1].create(locs) ES_TSKS[job](zma, geo, spc_info, mod_thy_info, ini_scn_save_fs, geo_run_path, geo_save_path, locs, script_str, overwrite, **kwargs) ioprinter.obj('vspace') elif job == 'infene': pass
def run_tsk(tsk, spc_dct, spc_name, thy_dct, proc_keyword_dct, pes_mod_dct_i, spc_mod_dct_i, run_prefix, save_prefix): """ run a proc tess task for generating a list of conformer or tau sampling geometries """ # Print the head of the task ioprinter.output_task_header(tsk) ioprinter.obj('line_dash') ioprinter.output_keyword_list(proc_keyword_dct, thy_dct) # Setup csv data dictionary for specific task csv_data = util.set_csv_data(tsk) chn_basis_ene_dct = {} spc_array = [] # print species ioprinter.obj('line_dash') ioprinter.info_message("Species: ", spc_name) # Heat of formation basis molecules and coefficients # is not conformer specific if 'coeffs' in tsk: thy_info = spc_mod_dct_i['geo'][1] filelabel = 'coeffs' filelabel += '_{}'.format(pes_mod_dct_i['thermfit']['ref_scheme']) filelabel += '.csv' label = spc_name basis_dct, _ = basis.prepare_refs( pes_mod_dct_i['thermfit']['ref_scheme'], spc_dct, (spc_name, )) # Get the basis info for the spc of interest spc_basis, coeff_basis = basis_dct[spc_name] coeff_array = [] for spc_i in spc_basis: if spc_i not in spc_array: spc_array.append(spc_i) for spc_i in spc_array: if spc_i in spc_basis: coeff_array.append(coeff_basis[spc_basis.index(spc_i)]) else: coeff_array.append(0) csv_data[label] = [*coeff_array] else: # unpack spc and level info spc_dct_i = spc_dct[spc_name] if proc_keyword_dct['geolvl']: thy_info = tinfo.from_dct(thy_dct.get(proc_keyword_dct['geolvl'])) else: thy_info = spc_mod_dct_i['geo'][1] # Loop over conformers if proc_keyword_dct['geolvl']: _, rng_cnf_locs_lst, rng_cnf_locs_path = util.conformer_list( proc_keyword_dct, save_prefix, run_prefix, spc_dct_i, thy_dct) spc_mod_dct_i, pf_models = None, None else: ret = util.conformer_list_from_models(proc_keyword_dct, save_prefix, run_prefix, spc_dct_i, thy_dct, spc_mod_dct_i, pf_models) _, rng_cnf_locs_lst, rng_cnf_locs_path = ret for locs, locs_path in zip(rng_cnf_locs_lst, rng_cnf_locs_path): label = spc_name + '_' + '_'.join(locs) _, cnf_fs = filesys.build_fs(run_prefix, save_prefix, 'CONFORMER') if 'freq' in tsk: filelabel = 'freq' if spc_mod_dct_i: filelabel += '_m{}'.format(spc_mod_dct_i['harm'][0]) else: filelabel += '_{}'.format(proc_keyword_dct['geolvl']) filelabel += '.csv' if pf_models: pf_filesystems = filesys.models.pf_filesys(spc_dct_i, spc_mod_dct_i, run_prefix, save_prefix, saddle=False) ret = vib.full_vib_analysis(spc_dct_i, pf_filesystems, spc_mod_dct_i, run_prefix, zrxn=None) freqs, _, tors_zpe, sfactor, torsfreqs, all_freqs = ret csv_data['tfreq'][label] = torsfreqs csv_data['allfreq'][label] = all_freqs csv_data['scalefactor'][label] = [sfactor] else: es_model = util.freq_es_levels(proc_keyword_dct) spc_mod_dct_i = parser.model.pf_level_info( es_model, thy_dct) try: freqs, _, zpe = vib.read_locs_harmonic_freqs( cnf_fs, locs, run_prefix, zrxn=None) except: freqs = [] zpe = 0 tors_zpe = 0.0 spc_data = [] zpe = tors_zpe + (sum(freqs) / 2.0) * phycon.WAVEN2EH if freqs and proc_keyword_dct['scale'] is not None: freqs, zpe = vib.scale_frequencies(freqs, tors_zpe, spc_mod_dct_i, scale_method='3c') spc_data = [locs_path, zpe, *freqs] csv_data['freq'][label] = spc_data elif 'geo' in tsk: filelabel = 'geo' if spc_mod_dct_i: filelabel += '_{}'.format(spc_mod_dct_i['harm']) else: filelabel += '_{}'.format(proc_keyword_dct['geolvl']) filelabel += '.txt' if cnf_fs[-1].file.geometry.exists(locs): geo = cnf_fs[-1].file.geometry.read(locs) energy = cnf_fs[-1].file.energy.read(locs) comment = 'energy: {0:>15.10f}'.format(energy) xyz_str = automol.geom.xyz_string(geo, 
comment=comment) else: xyz_str = '\t -- Missing --' spc_data = '\n\nSPC: {}\tConf: {}\tPath: {}\n'.format( spc_name, locs, locs_path) + xyz_str csv_data[label] = spc_data elif 'zma' in tsk: filelabel = 'zmat' if spc_mod_dct_i: filelabel += '_{}'.format(spc_mod_dct_i['harm']) else: filelabel += '_{}'.format(proc_keyword_dct['geolvl']) filelabel += '.txt' geo = cnf_fs[-1].file.geometry.read(locs) zma = automol.geom.zmatrix(geo) energy = cnf_fs[-1].file.energy.read(locs) comment = 'energy: {0:>15.10f}\n'.format(energy) zma_str = automol.zmat.string(zma) spc_data = '\n\nSPC: {}\tConf: {}\tPath: {}\n'.format( spc_name, locs, locs_path) + comment + zma_str csv_data[label] = spc_data elif 'ene' in tsk: filelabel = 'ene' if spc_mod_dct_i: filelabel += '_{}'.format(spc_mod_dct_i['harm']) filelabel += '_{}'.format(spc_mod_dct_i['ene']) else: filelabel += '_{}'.format(proc_keyword_dct['geolvl']) filelabel += '_{}'.format(proc_keyword_dct['proplvl']) filelabel += '.csv' energy = None if spc_mod_dct_i: pf_filesystems = filesys.models.pf_filesys(spc_dct_i, spc_mod_dct_i, run_prefix, save_prefix, saddle=False) energy = ene.electronic_energy(spc_dct_i, pf_filesystems, spc_mod_dct_i, conf=(locs, locs_path, cnf_fs)) else: spc_info = sinfo.from_dct(spc_dct_i) thy_info = tinfo.from_dct( thy_dct.get(proc_keyword_dct['proplvl'])) mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info) sp_save_fs = autofile.fs.single_point(locs_path) sp_save_fs[-1].create(mod_thy_info[1:4]) # Read the energy sp_path = sp_save_fs[-1].path(mod_thy_info[1:4]) if os.path.exists(sp_path): if sp_save_fs[-1].file.energy.exists( mod_thy_info[1:4]): ioprinter.reading('Energy', sp_path) energy = sp_save_fs[-1].file.energy.read( mod_thy_info[1:4]) csv_data[label] = [locs_path, energy] elif 'enthalpy' in tsk: filelabel = 'enthalpy' if spc_mod_dct_i: filelabel += '_{}'.format(spc_mod_dct_i['harm']) filelabel += '_{}'.format(spc_mod_dct_i['ene']) else: filelabel += '_{}'.format(proc_keyword_dct['geolvl']) filelabel += '_{}'.format(proc_keyword_dct['proplvl']) filelabel = '.csv' energy = None pf_filesystems = filesys.models.pf_filesys(spc_dct_i, spc_mod_dct_i, run_prefix, save_prefix, saddle=False) ene_abs = ene.read_energy(spc_dct_i, pf_filesystems, spc_mod_dct_i, run_prefix, conf=(locs, locs_path, cnf_fs), read_ene=True, read_zpe=True, saddle=False) hf0k, _, chn_basis_ene_dct, hbasis = basis.enthalpy_calculation( spc_dct, spc_name, ene_abs, chn_basis_ene_dct, pes_mod_dct_i, spc_mod_dct_i, run_prefix, save_prefix, pforktp='pf', zrxn=None) spc_basis, coeff_basis = hbasis[spc_name] coeff_array = [] for spc_i in spc_basis: if spc_i not in spc_array: spc_array.append(spc_i) for spc_i in spc_array: if spc_i in spc_basis: coeff_array.append(coeff_basis[spc_basis.index(spc_i)]) else: coeff_array.append(0) csv_data[label] = [locs_path, ene_abs, hf0k, *coeff_array] util.write_csv_data(tsk, csv_data, filelabel, spc_array)
def run(spc_rlst, therm_tsk_lst, pes_mod_dct, spc_mod_dct, spc_dct, run_prefix, save_prefix): """ main driver for thermo run """ # Print Header fo ioprinter.info_message('Calculating Thermochem:') ioprinter.runlst(('SPC', 0, 0), spc_rlst) # ------------------------------------------------ # # PREPARE INFORMATION TO PASS TO THERMDRIVER TASKS # # ------------------------------------------------ # # Build a list of the species to calculate thermochem for loops below spc_mods = list(spc_mod_dct.keys()) # hack split_spc_lst = split_unstable_spc(spc_rlst, spc_dct, spc_mod_dct[spc_mods[0]], save_prefix) spc_queue = parser.rlst.spc_queue('spc', tuple(split_spc_lst.values())[0]) # Build the paths [(messpf, nasa)], models and levels for each spc thm_paths = thermo_paths(spc_dct, spc_queue, spc_mods, run_prefix) # ----------------------------------- # # RUN THE REQUESTED THERMDRIVER TASKS # # ----------------------------------- # # Write and Run MESSPF inputs to generate the partition functions write_messpf_tsk = parser.run.extract_task('write_mess', therm_tsk_lst) if write_messpf_tsk is not None: ioprinter.messpf('write_header') spc_mods, pes_mod = parser.models.extract_models(write_messpf_tsk) for idx, spc_name in enumerate(spc_queue): print('write test {}'.format(spc_name)) for spc_mod in spc_mods: messpf_inp_str = thmroutines.qt.make_messpf_str( pes_mod_dct[pes_mod]['therm_temps'], spc_dct, spc_name, pes_mod_dct[pes_mod], spc_mod_dct[spc_mod], run_prefix, save_prefix) ioprinter.messpf('input_string') ioprinter.info_message(messpf_inp_str) autorun.write_input(thm_paths[idx][spc_mod][0], messpf_inp_str, input_name='pf.inp') # Run the MESSPF files that have been written run_messpf_tsk = parser.run.extract_task('run_mess', therm_tsk_lst) if run_messpf_tsk is not None: spc_mod, pes_mod = parser.models.extract_models(run_messpf_tsk) spc_mods = parser.models.split_model(spc_mod[0]) ioprinter.messpf('run_header') for idx, spc_name in enumerate(spc_queue): _spc_mods, coeffs, operators = spc_mods # Run MESSPF for all requested models, combine the PFS at the end ioprinter.message('Run MESSPF: {}'.format(spc_name), newline=1) _pfs = [] for spc_mod in _spc_mods: autorun.run_script(autorun.SCRIPT_DCT['messpf'], thm_paths[idx][spc_mod][0]) _pfs.append( pfrunner.mess.read_messpf(thm_paths[idx][spc_mod][0])) final_pf = pfrunner.mess.combine_pfs(_pfs, coeffs, operators) # need to clean thm path build tot_idx = len(spc_mods) spc_info = sinfo.from_dct(spc_dct[spc_name]) spc_fml = automol.inchi.formula_string(spc_info[0]) thm_prefix = [spc_fml, automol.inchi.inchi_key(spc_info[0])] thm_paths[idx]['final'] = (job_path(run_prefix, 'MESS', 'PF', thm_prefix, locs_idx=tot_idx), job_path(run_prefix, 'THERM', 'NASA', thm_prefix, locs_idx=tot_idx)) pfrunner.mess.write_mess_output(fstring( spc_dct[spc_name]['inchi']), final_pf, thm_paths[idx]['final'][0], filename='pf.dat') # Use MESS partition functions to compute thermo quantities run_fit_tsk = parser.run.extract_task('run_fits', therm_tsk_lst) if run_fit_tsk is not None: spc_mods, pes_mod = parser.models.extract_models(run_fit_tsk) pes_mod_dct_i = pes_mod_dct[pes_mod] ioprinter.nasa('header') chn_basis_ene_dct = {} for idx, spc_name in enumerate(spc_queue): # Take species model and add it to the chn_basis_ene dct spc_mod = spc_mods[0] spc_mod_dct_i = spc_mod_dct[spc_mod] if spc_mod not in chn_basis_ene_dct: chn_basis_ene_dct[spc_mod] = {} # Get the reference scheme and energies (ref in different place) ref_scheme = pes_mod_dct_i['therm_fit']['ref_scheme'] ref_enes = 
pes_mod_dct_i['therm_fit']['ref_enes'] # Determine info about the basis species used in thermochem calcs basis_dct, uniref_dct = thmroutines.basis.prepare_refs( ref_scheme, spc_dct, [[spc_name, None]], run_prefix, save_prefix) # Get the basis info for the spc of interest spc_basis, coeff_basis = basis_dct[spc_name] # Get the energies for the spc and its basis ene_basis = [] energy_missing = False for spc_basis_i in spc_basis: if spc_basis_i in chn_basis_ene_dct[spc_mod]: ioprinter.message( 'Energy already found for basis species: ' + spc_basis_i) ene_basis.append(chn_basis_ene_dct[spc_mod][spc_basis_i]) else: ioprinter.message( 'Energy will be determined for basis species: ' + spc_basis_i) energy_missing = True if not energy_missing: pf_filesystems = filesys.models.pf_filesys(spc_dct[spc_name], spc_mod_dct_i, run_prefix, save_prefix, saddle=False) ene_spc = ene.read_energy(spc_dct[spc_name], pf_filesystems, spc_mod_dct_i, run_prefix, read_ene=True, read_zpe=True, saddle=False) else: ene_spc, ene_basis = thmroutines.basis.basis_energy( spc_name, spc_basis, uniref_dct, spc_dct, spc_mod_dct_i, run_prefix, save_prefix) for spc_basis_i, ene_basis_i in zip(spc_basis, ene_basis): chn_basis_ene_dct[spc_mod][spc_basis_i] = ene_basis_i # Calculate and store the 0 K Enthalpy hf0k = thmroutines.heatform.calc_hform_0k(ene_spc, ene_basis, spc_basis, coeff_basis, ref_set=ref_enes) spc_dct[spc_name]['Hfs'] = [hf0k] # Write the NASA polynomials in CHEMKIN format ckin_nasa_str = '' ckin_path = output_path('CKIN') for idx, spc_name in enumerate(spc_queue): ioprinter.nasa('calculate', spc_name) # Write the header describing the models used in thermo calcs ckin_nasa_str += writer.ckin.model_header(spc_mods, spc_mod_dct) # Build and write the NASA polynomial in CHEMKIN-format string # Call dies if you haven't run "write mess" task ckin_nasa_str += thmroutines.nasapoly.build_polynomial( spc_name, spc_dct, thm_paths[idx]['final'][0], thm_paths[idx]['final'][1]) ckin_nasa_str += '\n\n' print(ckin_nasa_str) nasa7_params_all = chemkin_io.parser.thermo.create_spc_nasa7_dct( ckin_nasa_str) # print('ckin_nasa_str test', ckin_nasa_str) ioprinter.info_message( 'SPECIES H(0 K) H(298 K) S(298 K) Cp(300 K) Cp(500 K) Cp(1000 K) Cp(1500 K)\n' ) ioprinter.info_message( ' kcal/mol kcal/mol cal/(mol K) ... \n') for spc_name in nasa7_params_all: nasa7_params = nasa7_params_all[spc_name] whitespace = 18 - len(spc_name) h0 = spc_dct[spc_name]['Hfs'][0] h298 = mechanalyzer.calculator.thermo.enthalpy( nasa7_params, 298.15) / 1000. s298 = mechanalyzer.calculator.thermo.entropy(nasa7_params, 298.15) cp300 = mechanalyzer.calculator.thermo.heat_capacity( nasa7_params, 300) cp500 = mechanalyzer.calculator.thermo.heat_capacity( nasa7_params, 500) cp1000 = mechanalyzer.calculator.thermo.heat_capacity( nasa7_params, 1000) cp1500 = mechanalyzer.calculator.thermo.heat_capacity( nasa7_params, 1500) whitespace = whitespace * ' ' ioprinter.info_message( '{}{}{:>7.2f}{:>9.2f}{:>9.2f}{:>9.2f}{:>9.2f}{:>9.2f}{:>9.2f}'. format(spc_name, whitespace, h0, h298, s298, cp300, cp500, cp1000, cp1500)) # Write all of the NASA polynomial strings writer.ckin.write_nasa_file(ckin_nasa_str, ckin_path)
def tau_data(spc_dct_i, spc_mod_dct_i,
             run_prefix, save_prefix, saddle=False):
    """ Read the filesystem to get information for TAU
    """

    # Set up model and basic thy objects
    spc_info = sinfo.from_dct(spc_dct_i)
    thy_info = spc_mod_dct_i['vib']['geolvl'][1][1]
    mod_thy_info = tinfo.modify_orb_label(thy_info, spc_info)
    vib_model = spc_mod_dct_i['vib']['mod']

    # Set up the reference conformer filesys
    pf_filesystems = filesys.models.pf_filesys(
        spc_dct_i, spc_mod_dct_i, run_prefix, save_prefix, saddle)
    [harm_save_fs, _, harm_min_locs, _, _] = pf_filesystems['harm']

    # Obtain all values from the initial reference conformer
    rotors = tors.build_rotors(
        spc_dct_i, pf_filesystems, spc_mod_dct_i,
        read_potentials=False)

    vib_info = vib.full_vib_analysis(
        spc_dct_i, pf_filesystems, spc_mod_dct_i,
        run_prefix, zrxn=None)
    freqs, _, zpe, _, tors_strs, _, harm_freqs, _ = vib_info
    harm_zpve = 0.5 * sum(harm_freqs) * phycon.WAVEN2EH

    ioprinter.info_message('Determining the symmetry factor...', newline=1)
    sym_factor = symm.symmetry_factor(
        pf_filesystems, spc_mod_dct_i, spc_dct_i, rotors,
    )

    zpe_chnlvl = zpe * phycon.EH2KCAL
    ref_ene = harm_zpve * phycon.EH2KCAL
    ref_geom = [harm_save_fs[-1].file.geometry.read(harm_min_locs)]
    ref_grad = [harm_save_fs[-1].file.gradient.read(harm_min_locs)]
    ref_hessian = [harm_save_fs[-1].file.hessian.read(harm_min_locs)]
    min_cnf_ene = filesys.read.energy(
        harm_save_fs, harm_min_locs, mod_thy_info)

    # Set up the TAU filesystem objects, get locs, and read info
    _, tau_save_fs = filesys.build_fs(
        run_prefix, save_prefix, 'TAU',
        spc_locs=spc_info, thy_locs=mod_thy_info[1:])

    db_style = 'jsondb'
    if vib_model == 'tau':
        # Only keep samples that also have a Hessian stored
        if db_style == 'directory':
            tau_locs = [
                locs for locs in tau_save_fs[-1].existing()
                if tau_save_fs[-1].file.hessian.exists(locs)
            ]
        elif db_style == 'jsondb':
            tau_locs = [
                locs for locs in tau_save_fs[-1].json_existing()
                if tau_save_fs[-1].json.hessian.exists(locs)
            ]
    else:
        if db_style == 'directory':
            tau_locs = tau_save_fs[-1].existing()
        elif db_style == 'jsondb':
            tau_locs = tau_save_fs[-1].json_existing()

    ioprinter.info_message(
        'Reading data for the Monte Carlo samples from db.json '
        f'at path {tau_save_fs[0].path()}')

    samp_geoms, samp_enes, samp_grads, samp_hessians = [], [], [], []
    tot_locs = len(tau_locs)
    for idx, locs in enumerate(tau_locs):

        # Read the geometry for this sample
        if db_style == 'directory':
            geo = tau_save_fs[-1].file.geometry.read(locs)
        elif db_style == 'jsondb':
            geo = tau_save_fs[-1].json.geometry.read(locs)
        # geo_str = autofile.data_types.swrite.geometry(geo)
        samp_geoms.append(geo)

        # Read the energy and convert it to kcal/mol relative to the min conf
        if db_style == 'directory':
            tau_ene = tau_save_fs[-1].file.energy.read(locs)
        elif db_style == 'jsondb':
            tau_ene = tau_save_fs[-1].json.energy.read(locs)
        rel_ene = (tau_ene - min_cnf_ene) * phycon.EH2KCAL
        # ene_str = autofile.data_types.swrite.energy(rel_ene)
        samp_enes.append(rel_ene)

        # Read the gradient and Hessian when sampled frequencies are needed
        if vib_model == 'tau':
            if db_style == 'directory':
                grad = tau_save_fs[-1].file.gradient.read(locs)
            elif db_style == 'jsondb':
                grad = tau_save_fs[-1].json.gradient.read(locs)
            # grad_str = autofile.data_types.swrite.gradient(grad)
            samp_grads.append(grad)

            if db_style == 'directory':
                hess = tau_save_fs[-1].file.hessian.read(locs)
            elif db_style == 'jsondb':
                hess = tau_save_fs[-1].json.hessian.read(locs)
            # hess_str = autofile.data_types.swrite.hessian(hess)
            samp_hessians.append(hess)

        # Print progress message (every 150 geoms read)
        if idx % 149 == 0:
            print(f'Read {idx+1}/{tot_locs} samples...')

    # Determine the successful conformer ratio
    inf_obj = tau_save_fs[0].file.info.read()
    excluded_volume_factor = len(samp_geoms) / inf_obj.nsamp
    print('excluded volume factor test:', excluded_volume_factor,
          len(samp_geoms), inf_obj.nsamp)

    # Create the info dictionary
    keys = [
        'geom', 'sym_factor', 'elec_levels', 'freqs', 'flux_mode_str',
        'samp_geoms', 'samp_enes', 'samp_grads', 'samp_hessians',
        'ref_geom', 'ref_grad', 'ref_hessian',
        'zpe_chnlvl', 'ref_ene', 'excluded_volume_factor'
    ]
    vals = [
        ref_geom[0], sym_factor, spc_dct_i['elec_levels'],
        freqs, tors_strs[2],
        samp_geoms, samp_enes, samp_grads, samp_hessians,
        ref_geom, ref_grad, ref_hessian,
        zpe_chnlvl, ref_ene, excluded_volume_factor
    ]
    inf_dct = dict(zip(keys, vals))

    return inf_dct
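# The excluded volume factor above is simply the fraction of sampled geometries
# that were successfully saved relative to the total number of attempted samples
# recorded in the TAU info object. As a rough illustration only (this is not the
# MESS implementation, and crude_config_average / GAS_CONST_KCAL are names
# introduced here), the sketch below shows how the sampled relative energies in
# kcal/mol and that factor could be folded into a crude classical configurational
# average at a single temperature.

import math

GAS_CONST_KCAL = 0.0019872041  # gas constant in kcal/(mol K)


def crude_config_average(samp_enes, excluded_volume_factor, temp=298.15):
    """ Boltzmann-average the sampled relative energies (kcal/mol) and
        scale by the fraction of successful samples. Purely illustrative;
        MESS performs the actual Monte Carlo phase-space integration.
    """
    if not samp_enes:
        return 0.0
    boltz_sum = sum(
        math.exp(-ene / (GAS_CONST_KCAL * temp)) for ene in samp_enes)
    return excluded_volume_factor * boltz_sum / len(samp_enes)


# Example with made-up numbers: three samples at 0.0, 1.0, and 2.5 kcal/mol
# and an 80% success rate
# print(crude_config_average([0.0, 1.0, 2.5], 0.8))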
def run(pes_rlst, spc_rlst,
        therm_tsk_lst,
        pes_mod_dct, spc_mod_dct,
        spc_dct,
        run_prefix, save_prefix):
    """ Executes all thermochemistry tasks.

        :param pes_rlst: species from PESs to run
            [(PES formula, PES idx, SUP-PES idx),
             (CHANNEL idx, (REACS, PRODS))]
        :type pes_rlst: tuple(dict[str: dict])
        :param spc_rlst: lst of species to run
        :type spc_rlst: tuple(dict[str: dict])
        :param therm_tsk_lst: list of the thermochemistry tasks
            tuple(tuple(obj, tsk, keyword_dict))
        :type therm_tsk_lst: tuple(tuple(str, str, dict))
        :param pes_mod_dct: PES-model information (temps, reference schemes)
        :type pes_mod_dct: dict[str: dict]
        :param spc_mod_dct: species-model information for partition functions
        :type spc_mod_dct: dict[str: dict]
        :param spc_dct: species information
            dict[spc_name: spc_information]
        :type spc_dct: dict[str:dict]
        :param run_prefix: root-path to the run-filesystem
        :type run_prefix: str
        :param save_prefix: root-path to the save-filesystem
        :type save_prefix: str
    """

    # Print Header
    ioprinter.info_message('Calculating Thermochem:')
    ioprinter.runlst(('SPC', 0, 0), spc_rlst)

    # ------------------------------------------------ #
    # PREPARE INFORMATION TO PASS TO THERMDRIVER TASKS #
    # ------------------------------------------------ #

    # Build a list of the species to calculate thermochem for loops below
    spc_mods = list(spc_mod_dct.keys())  # hack
    spc_mod_dct_i = spc_mod_dct[spc_mods[0]]
    split_rlst = split_unstable_full(
        pes_rlst, spc_rlst, spc_dct, spc_mod_dct_i, save_prefix)
    spc_queue = parser.rlst.spc_queue(
        tuple(split_rlst.values())[0], 'SPC')

    # Build the paths [(messpf, nasa)], models and levels for each spc
    thm_paths = thermo_paths(spc_dct, spc_queue, spc_mods, run_prefix)

    # ----------------------------------- #
    # RUN THE REQUESTED THERMDRIVER TASKS #
    # ----------------------------------- #

    # Write and Run MESSPF inputs to generate the partition functions
    write_messpf_tsk = parser.run.extract_task('write_mess', therm_tsk_lst)
    if write_messpf_tsk is not None:

        ioprinter.messpf('write_header')

        spc_mods, pes_mod = parser.models.extract_models(write_messpf_tsk)

        for idx, spc_name in enumerate(spc_queue):
            print('write test {}'.format(spc_name))
            for spc_mod in spc_mods:
                messpf_inp_str = thmroutines.qt.make_messpf_str(
                    pes_mod_dct[pes_mod]['therm_temps'],
                    spc_dct, spc_name,
                    pes_mod_dct[pes_mod], spc_mod_dct[spc_mod],
                    run_prefix, save_prefix)
                ioprinter.messpf('input_string')
                ioprinter.info_message(messpf_inp_str)
                autorun.write_input(
                    thm_paths[idx][spc_mod][0], messpf_inp_str,
                    input_name='pf.inp')

    # Run the MESSPF files that have been written
    run_messpf_tsk = parser.run.extract_task('run_mess', therm_tsk_lst)
    if run_messpf_tsk is not None:

        spc_mod, pes_mod = parser.models.extract_models(run_messpf_tsk)
        spc_mods = parser.models.split_model(spc_mod[0])

        ioprinter.messpf('run_header')
        for idx, spc_name in enumerate(spc_queue):

            _spc_mods, coeffs, operators = spc_mods

            # Run MESSPF for all requested models, combine the PFs at the end
            ioprinter.message('Run MESSPF: {}'.format(spc_name), newline=1)
            _pfs = []
            for spc_mod in _spc_mods:
                autorun.run_script(
                    autorun.SCRIPT_DCT['messpf'],
                    thm_paths[idx][spc_mod][0])
                _pfs.append(
                    reader.mess.messpf(thm_paths[idx][spc_mod][0]))
            final_pf = thermfit.pf.combine(_pfs, coeffs, operators)

            # need to clean thm path build
            tdx = len(_spc_mods)
            spc_info = sinfo.from_dct(spc_dct[spc_name])
            spc_fml = automol.inchi.formula_string(spc_info[0])
            thm_prefix = [spc_fml, automol.inchi.inchi_key(spc_info[0])]
            thm_paths[idx]['final'] = (
                job_path(run_prefix, 'MESS', 'PF', thm_prefix, locs_idx=tdx),
                job_path(run_prefix, 'THERM', 'NASA', thm_prefix, locs_idx=tdx)
            )
            writer.mess.output(
                fstring(spc_dct[spc_name]['inchi']),
                final_pf, thm_paths[idx]['final'][0],
                filename='pf.dat')

    # Use MESS partition functions to compute thermo quantities
    run_fit_tsk = parser.run.extract_task('run_fits', therm_tsk_lst)
    if run_fit_tsk is not None:

        spc_mods, pes_mod = parser.models.extract_models(run_fit_tsk)
        pes_mod_dct_i = pes_mod_dct[pes_mod]

        ioprinter.nasa('header')
        chn_basis_ene_dct = {}
        for idx, spc_name in enumerate(spc_queue):

            # Take species model and add it to the chn_basis_ene dct
            spc_mod = spc_mods[0]
            spc_mod_dct_i = spc_mod_dct[spc_mod]
            if spc_mod not in chn_basis_ene_dct:
                chn_basis_ene_dct[spc_mod] = {}

            # Get the reference scheme and energies (ref in different place)
            ref_scheme = pes_mod_dct_i['therm_fit']['ref_scheme']
            ref_enes = pes_mod_dct_i['therm_fit']['ref_enes']

            # Determine info about the basis species used in thermochem calcs
            basis_dct, uniref_dct = thermfit.prepare_refs(
                ref_scheme, spc_dct, (spc_name,))

            # Get the basis info for the spc of interest
            spc_basis, coeff_basis = basis_dct[spc_name]

            # Get the energies for the spc and its basis
            ene_basis = []
            energy_missing = False
            for spc_basis_i in spc_basis:
                if spc_basis_i in chn_basis_ene_dct[spc_mod]:
                    ioprinter.message(
                        'Energy already found for basis species: '
                        + spc_basis_i)
                    ene_basis.append(chn_basis_ene_dct[spc_mod][spc_basis_i])
                else:
                    ioprinter.message(
                        'Energy will be determined for basis species: '
                        + spc_basis_i)
                    energy_missing = True
            if not energy_missing:
                pf_filesystems = filesys.models.pf_filesys(
                    spc_dct[spc_name], spc_mod_dct_i,
                    run_prefix, save_prefix, saddle=False)
                ene_spc = ene.read_energy(
                    spc_dct[spc_name], pf_filesystems, spc_mod_dct_i,
                    run_prefix, read_ene=True, read_zpe=True, saddle=False)
            else:
                ene_spc, ene_basis = thmroutines.basis.basis_energy(
                    spc_name, spc_basis, uniref_dct, spc_dct,
                    spc_mod_dct_i, run_prefix, save_prefix)
                for spc_basis_i, ene_basis_i in zip(spc_basis, ene_basis):
                    chn_basis_ene_dct[spc_mod][spc_basis_i] = ene_basis_i

            # Calculate and store the 0 K Enthalpy
            hf0k = thermfit.heatform.calc_hform_0k(
                ene_spc, ene_basis, spc_basis, coeff_basis, ref_set=ref_enes)
            spc_dct[spc_name]['Hfs'] = [hf0k]

        # Write the NASA polynomials in CHEMKIN format
        ckin_nasa_str = ''
        ckin_path = output_path('CKIN')
        for idx, spc_name in enumerate(spc_queue):

            ioprinter.nasa('calculate', spc_name)

            # Write the header describing the models used in thermo calcs
            ckin_nasa_str += writer.ckin.model_header(spc_mods, spc_mod_dct)

            # Build and write the NASA polynomial in CHEMKIN-format string
            # Call dies if you haven't run the "write mess" task
            ckin_nasa_str += thmroutines.nasapoly.build_polynomial(
                spc_name, spc_dct,
                thm_paths[idx]['final'][0], thm_paths[idx]['final'][1])
            ckin_nasa_str += '\n\n'
        print('CKIN NASA STR\n')
        print(ckin_nasa_str)

        nasa7_params_all = chemkin_io.parser.thermo.create_spc_nasa7_dct(
            ckin_nasa_str)
        ioprinter.info_message(
            'SPECIES\t\tH(0 K)[kcal/mol]\tH(298 K)[kcal/mol]\t' +
            'S(298 K)[cal/mol K]\n')
        for spc_name in nasa7_params_all:
            nasa7_params = nasa7_params_all[spc_name]
            ht0 = spc_dct[spc_name]['Hfs'][0]
            ht298 = mechanalyzer.calculator.thermo.enthalpy(
                nasa7_params, 298.15)
            st298 = mechanalyzer.calculator.thermo.entropy(
                nasa7_params, 298.15)
            ioprinter.info_message(
                '{}\t{:3.2f}\t{:3.2f}\t{:3.2f}'.format(
                    spc_name, ht0, ht298/1000., st298))

        # Write all of the NASA polynomial strings
        writer.ckin.write_nasa_file(ckin_nasa_str, ckin_path)
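# For reference, the 0 K enthalpy-of-formation step above combines the species
# energy with the basis-species energies and coefficients returned by the chosen
# reference scheme. The sketch below only illustrates the general structure of
# that combination under the usual convention (electronic+ZPE energies in
# hartree, reference heats of formation in kcal/mol); it is not the actual
# thermfit.heatform.calc_hform_0k implementation, and sketch_hform_0k and the
# hf0k_basis argument are hypothetical names introduced here.

EH2KCAL = 627.5094740631  # hartree -> kcal/mol


def sketch_hform_0k(ene_spc, ene_basis, coeff_basis, hf0k_basis):
    """ Illustrative 0 K heat of formation from a basis-species scheme:
        dHf(spc) = [E(spc) - sum_i c_i * E(basis_i)] * EH2KCAL
                   + sum_i c_i * dHf(basis_i)
        Energies in hartree; heats of formation in kcal/mol.
    """
    ene_diff = ene_spc - sum(
        c * e for c, e in zip(coeff_basis, ene_basis))
    return ene_diff * EH2KCAL + sum(
        c * h for c, h in zip(coeff_basis, hf0k_basis))


# Example with made-up numbers: one basis species with coefficient 1
# print(sketch_hform_0k(-40.50, [-40.48], [1.0], [-17.8]))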