def compute(self, output_dir):
    """Parse the OUTCAR in *output_dir* into a serializable dict.

    Returns the LabeledSystem dict with an extra per-frame stress entry
    assembled from the OUTCAR 'in kB' lines, or None when no OUTCAR is
    present (a warning is logged and the directory is skipped).
    """
    outcar = os.path.join(output_dir, 'OUTCAR')
    if not os.path.isfile(outcar):
        dlog.warning("cannot find OUTCAR in " + output_dir + " skip")
        return None

    ls = LabeledSystem(outcar)

    # Collect one symmetric 3x3 stress tensor per 'in kB' record.
    stress = []
    with open(outcar, 'r') as fin:
        text = fin.read()
    for line in text.split('\n'):
        if 'in kB' not in line:
            continue
        # VASP field order on this line: XX YY ZZ XY YZ ZX (tokens 2..7).
        xx, yy, zz, xy, yz, zx = (float(tok) for tok in line.split()[2:8])
        stress.append([[xx, xy, zx],
                       [xy, yy, yz],
                       [zx, yz, zz]])

    outcar_dict = ls.as_dict()
    # Stored in monty/numpy "as_dict" form so loadfn can rebuild the array.
    outcar_dict['data']['stress'] = {
        "@module": "numpy",
        "@class": "array",
        "dtype": "float64",
        "data": stress
    }
    return outcar_dict
def make_property(confs, inter_param, property_list):
    """Create working directories and input files for every property task.

    Parameters
    ----------
    confs : list of str
        Glob patterns of configuration directories (e.g. 'confs/mp-*').
    inter_param : dict
        Calculator/interaction parameters; a property may override them via
        cal_setting['overwrite_interaction'].
    property_list : list of dict
        One dict per property ('eos', 'elastic', ...); entries with
        "skip": True are ignored.
    """
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            if jj.get("skip", False):
                continue
            # BUGFIX: the original condition was
            #   if 'init_from_suffix' and 'output_suffix' in jj:
            # where the first operand is a non-empty string (always truthy),
            # so only the presence of 'output_suffix' was actually checked.
            # Refinement requires BOTH keys.
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                do_refine = True
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                do_refine = False
                suffix = 'reprod'
            else:
                do_refine = False
                suffix = '00'
            # generate working directory like mp-xxx/eos_00 if jj['type'] == 'eos'
            # handel the exception that the working directory exists
            # ...
            # determine the suffix: from scratch or refine
            # ...
            property_type = jj['type']
            path_to_equi = os.path.join(ii, 'relaxation', 'relax_task')
            path_to_work = os.path.join(ii, property_type + '_' + suffix)
            if os.path.exists(path_to_work):
                dlog.warning('%s already exists' % path_to_work)
            else:
                os.makedirs(path_to_work)
            prop = make_property_instance(jj)
            task_list = prop.make_confs(path_to_work, path_to_equi, do_refine)
            # Per-property override of the global interaction parameters.
            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']
            for kk in task_list:
                poscar = os.path.join(kk, 'POSCAR')
                inter = make_calculator(inter_param_prop, poscar)
                inter.make_potential_files(kk)
                dlog.debug(prop.task_type())  ### debug
                inter.make_input_file(kk, prop.task_type(), prop.task_param())
            prop.post_process(
                task_list
            )  # generate same KPOINTS file for elastic when doing VASP
def exec_command(self, cmd, retry=0):
    """Run *cmd* via self.ssh.exec_command, reviving a dead SSH session.

    On an SSHException the session is re-established and the command is
    retried up to 3 times before a RuntimeError is raised.
    """
    try:
        return self.ssh.exec_command(cmd)
    except paramiko.ssh_exception.SSHException:
        # SSH session not active; give up after 3 attempts.
        if retry >= 3:
            raise RuntimeError("SSH session not active")
        dlog.warning("SSH session not active in calling %s, retry the command..." % cmd)
        # Re-establish the connection before retrying.
        self.ensure_alive()
        return self.exec_command(cmd, retry=retry + 1)
def block_checkcall(self, cmd, retry=0):
    """Run *cmd* inside the remote root and block until it finishes.

    Returns (stdin, stdout, stderr) on exit status 0; otherwise sleeps 60 s
    and retries up to 3 times, then raises RuntimeError with the remote
    stderr included.
    """
    self.ssh_session.ensure_alive()
    remote_cmd = ('cd %s ;' % self.remote_root) + cmd
    stdin, stdout, stderr = self.ssh_session.exec_command(remote_cmd)
    exit_status = stdout.channel.recv_exit_status()
    if exit_status == 0:
        return stdin, stdout, stderr
    if retry < 3:
        # sleep 60 s
        dlog.warning("Get error code %d in calling %s through ssh with job: %s . message: %s"
                     % (exit_status, cmd, self.job_uuid, stderr.read().decode('utf-8')))
        dlog.warning("Sleep 60 s and retry the command...")
        time.sleep(60)
        return self.block_checkcall(cmd, retry=retry + 1)
    raise RuntimeError("Get error code %d in calling %s through ssh with job: %s . message: %s"
                       % (exit_status, cmd, self.job_uuid, stderr.read().decode('utf-8')))
def make_confs(self, path_to_work, path_to_equi, refine=False):
    """Generate elastic-deformation task directories under *path_to_work*.

    Links the equilibrium CONTCAR as POSCAR, records the equilibrium stress,
    then either refines existing tasks (refine=True) or generates one task
    per deformed structure.  Returns the list of task directories.
    """
    path_to_work = os.path.abspath(path_to_work)
    if os.path.exists(path_to_work):
        dlog.warning('%s already exists' % path_to_work)
    else:
        os.makedirs(path_to_work)
    path_to_equi = os.path.abspath(path_to_equi)
    # Optionally redirect the equilibrium path to a pre-relaxed structure
    # with a matching name under start_confs_path.
    if 'start_confs_path' in self.parameter and os.path.exists(
            self.parameter['start_confs_path']):
        init_path_list = glob.glob(
            os.path.join(self.parameter['start_confs_path'], '*'))
        struct_init_name_list = []
        for ii in init_path_list:
            struct_init_name_list.append(ii.split('/')[-1])
        struct_output_name = path_to_work.split('/')[-2]
        assert struct_output_name in struct_init_name_list
        path_to_equi = os.path.abspath(
            os.path.join(self.parameter['start_confs_path'],
                         struct_output_name, 'relaxation', 'relax_task'))
    task_list = []
    cwd = os.getcwd()
    equi_contcar = os.path.join(path_to_equi, 'CONTCAR')
    os.chdir(path_to_work)
    # Replace any stale POSCAR (regular file or symlink) with a fresh link.
    if os.path.isfile('POSCAR'):
        os.remove('POSCAR')
    if os.path.islink('POSCAR'):
        os.remove('POSCAR')
    os.symlink(os.path.relpath(equi_contcar), 'POSCAR')
    # task_poscar = os.path.join(output, 'POSCAR')
    # stress, deal with unsupported stress in dpdata
    #with open(os.path.join(path_to_equi, 'result.json')) as fin:
    #    equi_result = json.load(fin)
    #equi_stress = np.array(equi_result['stress']['data'])[-1]
    # Record the last-frame equilibrium stress for later elastic analysis.
    equi_result = loadfn(os.path.join(path_to_equi, 'result.json'))
    equi_stress = equi_result['stress'][-1]
    dumpfn(equi_stress, 'equi.stress.json', indent=4)
    os.chdir(cwd)
    if refine:
        print('elastic refine starts')
        task_list = make_refine(self.parameter['init_from_suffix'],
                                self.parameter['output_suffix'],
                                path_to_work)
        # record strain
        # df = Strain.from_deformation(dfm_ss.deformations[idid])
        # dumpfn(df.as_dict(), 'strain.json', indent=4)
        # Derive the source path by swapping the suffix at the END of the
        # path (reversed strings + count=1 make re.sub hit the last match).
        init_from_path = re.sub(self.parameter['output_suffix'][::-1],
                                self.parameter['init_from_suffix'][::-1],
                                path_to_work[::-1],
                                count=1)[::-1]
        task_list_basename = list(map(os.path.basename, task_list))
        # Copy the recorded strain of each source task into the refined task.
        for ii in task_list_basename:
            init_from_task = os.path.join(init_from_path, ii)
            output_task = os.path.join(path_to_work, ii)
            os.chdir(output_task)
            if os.path.isfile('strain.json'):
                os.remove('strain.json')
            copyfile(os.path.join(init_from_task, 'strain.json'), 'strain.json')
            #os.symlink(os.path.relpath(
            #    os.path.join((re.sub(self.parameter['output_suffix'], self.parameter['init_from_suffix'], ii)),
            #                 'strain.json')),
            #           'strain.json')
        os.chdir(cwd)
    else:
        # Four strain magnitudes per mode: +/- full and +/- half deformation.
        norm_def = self.norm_deform
        shear_def = self.shear_deform
        norm_strains = [
            -norm_def, -0.5 * norm_def, 0.5 * norm_def, norm_def
        ]
        shear_strains = [
            -shear_def, -0.5 * shear_def, 0.5 * shear_def, shear_def
        ]
        if not os.path.exists(equi_contcar):
            raise RuntimeError("please do relaxation first")
        ss = Structure.from_file(equi_contcar)
        dfm_ss = DeformedStructureSet(ss,
                                      symmetry=False,
                                      norm_strains=norm_strains,
                                      shear_strains=shear_strains)
        n_dfm = len(dfm_ss)
        print('gen with norm ' + str(norm_strains))
        print('gen with shear ' + str(shear_strains))
        # One task directory per deformed structure.
        for ii in range(n_dfm):
            output_task = os.path.join(path_to_work, 'task.%06d' % ii)
            os.makedirs(output_task, exist_ok=True)
            os.chdir(output_task)
            # Clear leftovers from a previous run before writing new inputs.
            for jj in [
                    'INCAR', 'POTCAR', 'POSCAR', 'conf.lmp', 'in.lammps'
            ]:
                if os.path.exists(jj):
                    os.remove(jj)
            task_list.append(output_task)
            dfm_ss.deformed_structures[ii].to('POSCAR', 'POSCAR')
            # record strain
            df = Strain.from_deformation(dfm_ss.deformations[ii])
            dumpfn(df.as_dict(), 'strain.json', indent=4)
        os.chdir(cwd)
    return task_list
def make_confs(self, path_to_work, path_to_equi, refine=False):
    """Generate EOS (equation of state) task directories under *path_to_work*.

    Depending on configuration this reproduces previous results, refines
    existing tasks, or scans volumes from vol_start to vol_end in steps of
    vol_step, one scaled POSCAR per task.  Returns the list of task dirs.
    """
    path_to_work = os.path.abspath(path_to_work)
    if os.path.exists(path_to_work):
        dlog.warning('%s already exists' % path_to_work)
    else:
        os.makedirs(path_to_work)
    path_to_equi = os.path.abspath(path_to_equi)
    # Optionally redirect the equilibrium path to a pre-relaxed structure
    # with a matching name under start_confs_path.
    if 'start_confs_path' in self.parameter and os.path.exists(
            self.parameter['start_confs_path']):
        init_path_list = glob.glob(
            os.path.join(self.parameter['start_confs_path'], '*'))
        struct_init_name_list = []
        for ii in init_path_list:
            struct_init_name_list.append(ii.split('/')[-1])
        struct_output_name = path_to_work.split('/')[-2]
        assert struct_output_name in struct_init_name_list
        path_to_equi = os.path.abspath(
            os.path.join(self.parameter['start_confs_path'],
                         struct_output_name, 'relaxation', 'relax_task'))
    cwd = os.getcwd()
    task_list = []
    if self.reprod:
        # Reproduce mode: rebuild tasks from a previous data set.
        print('eos reproduce starts')
        if 'init_data_path' not in self.parameter:
            raise RuntimeError(
                "please provide the initial data path to reproduce")
        init_data_path = os.path.abspath(self.parameter['init_data_path'])
        task_list = make_repro(
            init_data_path, self.init_from_suffix, path_to_work,
            self.parameter.get('reprod_last_frame', True))
        os.chdir(cwd)
    else:
        if refine:
            print('eos refine starts')
            task_list = make_refine(self.parameter['init_from_suffix'],
                                    self.parameter['output_suffix'],
                                    path_to_work)
            os.chdir(cwd)
            # Derive the source path by swapping the suffix at the END of
            # the path (reversed strings + count=1 hit the last match).
            init_from_path = re.sub(
                self.parameter['output_suffix'][::-1],
                self.parameter['init_from_suffix'][::-1],
                path_to_work[::-1],
                count=1)[::-1]
            task_list_basename = list(map(os.path.basename, task_list))
            # Link each source task's eos.json into the refined task.
            for ii in task_list_basename:
                init_from_task = os.path.join(init_from_path, ii)
                output_task = os.path.join(path_to_work, ii)
                os.chdir(output_task)
                if os.path.isfile('eos.json'):
                    os.remove('eos.json')
                if os.path.islink('eos.json'):
                    os.remove('eos.json')
                os.symlink(
                    os.path.relpath(
                        os.path.join(init_from_task, 'eos.json')),
                    'eos.json')
            os.chdir(cwd)
        else:
            print('gen eos from ' + str(self.vol_start) + ' to ' +
                  str(self.vol_end) + ' by every ' + str(self.vol_step))
            equi_contcar = os.path.join(path_to_equi, 'CONTCAR')
            if not os.path.exists(equi_contcar):
                raise RuntimeError("please do relaxation first")
            # Equilibrium volume per atom, used to convert relative volume
            # into an absolute cell volume.
            vol_to_poscar = vasp.poscar_vol(
                equi_contcar) / vasp.poscar_natoms(equi_contcar)
            self.parameter['scale2equi'] = []
            task_num = 0
            # Scan relative volumes [vol_start, vol_end) in vol_step steps.
            while self.vol_start + self.vol_step * task_num < self.vol_end:
                # for vol in np.arange(int(self.vol_start * 100), int(self.vol_end * 100), int(self.vol_step * 100)):
                #     vol = vol / 100.0
                vol = self.vol_start + task_num * self.vol_step
                #task_num = int((vol - self.vol_start) / self.vol_step)
                output_task = os.path.join(path_to_work,
                                           'task.%06d' % task_num)
                os.makedirs(output_task, exist_ok=True)
                os.chdir(output_task)
                # Clear leftovers from a previous run.
                for ii in [
                        'INCAR', 'POTCAR', 'POSCAR.orig', 'POSCAR',
                        'conf.lmp', 'in.lammps'
                ]:
                    if os.path.exists(ii):
                        os.remove(ii)
                task_list.append(output_task)
                os.symlink(os.path.relpath(equi_contcar), 'POSCAR.orig')
                # scale = (vol / vol_to_poscar) ** (1. / 3.)
                # Linear scale factor corresponding to the relative volume.
                scale = vol**(1. / 3.)
                eos_params = {
                    'volume': vol * vol_to_poscar,
                    'scale': scale
                }
                dumpfn(eos_params, 'eos.json', indent=4)
                self.parameter['scale2equi'].append(scale)  # 06/22
                vasp.poscar_scale('POSCAR.orig', 'POSCAR', scale)
                task_num += 1
            os.chdir(cwd)
    return task_list
def make_equi(confs, inter_param, relax_param):
    """Prepare relaxation (equilibrium) tasks for every configuration.

    Parameters
    ----------
    confs : list of str
        Glob patterns of configuration directories.
    inter_param : dict
        Calculator parameters; element list is taken from 'type_map' or
        'potcars'.
    relax_param : dict
        Relaxation parameters; mutated in place ('cal_type' is set and
        missing 'cal_setting' relax flags are defaulted to True).
    """
    # find all POSCARs and their name like mp-xxx
    # ...
    dlog.debug('debug info make equi')
    if 'type_map' in inter_param:
        ele_list = [key for key in inter_param['type_map'].keys()]
    else:
        ele_list = [key for key in inter_param['potcars'].keys()]
    # ele_list = inter_param['type_map']
    dlog.debug("ele_list %s" % ':'.join(ele_list))
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    # generate a list of task names like mp-xxx/relaxation/relax_task
    # ...
    cwd = os.getcwd()
    # generate poscar for single element crystal
    if len(ele_list) == 1:
        for ii in conf_dirs:
            os.chdir(ii)
            crys_type = ii.split('/')[-1]
            dlog.debug('crys_type: %s' % crys_type)
            dlog.debug('pwd: %s' % os.getcwd())
            # Build a standard-crystal POSCAR only if none exists yet.
            if crys_type == 'std-fcc':
                if not os.path.exists('POSCAR'):
                    crys.fcc1(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-hcp':
                if not os.path.exists('POSCAR'):
                    crys.hcp(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-dhcp':
                if not os.path.exists('POSCAR'):
                    crys.dhcp(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-bcc':
                if not os.path.exists('POSCAR'):
                    crys.bcc(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-diamond':
                if not os.path.exists('POSCAR'):
                    crys.diamond(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-sc':
                if not os.path.exists('POSCAR'):
                    crys.sc(ele_list[0]).to('POSCAR', 'POSCAR')
            os.chdir(cwd)
    task_dirs = []
    # make task directories like mp-xxx/relaxation/relax_task
    # if mp-xxx/exists then print a warning and exit.
    # ...
    for ii in conf_dirs:
        crys_type = ii.split('/')[-1]
        dlog.debug('crys_type: %s' % crys_type)
        # Fetch mp-xxx structures on demand when no POSCAR is present.
        if 'mp-' in crys_type and not os.path.exists(
                os.path.join(ii, 'POSCAR')):
            get_structure(crys_type).to('POSCAR', os.path.join(ii, 'POSCAR'))
        poscar = os.path.abspath(os.path.join(ii, 'POSCAR'))
        if not os.path.exists(poscar):
            raise FileNotFoundError('no configuration for autotest')
        relax_dirs = os.path.abspath(
            os.path.join(ii, 'relaxation', 'relax_task')
        )  # to be consistent with property in make dispatcher
        if os.path.exists(relax_dirs):
            dlog.warning('%s already exists' % relax_dirs)
        else:
            os.makedirs(relax_dirs)
        task_dirs.append(relax_dirs)
        os.chdir(relax_dirs)
        # copy POSCARs to mp-xxx/relaxation/relax_task
        # ...
        # Remove a stale POSCAR (regular file or symlink) before relinking.
        # BUGFIX: the original only checked isfile(); a dangling symlink is
        # not a file, so os.symlink would fail — check islink() too, as the
        # other make_confs routines in this module do.
        if os.path.isfile('POSCAR'):
            os.remove('POSCAR')
        if os.path.islink('POSCAR'):
            os.remove('POSCAR')
        os.symlink(os.path.relpath(poscar), 'POSCAR')
        os.chdir(cwd)
    task_dirs.sort()
    # generate task files
    relax_param['cal_type'] = 'relaxation'
    # BUGFIX: the original used an if/elif chain here, so at most ONE missing
    # relax flag got its default.  Each flag must be defaulted independently.
    if 'cal_setting' not in relax_param:
        relax_param['cal_setting'] = {
            "relax_pos": True,
            "relax_shape": True,
            "relax_vol": True
        }
    else:
        if "relax_pos" not in relax_param['cal_setting']:
            relax_param['cal_setting']['relax_pos'] = True
        if "relax_shape" not in relax_param['cal_setting']:
            relax_param['cal_setting']['relax_shape'] = True
        if "relax_vol" not in relax_param['cal_setting']:
            relax_param['cal_setting']['relax_vol'] = True
    for ii in task_dirs:
        poscar = os.path.join(ii, 'POSCAR')
        dlog.debug('task_dir %s' % ii)
        inter = make_calculator(inter_param, poscar)
        inter.make_potential_files(ii)
        inter.make_input_file(ii, 'relaxation', relax_param)
def make_confs(self, path_to_work, path_to_equi, refine=False):
    """Generate surface-energy task directories under *path_to_work*.

    Reproduces previous results, refines existing tasks, or builds one task
    per slab generated from the relaxed structure up to self.miller.
    Returns the list of task directories.
    """
    path_to_work = os.path.abspath(path_to_work)
    if os.path.exists(path_to_work):
        dlog.warning('%s already exists' % path_to_work)
    else:
        os.makedirs(path_to_work)
    path_to_equi = os.path.abspath(path_to_equi)
    # Optionally redirect the equilibrium path to a pre-relaxed structure
    # with a matching name under start_confs_path.
    if 'start_confs_path' in self.parameter and os.path.exists(self.parameter['start_confs_path']):
        init_path_list = glob.glob(os.path.join(self.parameter['start_confs_path'], '*'))
        struct_init_name_list = []
        for ii in init_path_list:
            struct_init_name_list.append(ii.split('/')[-1])
        struct_output_name = path_to_work.split('/')[-2]
        assert struct_output_name in struct_init_name_list
        path_to_equi = os.path.abspath(os.path.join(self.parameter['start_confs_path'],
                                                    struct_output_name, 'relaxation', 'relax_task'))
    task_list = []
    cwd = os.getcwd()
    if self.reprod:
        # Reproduce mode: rebuild tasks from a previous data set.
        print('surface reproduce starts')
        if 'init_data_path' not in self.parameter:
            raise RuntimeError("please provide the initial data path to reproduce")
        init_data_path = os.path.abspath(self.parameter['init_data_path'])
        task_list = make_repro(init_data_path, self.init_from_suffix, path_to_work,
                               self.parameter.get('reprod_last_frame', True))
        os.chdir(cwd)
    else:
        if refine:
            print('surface refine starts')
            task_list = make_refine(self.parameter['init_from_suffix'],
                                    self.parameter['output_suffix'],
                                    path_to_work)
            os.chdir(cwd)
            # record miller
            # Derive the source path by swapping the suffix at the END of
            # the path (reversed strings + count=1 hit the last match).
            init_from_path = re.sub(self.parameter['output_suffix'][::-1],
                                    self.parameter['init_from_suffix'][::-1],
                                    path_to_work[::-1], count=1)[::-1]
            task_list_basename = list(map(os.path.basename, task_list))
            # Link each source task's miller.json into the refined task.
            for ii in task_list_basename:
                init_from_task = os.path.join(init_from_path, ii)
                output_task = os.path.join(path_to_work, ii)
                os.chdir(output_task)
                if os.path.isfile('miller.json'):
                    os.remove('miller.json')
                if os.path.islink('miller.json'):
                    os.remove('miller.json')
                os.symlink(os.path.relpath(os.path.join(init_from_task, 'miller.json')), 'miller.json')
            os.chdir(cwd)
        else:
            equi_contcar = os.path.join(path_to_equi, 'CONTCAR')
            if not os.path.exists(equi_contcar):
                raise RuntimeError("please do relaxation first")
            ptypes = vasp.get_poscar_types(equi_contcar)
            # gen structure
            ss = Structure.from_file(equi_contcar)
            # gen slabs
            all_slabs = generate_all_slabs(ss, self.miller, self.min_slab_size, self.min_vacuum_size)
            os.chdir(path_to_work)
            # Replace a stale POSCAR (regular file or symlink) with a link
            # to the relaxed structure.
            if os.path.isfile('POSCAR'):
                os.remove('POSCAR')
            if os.path.islink('POSCAR'):
                os.remove('POSCAR')
            os.symlink(os.path.relpath(equi_contcar), 'POSCAR')
            # task_poscar = os.path.join(output, 'POSCAR')
            # One task directory per generated slab.
            for ii in range(len(all_slabs)):
                output_task = os.path.join(path_to_work, 'task.%06d' % ii)
                os.makedirs(output_task, exist_ok=True)
                os.chdir(output_task)
                # Clear leftovers from a previous run.
                for jj in ['INCAR', 'POTCAR', 'POSCAR', 'conf.lmp', 'in.lammps']:
                    if os.path.exists(jj):
                        os.remove(jj)
                task_list.append(output_task)
                print("# %03d generate " % ii, output_task, " \t %d atoms" % len(all_slabs[ii].sites))
                # make confs
                all_slabs[ii].to('POSCAR', 'POSCAR.tmp')
                vasp.regulate_poscar('POSCAR.tmp', 'POSCAR')
                vasp.sort_poscar('POSCAR', 'POSCAR', ptypes)
                vasp.perturb_xz('POSCAR', 'POSCAR', self.pert_xz)
                # record miller
                dumpfn(all_slabs[ii].miller_index, 'miller.json')
            os.chdir(cwd)
    return task_list
def make_confs(self, path_to_work, path_to_equi, refine=False):
    """Generate vacancy-formation task directories under *path_to_work*.

    Reproduces previous results, refines existing tasks, or builds one task
    per vacancy defect structure in the requested supercell.  Returns the
    list of task directories.
    """
    path_to_work = os.path.abspath(path_to_work)
    if os.path.exists(path_to_work):
        dlog.warning('%s already exists' % path_to_work)
    else:
        os.makedirs(path_to_work)
    path_to_equi = os.path.abspath(path_to_equi)
    # Optionally redirect the equilibrium path to a pre-relaxed structure
    # with a matching name under start_confs_path.
    if 'start_confs_path' in self.parameter and os.path.exists(
            self.parameter['start_confs_path']):
        init_path_list = glob.glob(
            os.path.join(self.parameter['start_confs_path'], '*'))
        struct_init_name_list = []
        for ii in init_path_list:
            struct_init_name_list.append(ii.split('/')[-1])
        struct_output_name = path_to_work.split('/')[-2]
        assert struct_output_name in struct_init_name_list
        path_to_equi = os.path.abspath(
            os.path.join(self.parameter['start_confs_path'],
                         struct_output_name, 'relaxation', 'relax_task'))
    task_list = []
    cwd = os.getcwd()
    if self.reprod:
        # Reproduce mode: rebuild tasks from a previous data set.
        # NOTE: unlike eos/surface, reprod_last_frame defaults to False here.
        print('vacancy reproduce starts')
        if 'init_data_path' not in self.parameter:
            raise RuntimeError(
                "please provide the initial data path to reproduce")
        init_data_path = os.path.abspath(self.parameter['init_data_path'])
        task_list = make_repro(
            init_data_path, self.init_from_suffix, path_to_work,
            self.parameter.get('reprod_last_frame', False))
        os.chdir(cwd)
    else:
        if refine:
            print('vacancy refine starts')
            task_list = make_refine(self.parameter['init_from_suffix'],
                                    self.parameter['output_suffix'],
                                    path_to_work)
            # Derive the source path by swapping the suffix at the END of
            # the path (reversed strings + count=1 hit the last match).
            init_from_path = re.sub(
                self.parameter['output_suffix'][::-1],
                self.parameter['init_from_suffix'][::-1],
                path_to_work[::-1],
                count=1)[::-1]
            task_list_basename = list(map(os.path.basename, task_list))
            # Link each source task's supercell.json into the refined task.
            for ii in task_list_basename:
                init_from_task = os.path.join(init_from_path, ii)
                output_task = os.path.join(path_to_work, ii)
                os.chdir(output_task)
                if os.path.isfile('supercell.json'):
                    os.remove('supercell.json')
                if os.path.islink('supercell.json'):
                    os.remove('supercell.json')
                os.symlink(
                    os.path.relpath(
                        os.path.join(init_from_task, 'supercell.json')),
                    'supercell.json')
            os.chdir(cwd)
        else:
            equi_contcar = os.path.join(path_to_equi, 'CONTCAR')
            if not os.path.exists(equi_contcar):
                raise RuntimeError("please do relaxation first")
            ss = Structure.from_file(equi_contcar)
            # Enumerate symmetry-distinct vacancies and expand each into
            # the requested supercell.
            vds = VacancyGenerator(ss)
            dss = []
            for jj in vds:
                dss.append(jj.generate_defect_structure(self.supercell))
            print('gen vacancy with supercell ' + str(self.supercell))
            os.chdir(path_to_work)
            # Replace a stale POSCAR (regular file or symlink) with a link
            # to the relaxed structure.
            if os.path.isfile('POSCAR'):
                os.remove('POSCAR')
            if os.path.islink('POSCAR'):
                os.remove('POSCAR')
            os.symlink(os.path.relpath(equi_contcar), 'POSCAR')
            # task_poscar = os.path.join(output, 'POSCAR')
            # One task directory per defect structure.
            for ii in range(len(dss)):
                output_task = os.path.join(path_to_work, 'task.%06d' % ii)
                os.makedirs(output_task, exist_ok=True)
                os.chdir(output_task)
                # Clear leftovers from a previous run.
                for jj in [
                        'INCAR', 'POTCAR', 'POSCAR', 'conf.lmp', 'in.lammps'
                ]:
                    if os.path.exists(jj):
                        os.remove(jj)
                task_list.append(output_task)
                dss[ii].to('POSCAR', 'POSCAR')
                # np.savetxt('supercell.out', self.supercell, fmt='%d')
                dumpfn(self.supercell, 'supercell.json')
            os.chdir(cwd)
    return task_list