def cluster(path):
    """Run the cluster-cutting stage of the project pipeline.

    Reads the cutting parameters from the project ``input.ini``, records
    them into the project JSON, retargets the bilayer RPA / geometry-
    optimisation jobs at a ``cluster`` directory, and writes one cluster
    geometry file per job (skipping jobs whose cluster already exists).

    :param path: project root directory.
    """
    rec = 'Cluster Cutting begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)
    # read parameters from ini file
    Ini = ReadIni()
    name, slab_or_molecule, group, lattice_parameter, number_of_atoms, \
        geometry, fixed_atoms = Ini.get_basic_info()
    center_atoms, factors, deleted_atoms, coord, add_h, out_layer_number = \
        Ini.get_cluster()
    cutting_setting = [coord, add_h]
    # persist the cutting parameters for later inspection
    record_data_json(path, 'central atoms', center_atoms, section='cluster')
    record_data_json(path, 'cutting factors', factors, section='cluster')
    record_data_json(path, 'deleted atoms', deleted_atoms, section='cluster')
    cutting_setting_dict = {
        'coord': coord,
        'add_h': add_h,
        'out_layer_number': out_layer_number
    }
    record_data_json(path, 'cutting setting', cutting_setting_dict,
                     section='cluster')
    # get bilayer jobs and point their paths at the cluster directory
    rpa_jobs = get_jobs(path)
    cluster_jobs = [job for job in rpa_jobs if job.layertype == 'bilayer']
    for job in cluster_jobs:
        if 'rpa' in job.path:
            job.path = job.path.replace('rpa', 'cluster')
        elif 'geo_opt' in job.path:
            job.path = job.path.replace('geo_opt', 'cluster')
        job.method = 'cluster'
    # generate clusters
    cluster_path = os.path.join(path, 'cluster')
    mkdir(cluster_path)
    Cluster.creat_json_file(cluster_path)
    for job in cluster_jobs:
        Clu = Cluster.ClusterCutter(
            job,
            center=center_atoms,
            name=name,
            fixed_atoms=fixed_atoms,
            factors=factors,
            cutting_setting=cutting_setting,
            deleted_atoms=deleted_atoms)
        if not Cluster.if_cluster_already_generated(job):
            Clu.get_cluster()
            # FIX: was `out_layer_number is True`, an identity test that
            # silently fails for any truthy non-bool value coming from the
            # ini parser; test truthiness instead.
            if out_layer_number:
                Clu.write_xyz_with_layernumber()
            else:
                Clu.write_xyz()
    rec = 'Cluster Cutting finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
def write_xyz_with_layernumber(self, cluster=None):
    """Write the cluster geometry to ``<name>_Cluster.xyz`` with layer tags.

    Each atom line carries the element symbol fused with the atom's layer
    number (e.g. ``C2``) followed by x/y/z in 12-digit scientific notation.
    Progress is printed and appended to the job's record file.

    :param cluster: atoms to write; when omitted (or empty) the atoms in
        ``self.choosed_atoms`` are used, generating the cluster first if
        it has not been computed yet.
    """
    # FIX: the original default was a mutable ``cluster=[]``, which is
    # shared between calls; a ``None`` sentinel avoids that pitfall while
    # keeping the same behavior for all callers.
    if len(self.choosed_atoms) == 0:
        self.get_cluster()
    if not cluster:
        cluster = self.choosed_atoms
    file_name = '{}_Cluster.xyz'.format(self.name)
    file_path = os.path.join(self.cluster_path, file_name)
    if not os.path.exists(self.cluster_path):
        mkdir(self.cluster_path)
    with open(file_path, 'w') as f:
        # standard xyz header: atom count, then a comment line
        f.write(str(len(cluster)) + '\n')
        f.write('{}_Cluster'.format(self.name) + '\n')
        for atom in cluster:
            ele = periodic_table_rev[int(atom.nat)]
            f.write((str(ele) + str(atom.layer)).center(6) + ' ')
            f.write('{:.12E}'.format(float(atom.x)).rjust(19) + ' ')
            f.write('{:.12E}'.format(float(atom.y)).rjust(19) + ' ')
            f.write('{:.12E}'.format(float(atom.z)).rjust(19))
            f.write('\n')
    rec = str(self.cluster_job) + '\n'
    rec += 'Geometry file generated.\n'
    rec += 'Number of atoms in cluster: {}\n'.format(len(cluster))
    rec += '---' * 25
    print(rec)
    record(self.cluster_job.root_path, rec)
def menu():
    """Program entry point.

    Reads ``input.ini`` located next to this script, initialises the
    project directory and record file, copies the ini file into the
    project, and launches the pipeline between the configured ``start``
    and ``end`` steps.
    """
    ini_path = os.path.dirname(os.path.realpath(__file__))
    ini_path = os.path.join(ini_path, 'input.ini')
    Ini = ReadIni(ini_path)
    path = Ini.project_path
    start = Ini.start
    end = Ini.end
    # sanity-check the requested step range before touching the disk
    test_begin(end, start)
    now = datetime.now()
    now = now.strftime("%b %d %Y %H:%M:%S")
    mkdir(path)
    rec = 'Project begins.'
    rec += '\n' + '***' * 25
    rename_file(path, 'record')
    record(path, rec, init=True)
    print('***' * 25)
    print(now)
    print(rec)
    try:
        # Best-effort copy of the ini file into the project directory; a
        # failure (e.g. source and destination identical) must not abort
        # the run.
        # FIX: build the destination with os.path.join instead of '/'
        # string concatenation, consistent with the rest of the function.
        shutil.copy(ini_path, os.path.join(path, 'input.ini'))
    except Exception as e:
        print(e)
    Pipeline.pipeline(path, start, end)
def write_lmp2_part1(self):
    """Create the LMP2 directory and write the fixed input-file header."""
    mkdir(self.lmp2_path)
    header_lines = [
        'READC14',
        'DUALBAS',
        'KNET',
        '8',
        'MEMORY',
        '40000',
        'NOSING',
    ]
    with open(self.input_path, 'w') as f:
        # one keyword per line, each newline-terminated
        f.write('\n'.join(header_lines) + '\n')
def write_part1(self):
    """Create the LMP2 directory and write the extended input-file header."""
    mkdir(self.lmp2_path)
    keywords = (
        'READC14',
        'DUALBAS',
        'KNET',
        '8',
        'MEMORY',
        '40000',
        'NOSYM12',
        'CLUSCOR',
        'LPairlst',
        '100000',
        'MOG_DIST',
        '9 9',
        'NOSING',
        'NFITCEL',
        '-100',
        'STREAMIN',
        'SMAL3IDX',
    )
    with open(self.input_path, 'w') as f:
        # one keyword (or value) per line, each newline-terminated
        for keyword in keywords:
            f.write(keyword + '\n')
def copy_molpro_inp(self):
    """Copy the LMP2 job's ``molpro.inp`` into the RPA directory as ``rpa.inp``."""
    mkdir(self.rpa_path)
    source = os.path.join(self.lmp2_job.path, 'molpro.inp')
    destination = os.path.join(self.rpa_path, 'rpa.inp')
    shutil.copy(source, destination)