Code Example #1
File: lmp2.py  Project: cccccsf/single_point
def lmp2(path):

    rec = 'LMP2 Calculation begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    ini = IniReader()
    nodes, cryscor_path = ini.get_lmp2()
    record_data_json(path, 'nodes', nodes, section='lmp2')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'lmp2')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    lmp2_jobs_finished, lmp2_jobs_new = [], []
    if not LMP2.if_cal_finish(bilayer_job):
        inp = LMP2.Lmp2Input(bilayer_job)
        inp.write_input()
        lmp2_jobs_new.append(bilayer_job)
        LMP2.copy_submit_src(bilayer_job, nodes, cryscor_path)
    else:
        bilayer_job.status = 'finished'
        lmp2_jobs_finished.append(bilayer_job)
    if not LMP2.if_cal_finish(upper_job):
        inp = LMP2.Lmp2InputLayer(upper_job)
        inp.write_input()
        lmp2_jobs_new.append(upper_job)
        LMP2.copy_submit_src(upper_job, nodes, cryscor_path)
    else:
        upper_job.status = 'finished'
        lmp2_jobs_finished.append(upper_job)
    if not LMP2.if_cal_finish(under_job):
        inp = LMP2.Lmp2InputLayer(under_job)
        inp.write_input()
        lmp2_jobs_new.append(under_job)
        LMP2.copy_submit_src(under_job, nodes, cryscor_path)
    else:
        under_job.status = 'finished'
        lmp2_jobs_finished.append(under_job)

    # submit jobs
    if len(lmp2_jobs_new) > 0:
        new_finished_jobs = LMP2.submit(lmp2_jobs_new)
        lmp2_jobs_finished += new_finished_jobs

    # read and record results
    LMP2.lmp2_read_record_results(path, lmp2_jobs_finished)

    rec = 'LMP2 finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
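
All of the drivers in this project pass a Job object around: they construct it from a directory, flip its status to 'finished', and in later examples also hang a method label, an input path and a basis-set tag on it. Below is a minimal sketch of the container these examples assume; the real Components.Job may carry more state and behaviour.

class Job:
    """Minimal sketch of the job container used throughout these drivers.

    Only attributes that the example functions actually touch are included;
    everything else about the real Components.Job is an assumption.
    """

    def __init__(self, path):
        self.path = path          # directory the calculation lives in
        self.status = 'new'       # drivers set this to 'finished'
        self.method = None        # e.g. 'avdz_rpa_cc', set by cluster/correction code
        self.input = None         # path to the input file, set where needed
        self.bs = None            # basis-set label, set by get_bs_in_job()

    def __repr__(self):
        return 'Job({!r}, status={!r})'.format(self.path, self.status)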
Code Example #2
File: rpa.py  Project: cccccsf/single_point
def rpa(path):

    rec = 'LRPA begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    ini = IniReader()
    (rpa_nodes_b, memory_b, rpa_nodes_s, memory_s,
     molpro_path, molpro_key) = ini.get_rpa()

    # generate Input file and scr file
    bilayer_path = os.path.join(path, 'rpa')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    rpa_jobs_finished, rpa_jobs_new = [], []
    if not RPA.if_cal_finish(bilayer_job):
        Inp = RPA.RPAInput(bilayer_job, memory_b)
        Inp.generate_input()
        Scr = RPA.Scr(bilayer_job, rpa_nodes_b, molpro_key, molpro_path)
        Scr.gen_scr()
        rpa_jobs_new.append(bilayer_job)
    else:
        bilayer_job.status = 'finished'
        rpa_jobs_finished.append(bilayer_job)
    for job in [upper_job, under_job]:
        if not RPA.if_cal_finish(job):
            Inp = RPA.RPAInput(job, memory_s)
            Inp.generate_input()
            Scr = RPA.Scr(job, rpa_nodes_s, molpro_key, molpro_path)
            Scr.gen_scr()
            rpa_jobs_new.append(job)
        else:
            job.status = 'finished'
            rpa_jobs_finished.append(job)

    # submit jobs
    if len(rpa_jobs_new) > 0:
        new_finished_jobs = RPA.submit(rpa_jobs_new)
        rpa_jobs_finished += new_finished_jobs
    # read and record results
    read_record_results(path, rpa_jobs_finished, RPA.get_energy, method='rpa')

    rec = 'LRPA finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
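
Both drivers above (and the ones below) share the same submit contract: collect the jobs whose results are missing, hand the list to the module's submit(), and merge the jobs it returns into the finished list before reading results. The following is a minimal, cluster-agnostic sketch of that contract, assuming a per-job submit script and a blocking polling loop; the real LMP2.submit / RPA.submit are queue-specific.

import subprocess
import time


def submit(jobs, if_cal_finish, script='submit.sh', poll_interval=60):
    """Sketch of the shared submit/collect contract.

    The script name and the polling strategy are assumptions;
    if_cal_finish is the module's own completion check.
    """
    for job in jobs:
        # hand each job to the queue from inside its own directory
        subprocess.Popen(['bash', script], cwd=job.path)
    pending, finished = list(jobs), []
    while pending:
        time.sleep(poll_interval)
        still_pending = []
        for job in pending:
            if if_cal_finish(job):
                job.status = 'finished'
                finished.append(job)
            else:
                still_pending.append(job)
        pending = still_pending
    return finished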
Code Example #3
def test_get_energy():
    path = r'C:\Users\ccccc\Documents\Theoritische Chemie\Masterarbeit\test\hf_2\x_-0.150\z_-0.106'
    job = Job(path)
    energy, unit = get_energy(job)
    print(energy, unit)
    expected = '-2.726040216969E+03'
    assert energy == expected
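
The test compares against a string, which suggests get_energy hands back the energy token exactly as it appears in the output file rather than a parsed float. A hedged sketch of such a reader follows; the output file name, the 'TOTAL ENERGY' marker and the fixed 'a.u.' unit are placeholders, not the project's actual parsing logic.

import os
import re

# matches a number in scientific notation such as -2.726040216969E+03
ENERGY_RE = re.compile(r'([-+]?\d+\.\d+E[-+]\d+)')


def get_energy(job):
    out_file = os.path.join(job.path, 'hf.out')      # assumed output file name
    energy = None
    with open(out_file) as f:
        for line in f:
            if 'TOTAL ENERGY' in line:               # assumed marker line
                match = ENERGY_RE.search(line)
                if match:
                    energy = match.group(1)          # keep the raw string, as the test expects
    return energy, 'a.u.'                            # unit label is an assumption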
Code Example #4
File: geo_opt.py  Project: cccccsf/single_point
def geo_opt(path):

    rec = 'Geometry Optimization begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    record_data_json(path, 'project_name', project_name)
    record_data_json(path, 'system_type', system_type)
    record_data_json(path, 'lattice_parameter', lattice_parameter)
    record_data_json(path, 'geometry', geometry)
    record_data_json(path, 'fixed_atoms', fixed_atoms)
    if isinstance(fixed_atoms, list) and len(fixed_atoms) == 2:
        geometry = Geometry(geometry=geometry, fixed_atoms=fixed_atoms)
    else:
        geometry = Geometry(geometry=geometry)
    bs, functional, nodes, crystal_path = Ini.get_geo_opt()
    record_data_json(path, 'basis_set', bs, section='geo_opt')
    record_data_json(path, 'functional', functional, section='geo_opt')
    record_data_json(path, 'nodes', nodes, section='geo_opt')

    job = os.path.join(path, 'geo_opt')
    job = Job(job)
    if not GeoOpt.if_job_finish(job):
        # generation of INPUT
        Geo_Inp = GeoOpt.Geo_Opt_Input(job, project_name, system_type,
                                       group_type, lattice_parameter, geometry,
                                       bs, functional)
        Geo_Inp.gen_input()
        # copy file and submit the job
        job = GeoOpt.submit(job, nodes, crystal_path, path)
    else:
        job.status = 'finished'

    # read and record the results
    GeoOpt.read_record_result(job, path)

    rec = 'Geometry optimization finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
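
geo_opt() and every later driver call record_data_json to file away settings and results that results() in Code Example #9 eventually reads back from 'results.json'. The sketch below shows what that helper plausibly does; the file name comes from Code Example #9, while the nesting-by-section layout is an assumption.

import json
import os


def record_data_json(path, key, value, section=None):
    """Sketch: merge a key/value pair into <path>/results.json.

    The section argument nests the entry one level deeper, matching how
    read_data_from_json() later walks a list of keys.
    """
    results_file = os.path.join(path, 'results.json')
    data = {}
    if os.path.exists(results_file):
        with open(results_file) as f:
            data = json.load(f)
    target = data.setdefault(section, {}) if section else data
    target[key] = value
    with open(results_file, 'w') as f:
        json.dump(data, f, indent=2)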
Code Example #5
def get_jobs(path):
    path = os.path.join(path, 'cluster')
    walks = os.walk(path)
    jobs = set()
    root_jobs = set()
    for root, dirs, files in walks:
        if len(files) > 0:
            for file in files:
                if os.path.splitext(file)[-1] == '.out':
                    new_job = Job(root)
                    new_job.method = os.path.splitext(file)[0]
                    if if_cal_finish(new_job) and new_job not in jobs:
                        new_job.bs = get_bs_in_job(new_job)
                        jobs.add(new_job)
                        root_jobs.add(Job(root))
    jobs = list(jobs)
    root_jobs = list(root_jobs)
    return jobs, root_jobs
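
Note that the `new_job not in jobs` membership test only deduplicates if Job defines value-based equality; with Python's default identity semantics every freshly constructed Job would count as new. Below is a sketch of the hooks the real Components.Job presumably provides (an assumption), extending the container sketched after Code Example #1.

class Job:
    def __init__(self, path, method=None):
        self.path = path
        self.method = method

    def __eq__(self, other):
        # same directory + same method label == same calculation
        return (isinstance(other, Job)
                and self.path == other.path
                and self.method == other.method)

    def __hash__(self):
        # note: method must not change after the job is put into a set
        return hash((self.path, self.method))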
Code Example #6
File: loc.py  Project: cccccsf/single_point
def localization(path):

    rec = 'Localization begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    ini = IniReader()
    nodes, crystal_path = ini.get_loc()
    record_data_json(path, 'nodes', nodes, section='loc')

    # generate jobs
    bilayer_path = os.path.join(path, 'loc')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    loc_jobs = [bilayer_job, upper_job, under_job]
    loc_jobs_finished, loc_jobs_new = [], []
    # check job and copy input file
    for job in loc_jobs:
        if not Loc.if_loc_finish(job):
            loc_jobs_new.append(job)
            Loc.copy_inp_file(job)
            Loc.copy_loc_scr(job, nodes, crystal_path)
        else:
            job.status = 'finished'
            loc_jobs_finished.append(job)

    # submit jobs
    if len(loc_jobs_new) > 0:
        new_finished_jobs = Loc.submit(loc_jobs_new)
        loc_jobs_finished += new_finished_jobs

    rec = 'Localization finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
Code Example #7
File: cluster.py  Project: cccccsf/single_point
def cluster(path):

    rec = 'Cluster Cutting begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    (central_atoms, factors, deleted_atoms, coord, add_h,
     out_layer_number) = Ini.get_cluster()
    cutting_setting = [coord, add_h]
    record_data_json(path, 'central atoms', central_atoms, section='cluster')
    record_data_json(path, 'cutting factors', factors, section='cluster')
    record_data_json(path, 'deleted atoms', deleted_atoms, section='cluster')
    cutting_setting_dict = {
        'coord': coord,
        'add_h': add_h,
        'out_layer_number': out_layer_number
    }
    record_data_json(path,
                     'cutting setting',
                     cutting_setting_dict,
                     section='cluster')

    geo_file = os.path.join(os.path.join(path, 'geo_opt'), 'geo_opt.out')
    job = os.path.join(path, 'cluster')
    job = Job(job)

    Clu = Cluster.ClusterCutter(job,
                                geo_file,
                                factors=factors,
                                name=project_name,
                                central_atoms=central_atoms,
                                fixed_atoms=fixed_atoms,
                                cutting_setting=cutting_setting,
                                deleted_atoms=deleted_atoms)
    Clu.get_cluster()

    if out_layer_number is True:
        Clu.write_xyz_with_layernumber()
    else:
        Clu.write_xyz()

    rec = 'Cluster Cutting finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Code Example #8
            f.write(self.molpro_form)
            f.write('\n')

    def write_loc(self):
        with open(self.input_file, 'a') as f:
            f.write('dfit,F4EXTSIZE=700,F3EXTSIZE=1100\n')
            f.write(
                'local,use_dist=0,keepcls=1,idist=0,loc_method=ibo,ivdist=0,interact=1,interpair=1,how_treatclswk=5\n'
            )
            f.write(
                'cfit,invsqrt=1,BASIS_MP2=avtz/mp2fit,BASIS_CCSD=avtz/mp2fit\n'
            )
            f.write('\n')


if __name__ == '__main__':
    import os  # used below for path handling; imported at module level in the original file

    from Components import Job

    path = r'C:\Users\ccccc\PycharmProjects\single_point\test\cluster'
    job = Job(path)
    job.method = 'per_bas_rpa_iext1'
    inp = os.path.join(path, 'per_bas_rpa_iext1.inp')
    job.input = inp
    name = 'blackP'
    memory = '2000'
    atom1 = ['3', '10', '24', '25']
    atom2 = ['8', '9', '16', '26']
    atoms = [atom1, atom2]
    Inp = InputPerRPA(job, name, memory, uc_atoms=atoms)
    Inp.gen_inp()
Code Example #9
def results(path):

    # get jobs
    correction_jobs, root_jobs = get_jobs(path)
    Ini = IniReader()
    unit = Ini.get_unit()

    # read results of correction if not found in 'results.json'
    results_file = os.path.join(path, 'results.json')
    if if_read_record_results_needed(correction_jobs, results_file):
        read_all_results(path, correction_jobs)

    # read formal results
    with open(results_file, 'r') as f:
        data = json.load(f)
    results_dict = {
        job.method: Results.Result(job,
                                   data['correction']['energy'][job.method][0],
                                   data['correction']['energy'][job.method][1])
        for job in correction_jobs
    }

    # get extrapolation values
    extrap_method_error = {}
    extrap_iext1_rpa = {}
    basis_set_correction = {}
    # method error correction
    if 'avdz_rpa_cc' in results_dict and 'avtz_rpa_cc' in results_dict:
        avdtz = Results.get_extrapolated_correction(results_dict['avdz_rpa_cc'], results_dict['avtz_rpa_cc'], 2, 3)
        avdtz.bs = 'avdtz'
        extrap_method_error['avdtz'] = avdtz
    if 'avtz_rpa_cc' in results_dict and 'avqz_rpa_cc' in results_dict:
        avtqz = Results.get_extrapolated_correction(results_dict['avtz_rpa_cc'], results_dict['avqz_rpa_cc'], 3, 4)
        avtqz.bs = 'avtqz'
        extrap_method_error['avtqz'] = avtqz
    for key, value in extrap_method_error.items():
        value.record_data_json(['method error', key])
    # basis set correction
    # iext1
    if 'avdz_iext1_rpa' in results_dict and 'avtz_iext1_rpa' in results_dict:
        avdtz = Results.get_extrapolated_correction(results_dict['avdz_iext1_rpa'], results_dict['avtz_iext1_rpa'], 2, 3)
        avdtz.bs = 'avdtz'
        extrap_iext1_rpa['avdtz'] = avdtz
    if 'avtz_iext1_rpa' in results_dict and 'avqz_iext1_rpa' in results_dict:
        avtqz = Results.get_extrapolated_correction(results_dict['avtz_iext1_rpa'], results_dict['avqz_iext1_rpa'], 3, 4)
        avtqz.bs = 'avtqz'
        extrap_iext1_rpa['avtqz'] = avtqz
    # get basis set correction
    if 'avdtz' in extrap_iext1_rpa:
        avdtz = extrap_iext1_rpa['avdtz'] - results_dict['per_bas_rpa_iext1']
        basis_set_correction['avdtz'] = avdtz
    if 'avtqz' in extrap_iext1_rpa:
        avtqz = extrap_iext1_rpa['avtqz'] - results_dict['per_bas_rpa_iext1']
        basis_set_correction['avtqz'] = avtqz
    for key, value in basis_set_correction.items():
        value.record_data_json(['basis set error', key])

    # HF
    hf_e, hf_unit = read_data_from_json(results_file, ['hf2', 'energy', 'layer_energy'])
    hf_job = Job(os.path.join(path, 'hf2'))
    hf_result = Results.Result(hf_job, energy=hf_e, unit=hf_unit)
    # embedded fragment LdrCCD (RPA)
    rpa_e, rpa_unit = read_data_from_json(results_file, ['rpa', 'energy', 'layer_energy'])
    rpa_job = Job(os.path.join(path, 'rpa'))
    rpa_result = Results.Result(rpa_job, rpa_e, rpa_unit)

    # final results
    final_data = {}
    # print(hf_result)
    # print(rpa_result)
    # print(extrap_method_error)
    # print(basis_set_correction)
    if 'avdtz' in extrap_method_error and 'avdtz' in basis_set_correction:
        avdtz = hf_result + rpa_result + extrap_method_error['avdtz'] + basis_set_correction['avdtz']
        final_data['avdtz'] = avdtz
    if 'avtqz' in extrap_method_error and 'avtqz' in basis_set_correction:
        avtqz = hf_result + rpa_result + extrap_method_error['avtqz'] + basis_set_correction['avtqz']
        final_data['avtqz'] = avtqz
    for key, value in final_data.items():
        value.record_data_json(['final result', key])

    # if needed, convert the unit
    curr_unit = find_current_unit(final_data)
    if curr_unit.lower() != unit.lower():
        for value in final_data.values():
            value.energy = unit_transform(value.energy, curr_unit, unit)
            value.unit = unit
        for key, value in final_data.items():
            value.record_data_json(['final result', key])

    rec = 'Data processing finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
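
Results.get_extrapolated_correction is called with the cardinal numbers of the two basis sets (2/3 for avdz/avtz, 3/4 for avtz/avqz), which matches the conventional two-point 1/X^3 extrapolation of correlation energies. The sketch below shows that formula, assuming this is indeed what the Results module implements.

def extrapolate_cbs(e_small, e_large, x_small, x_large):
    """Two-point 1/X**3 complete-basis-set extrapolation (sketch).

    E_CBS = (X_l**3 * E_l - X_s**3 * E_s) / (X_l**3 - X_s**3),
    with X the cardinal number of the basis (2 = avdz, 3 = avtz, 4 = avqz).
    """
    return ((x_large ** 3 * e_large - x_small ** 3 * e_small)
            / (x_large ** 3 - x_small ** 3))


# e.g. combining an avdz and an avtz correction into the 'avdtz' estimate:
# e_avdtz = extrapolate_cbs(e_avdz, e_avtz, 2, 3)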
Code Example #10
File: correction.py  Project: cccccsf/single_point
def correction(path):

    rec = 'Correction begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = IniReader()
    project_name, *_ = Ini.get_basic_info()
    nodes, memorys, bs, molpro_path, molpro_key, atoms = Ini.get_correction()
    record_data_json(path, 'memorys', memorys, section='correction')
    record_data_json(path, 'nodes', nodes, section='correction')
    # work out which correction calculations ('missions') to run
    cluster_path = os.path.join(path, 'cluster')
    missions, nodes, memorys = get_missions(memorys, nodes)

    # prepare input
    inputs = list(missions)
    inputs = [inp + '.inp' for inp in inputs]
    inputs_files = [os.path.join(cluster_path, inp) for inp in inputs]
    correction_jobs = []
    correction_jobs_finished = []
    for inp in inputs_files:
        inp_inp = os.path.join(path, os.path.split(inp)[-1])
        job = Job(cluster_path)
        job.method = os.path.split(inp)[-1].split('.')[0]
        job.input = inp
        if not Correction.if_cal_finish(job):
            correction_jobs.append(job)
            if not os.path.exists(inp) and not os.path.exists(inp_inp):
                print('{} file not found.'.format(inp))
                print('Program will generate the input automatically.')
                if job.method.startswith('per'):
                    Inp = Correction.InputPerRPA(job,
                                                 project_name,
                                                 memorys[job.method],
                                                 uc_atoms=atoms)
                    Inp.gen_inp()
                elif job.method.endswith('rpa_cc'):
                    Inp = Correction.InputRPACC(job,
                                                project_name,
                                                memorys[job.method],
                                                uc_atoms=atoms)
                    Inp.gen_inp()
                elif job.method.endswith('iext1_rpa'):
                    Inp = Correction.InputIext1RPA(job,
                                                   project_name,
                                                   memorys[job.method],
                                                   uc_atoms=atoms)
                    Inp.gen_inp()
            elif not os.path.exists(inp):
                shutil.copy(inp_inp, inp)

        else:
            job.status = 'finished'
            correction_jobs_finished.append(job)

    # generate scr
    for job in correction_jobs:
        Src = Correction.Script(job, nodes[job.method], molpro_key,
                                molpro_path)
        Src.write_scr()

    # submit jobs
    if len(correction_jobs) > 0:
        new_finished_jobs = Correction.submit(correction_jobs)
        correction_jobs_finished += new_finished_jobs

    # read and record all results
    if len(correction_jobs_finished) > 0:
        Correction.read_all_results(path, correction_jobs_finished)

    rec = 'Correction finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
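
get_missions is only seen from the outside in this example: it takes the memory and node settings from input.ini and must return an iterable of method names plus per-method dictionaries, because the loop above indexes memorys[job.method] and nodes[job.method]. The sketch below is consistent with that usage; the actual parsing of the ini values is an assumption.

def get_missions(memorys, nodes):
    """Sketch: normalise per-method settings into dicts keyed by method name."""
    memorys = dict(memorys)      # e.g. {'avdz_rpa_cc': '2000', ...}
    nodes = dict(nodes)          # e.g. {'avdz_rpa_cc': '4', ...}
    missions = list(memorys)     # the method names to run
    return missions, nodes, memorys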
Code Example #11
def hf2(path):

    rec = 'Second Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    bs, nodes, crystal_path = Ini.get_hf2()
    record_data_json(path, 'basis_set', bs, section='hf2')
    record_data_json(path, 'nodes', nodes, section='hf2')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'hf2')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    hf2_jobs_finished, hf2_jobs_new = [], []
    if not HF2.if_cal_finish(bilayer_job):
        inp = HF2.Input(bilayer_job,
                        project_name,
                        system_type,
                        group_type,
                        bs_type=bs,
                        fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(bilayer_job, nodes, crystal_path)
        hf2_jobs_new.append(bilayer_job)
    else:
        bilayer_job.status = 'finished'
        hf2_jobs_finished.append(bilayer_job)
    if not HF2.if_cal_finish(upper_job):
        inp = HF2.LayerInp(upper_job,
                           project_name,
                           system_type,
                           group_type,
                           bs_type=bs,
                           layertype='upperlayer',
                           fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(upper_job, nodes, crystal_path)
        hf2_jobs_new.append(upper_job)
    else:
        upper_job.status = 'finished'
        hf2_jobs_finished.append(upper_job)
    if not HF2.if_cal_finish(under_job):
        inp = HF2.LayerInp(under_job,
                           project_name,
                           system_type,
                           group_type,
                           bs_type=bs,
                           layertype='underlayer',
                           fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(under_job, nodes, crystal_path)
        hf2_jobs_new.append(under_job)
    else:
        under_job.status = 'finished'
        hf2_jobs_finished.append(under_job)

    # copy files and submit jobs
    if len(hf2_jobs_new) > 0:
        new_finished_jobs = HF2.submit(hf2_jobs_new)
        hf2_jobs_finished += new_finished_jobs

    # read and record results
    HF2.read_record_results(path, hf2_jobs_finished)

    rec = 'HF2 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Code Example #12
File: hf1.py  Project: cccccsf/single_point
def hf1(path):

    rec = 'First Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    bs, nodes, crystal_path = Ini.get_hf1()
    record_data_json(path, 'basis_set', bs, section='hf1')
    record_data_json(path, 'nodes', nodes, section='hf1')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'hf1')
    job = Job(bilayer_path)
    hf1_jobs = []
    hf1_jobs_finished = []
    if not HF1.if_cal_finish(job):
        Inp = HF1.Input(job,
                        project_name,
                        system_type,
                        group_type,
                        bs,
                        fiexed_atoms=fixed_atoms)
        Inp.gen_input()
        hf1_jobs.append(job)
        HF1.copy_submit_scr(job, nodes, crystal_path)
    else:
        job.status = 'finished'
        hf1_jobs_finished.append(job)
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    upper_job = Job(upper_path)
    if not HF1.if_cal_finish(upper_job):
        Inp = HF1.Layer_Inp(upper_job,
                            project_name,
                            system_type,
                            group_type,
                            bs,
                            fiexed_atoms=fixed_atoms,
                            layertype='upperlayer')
        Inp.gen_input()
        hf1_jobs.append(upper_job)
        HF1.copy_submit_scr(upper_job, nodes, crystal_path)
    else:
        upper_job.status = 'finished'
        hf1_jobs_finished.append(upper_job)
    under_path = os.path.join(bilayer_path, 'underlayer')
    under_job = Job(under_path)
    if not HF1.if_cal_finish(under_job):
        Inp = HF1.Layer_Inp(under_job,
                            project_name,
                            system_type,
                            group_type,
                            bs,
                            fiexed_atoms=fixed_atoms,
                            layertype='underlayer')
        Inp.gen_input()
        hf1_jobs.append(under_job)
        HF1.copy_submit_scr(under_job, nodes, crystal_path)
    else:
        under_job.status = 'finished'
        hf1_jobs_finished.append(under_job)

    # copy files and submit jobs
    new_finished_jobs = HF1.submit(hf1_jobs)
    hf1_jobs_finished += new_finished_jobs

    # read and record the results
    HF1.read_record_results(path, hf1_jobs_finished)

    rec = 'HF1 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
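
Taken together, the twelve examples describe one pipeline. The order below is inferred from the data flow (cluster() reads geo_opt/geo_opt.out, results() reads the hf2 and rpa layer energies, and the correction jobs live under cluster/); the project's real entry point may chain the steps differently, so treat this as a sketch only.

# assumes the drivers above are importable, e.g.
#   from geo_opt import geo_opt; from hf1 import hf1; ...
def run_single_point(path):
    geo_opt(path)        # optimise the bilayer geometry (CRYSTAL)
    hf1(path)            # first Hartree-Fock run: bilayer + both single layers
    hf2(path)            # second Hartree-Fock run with the production basis set
    localization(path)   # localise the occupied orbitals
    lmp2(path)           # periodic LMP2 (CRYSCOR)
    rpa(path)            # embedded-fragment LdrCCD/RPA (Molpro)
    cluster(path)        # cut the finite cluster from the optimised geometry
    correction(path)     # method- and basis-set corrections on the cluster
    results(path)        # extrapolate, combine and record the final numbers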