Example #1
def read_record_results(path, jobs):
    energy_dict = {}
    for job in jobs:
        # the three layer types are handled identically; key the result
        # dictionary by the job's own layertype
        if job.layertype in ('bilayer', 'upperlayer', 'underlayer'):
            energy, unit = get_energy(job)
            energy_dict[job.layertype] = [energy, unit]
    layer_energy = cal_layer_energy(energy_dict['bilayer'],
                                    energy_dict['upperlayer'],
                                    energy_dict['underlayer'])
    energy_dict['layer_energy'] = layer_energy
    record_data_json(path, 'energy', energy_dict, section='hf2')
    record_data_csv(
        path,
        'hf2', [
            energy_dict['bilayer'][0], energy_dict['upperlayer'][0],
            energy_dict['underlayer'][0], energy_dict['layer_energy'][0]
        ],
        layer='whole layer')
    rec = 'Results read.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)
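
All of the examples on this page funnel logging through a shared record helper and JSON bookkeeping through record_data_json, neither of which is defined in the listing. A minimal sketch of what they might look like, assuming a plain-text record file and a results.json organized by section (both file names are guesses inferred from the calls above):

import json
import os


def record(path, rec, init=False):
    # Append to (or, with init=True, start over) the plain-text log file.
    mode = 'w' if init else 'a'
    with open(os.path.join(path, 'record'), mode) as f:
        f.write(rec + '\n')


def record_data_json(path, key, value, section=None):
    # Store key/value in results.json, optionally nested under a section.
    results_file = os.path.join(path, 'results.json')
    data = {}
    if os.path.exists(results_file):
        with open(results_file) as f:
            data = json.load(f)
    target = data.setdefault(section, {}) if section is not None else data
    target[key] = value
    with open(results_file, 'w') as f:
        json.dump(data, f, indent=2)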
Example #2
def submit(jobs):
    job_num = len(jobs)
    max_parallel = 3
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                rec = job.path
                rec += '\n'
                rec += 'calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                count -= 1
                jobs.remove(job)

    # check whether some jobs are already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)

    # submit and detect status of jobs
    j = 0
    while True:
        test_finished(submitted_jobs)
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            if count < max_parallel and len(jobs) != 0:
                new_job = jobs.pop()
                os.chdir(new_job.path)
                rename_file(new_job.path, 'hf.out')
                rename_file(new_job.path, 'fort.9')
                out = submit_hf1_job()
                count += 1
                submitted_jobs.append(new_job)
                rec = new_job.path + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---' * 25
                record(new_job.root_path, rec)
                print(rec)
            else:
                time.sleep(500)
                j += 1
                if j > 35:
                    rec = 'nothing changes...'
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
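
The loop above spins on if_cal_finish, which is not part of this listing. A plausible sketch, assuming a calculation counts as finished once its output file contains a termination banner (the file name and marker string are assumptions, not the real implementation):

import os


def if_cal_finish(job, out_name='hf.out', marker='TERMINATION'):
    # True once the output file exists and carries the end-of-run marker.
    out_file = os.path.join(job.path, out_name)
    if not os.path.isfile(out_file):
        return False
    with open(out_file, errors='ignore') as f:
        return marker in f.read()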
Example #3
def lmp2(path):

    rec = 'LMP2 Calculation begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    ini = IniReader()
    nodes, cryscor_path = ini.get_lmp2()
    record_data_json(path, 'nodes', nodes, section='lmp2')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'lmp2')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    lmp2_jobs_finished, lmp2_jobs_new = [], []
    # the bilayer uses the full input; the two single layers use the
    # layer variant of the input generator
    for job, input_cls in ((bilayer_job, LMP2.Lmp2Input),
                           (upper_job, LMP2.Lmp2InputLayer),
                           (under_job, LMP2.Lmp2InputLayer)):
        if not LMP2.if_cal_finish(job):
            inp = input_cls(job)
            inp.write_input()
            lmp2_jobs_new.append(job)
            LMP2.copy_submit_src(job, nodes, cryscor_path)
        else:
            job.status = 'finished'
            lmp2_jobs_finished.append(job)

    # submit jobs
    if len(lmp2_jobs_new) > 0:
        new_finished_jobs = LMP2.submit(lmp2_jobs_new)
        lmp2_jobs_finished += new_finished_jobs

    # read and record results
    LMP2.lmp2_read_record_results(path, lmp2_jobs_finished)

    rec = 'LMP2 finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
Example #4
def submit(jobs):

    jobs_len = len(jobs)
    max_parallel = 5
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        nonlocal count
        for job in jobs[:]:
            if if_loc_finish(job):
                finished_jobs.append(job)
                rec = job.path
                rec += '\n'
                rec += 'Localization finished.\n'
                rec += '---'*25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1

    # check whether some jobs are already finished
    for job in jobs[:]:
        if if_loc_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)
        if len(finished_jobs) == jobs_len and len(submitted_jobs) == 0:
            break
        else:
            if count < max_parallel and len(jobs) > 0:
                new_job = jobs.pop()
                os.chdir(new_job.path)
                out = submit_loc_job()
                count += 1
                submitted_jobs.append(new_job)
                rec = new_job.path + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---'*25
                record(new_job.root_path, rec)
                print(rec)
            else:
                time.sleep(500)
                j += 1
                if j > 12:
                    rec = 'nothing changes...'
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
Example #5
def pipeline():

    Ini = IniReader()
    path = Ini.project_path
    start = Ini.start
    end = Ini.end

    now = datetime.now()
    now = now.strftime("%b %d %Y %H:%M:%S")
    rec = 'Project begins.'
    rec += '\n' + '***' * 25
    rename_file(path, 'record')
    record(path, rec, init=True)
    print('***' * 25)
    print(now)
    print(rec)
    mkdir(path)
    try:
        shutil.copy(Ini.ini_path, os.path.join(path, 'input.ini'))
    except Exception as e:
        print(e)

    anchor = start
    while anchor < end:
        if anchor == 0:
            GeoOpt.geo_opt(path)
        elif anchor == 1:
            HF1.hf1(path)
        elif anchor == 2:
            Loc.localization(path)
        elif anchor == 3:
            HF2.hf2(path)
        elif anchor == 4:
            LMP2.lmp2(path)
        elif anchor == 5:
            RPA.rpa(path)
        elif anchor == 6:
            Cluster.cluster(path)
        elif anchor == 7:
            Correction.correction(path)
        elif anchor == 8:
            Results.results(path)
        anchor += 1

    now = datetime.now()
    now = now.strftime("%b %d %Y %H:%M:%S")
    rec = 'Project ends.\n'
    rec += '***' * 25
    rename_file(path, 'record')
    record(path, rec, init=True)
    print(now)
    print(rec)
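
The anchor ladder above is a hand-written dispatch table over consecutive stage indices. An equivalent, more compact sketch using a list of stage callables (same modules and order as above; purely illustrative):

STAGES = [GeoOpt.geo_opt, HF1.hf1, Loc.localization, HF2.hf2,
          LMP2.lmp2, RPA.rpa, Cluster.cluster, Correction.correction,
          Results.results]

for stage in STAGES[start:end]:
    stage(path)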
Example #6
def read_all_results(path, jobs):
    results = [Result(job) for job in jobs]
    for res in results:
        res.get_energy()
        res.unit_transform()
    energy_dict = {res.method: [res.energy, res.unit] for res in results}
    record_data_json(path, 'energy', energy_dict, section='correction')
    for res in results:
        record_data_csv(path, res.method, res.energy, layer='interlayer')
    rec = 'Results read.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)
Example #7
 def test_finished(jobs):
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             rec = job.path
             rec += '\n'
             rec += 'calculation finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #8
def rpa(path):

    rec = 'LRPA begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    ini = IniReader()
    (rpa_nodes_b, memory_b, rpa_nodes_s, memory_s,
     molpro_path, molpro_key) = ini.get_rpa()

    # generate Input file and scr file
    bilayer_path = os.path.join(path, 'rpa')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    rpa_jobs_finished, rpa_jobs_new = [], []
    if not RPA.if_cal_finish(bilayer_job):
        Inp = RPA.RPAInput(bilayer_job, memory_b)
        Inp.generate_input()
        Scr = RPA.Scr(bilayer_job, rpa_nodes_b, molpro_key, molpro_path)
        Scr.gen_scr()
        rpa_jobs_new.append(bilayer_job)
    else:
        bilayer_job.status = 'finished'
        rpa_jobs_finished.append(bilayer_job)
    for job in [upper_job, under_job]:
        if not RPA.if_cal_finish(job):
            Inp = RPA.RPAInput(job, memory_s)
            Inp.generate_input()
            Scr = RPA.Scr(job, rpa_nodes_s, molpro_key, molpro_path)
            Scr.gen_scr()
            rpa_jobs_new.append(job)
        else:
            job.status = 'finished'
            rpa_jobs_finished.append(job)

    # submit jobs
    if len(rpa_jobs_new) > 0:
        new_finished_jobs = RPA.submit(rpa_jobs_new)
        rpa_jobs_finished += new_finished_jobs
    # read and record results
    read_record_results(path, rpa_jobs_finished, RPA.get_energy, method='rpa')

    rec = 'LRPA finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #9
 def test_finished(jobs):
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             rec = job.path + '\n'
             rec += job.method + '\n'
             rec += 'calculation finished...\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
             count_dict[job.parameter['node']] -= 1
Example #10
def cluster(path):

    rec = 'Cluster Cutting begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    (central_atoms, factors, deleted_atoms, coord,
     add_h, out_layer_number) = Ini.get_cluster()
    cutting_setting = [coord, add_h]
    record_data_json(path, 'central atoms', central_atoms, section='cluster')
    record_data_json(path, 'cutting factors', factors, section='cluster')
    record_data_json(path, 'deleted atoms', deleted_atoms, section='cluster')
    cutting_setting_dict = {
        'coord': coord,
        'add_h': add_h,
        'out_layer_number': out_layer_number
    }
    record_data_json(path,
                     'cutting setting',
                     cutting_setting_dict,
                     section='cluster')

    geo_file = os.path.join(os.path.join(path, 'geo_opt'), 'geo_opt.out')
    job = os.path.join(path, 'cluster')
    job = Job(job)

    Clu = Cluster.ClusterCutter(job,
                                geo_file,
                                factors=factors,
                                name=project_name,
                                central_atoms=central_atoms,
                                fixed_atoms=fixed_atoms,
                                cutting_setting=cutting_setting,
                                deleted_atoms=deleted_atoms)
    Clu.get_cluster()

    if out_layer_number is True:
        Clu.write_xyz_with_layernumber()
    else:
        Clu.write_xyz()

    rec = 'Cluster Cutting finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #11
def geo_opt(path):

    rec = 'Geometry Optimization begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    record_data_json(path, 'project_name', project_name)
    record_data_json(path, 'system_type', system_type)
    record_data_json(path, 'lattice_parameter', lattice_parameter)
    record_data_json(path, 'geometry', geometry)
    record_data_json(path, 'fixed_atoms', fixed_atoms)
    if isinstance(fixed_atoms, list) and len(fixed_atoms) == 2:
        geometry = Geometry(geometry=geometry, fixed_atoms=fixed_atoms)
    else:
        geometry = Geometry(geometry=geometry)
    bs, functional, nodes, crystal_path = Ini.get_geo_opt()
    record_data_json(path, 'basis_set', bs, section='geo_opt')
    record_data_json(path, 'functional', functional, section='geo_opt')
    record_data_json(path, 'nodes', nodes, section='geo_opt')

    job = os.path.join(path, 'geo_opt')
    job = Job(job)
    if not GeoOpt.if_job_finish(job):
        # generation of INPUT
        Geo_Inp = GeoOpt.Geo_Opt_Input(job, project_name, system_type,
                                       group_type, lattice_parameter, geometry,
                                       bs, functional)
        Geo_Inp.gen_input()
        # copy file and submit the job
        job = GeoOpt.submit(job, nodes, crystal_path, path)
    else:
        job.status = 'finished'

    # read and record the results
    GeoOpt.read_record_result(job, path)

    rec = 'Geometry optimization finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #12
def read_record_result(job, path):
    job_path = job.path
    out_file = os.path.join(job_path, 'geo_opt.out')
    lattice_parameter, geometry = get_optimized_geometry(out_file)
    energy, unit = get_optimized_energy(out_file)
    record_data_json(path, 'unit', unit, section='geo_opt')
    record_data_json(path,
                     'optimized_lattice_parameter',
                     lattice_parameter,
                     section='geo_opt')
    record_data_json(path, 'optimized_geometry', geometry, section='geo_opt')
    record_data_json(path, 'energy', energy, section='geo_opt')
    record_data_csv(path, 'geo_opt', energy)
    rec = job.path + '\n'
    rec += 'Output info read.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)
Example #13
 def test_finished(jobs):
     """
     test jobs which have benn submittdt is finished or not
     if a job finished, add it to list finished_jobs, and delete it from list submitted_jobs
     :param submitted_jobs:
     :return:
     """
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             rec = job.path
             rec += '\n'
             rec += 'calculation finished...\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #14
def localization(path):

    rec = 'Localization begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    ini = IniReader()
    nodes, crystal_path = ini.get_loc()
    record_data_json(path, 'nodes', nodes, section='loc')

    # generate jobs
    bilayer_path = os.path.join(path, 'loc')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    loc_jobs = [bilayer_job, upper_job, under_job]
    loc_jobs_finished, loc_jobs_new = [], []
    # check job and copy input file
    for job in loc_jobs:
        if not Loc.if_loc_finish(job):
            loc_jobs_new.append(job)
            Loc.copy_inp_file(job)
            Loc.copy_loc_scr(job, nodes, crystal_path)
        else:
            job.status = 'finished'
            loc_jobs_finished.append(job)

    # submit jobs
    if len(loc_jobs_new) > 0:
        new_finished_jobs = Loc.submit(loc_jobs_new)
        loc_jobs_finished += new_finished_jobs

    rec = 'Localization finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
Example #15
 def write_xyz_with_layernumber(self, cluster=None):
     # avoid the mutable-default-argument pitfall; no argument (or an
     # empty list) means "use the atoms selected by get_cluster()"
     if len(self.choosed_atoms) == 0:
         self.get_cluster()
     if not cluster:
         cluster = self.choosed_atoms
     file_name = '{}_Cluster.xyz'.format(self.name)
     file_path = os.path.join(self.path, file_name)
     with open(file_path, 'w') as f:
         f.write(str(len(cluster)) + '\n')
         f.write('{}_Cluster'.format(self.name) + '\n')
         for atom in cluster:
             ele = periodic_table_rev[int(atom.nat)]
             f.write((str(ele) + str(atom.layer)).center(6) + ' ')
             f.write('{:.12E}'.format(float(atom.x)).rjust(19) + ' ')
             f.write('{:.12E}'.format(float(atom.y)).rjust(19) + ' ')
             f.write('{:.12E}'.format(float(atom.z)).rjust(19))
             f.write('\n')
         rec = 'Geometry file generated.\n'
         rec += 'Number of atoms in cluster: {}\n'.format(len(cluster))
         rec += '---' * 25
         print(rec)
         record(self.job.root_path, rec)
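
For reference, the writer above emits an XYZ-style file: an atom count, a title line, then one row per atom whose element symbol is suffixed with its layer index, with coordinates in scientific notation. An illustrative output (the project name, layer indices, and coordinates below are made up):

3
Graphene_Cluster
  C1    1.228000000000E+00  7.090000000000E-01  0.000000000000E+00
  C1   -1.228000000000E+00  7.090000000000E-01  0.000000000000E+00
  C2    0.000000000000E+00 -1.418000000000E+00  3.350000000000E+00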
Example #16
def submit(job, nodes, crystal_path, path):

    if not GeoOpt.if_job_finish(job):
        copy_submit_scr(job, nodes, crystal_path)
        rename_file(job.path, 'geo_opt.out')
        out = submit_geo_opt_job()
        rec = job.path
        rec += '\n'
        rec += 'job submitted...'
        rec += '\n' + out + '\n'
        rec += '---'*25
        print(rec)
        record(path, rec)
        r = 0
        while True:
            if GeoOpt.if_job_finish(job):
                rec = 'calculation finished.\n'
                rec += '---'*25
                print(rec)
                record(path, rec)
                job.status = 'finished'
                break
            else:
                time.sleep(500)
                r += 1
                if r > 15:
                    rec = 'calculation still not finished.\n'
                    rec += '---'*25
                    print(rec)
                    record(path, rec)
                    r = 0
                continue
    else:
        job.status = 'finished'

    return job
Example #17
def hf2(path):

    rec = 'Second Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    bs, nodes, crystal_path = Ini.get_hf2()
    record_data_json(path, 'basis_set', bs, section='hf2')
    record_data_json(path, 'nodes', nodes, section='hf2')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'hf2')
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    under_path = os.path.join(bilayer_path, 'underlayer')
    bilayer_job = Job(bilayer_path)
    upper_job = Job(upper_path)
    under_job = Job(under_path)
    hf2_jobs_finished, hf2_jobs_new = [], []
    if not HF2.if_cal_finish(bilayer_job):
        inp = HF2.Input(bilayer_job,
                        project_name,
                        system_type,
                        group_type,
                        bs_type=bs,
                        fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(bilayer_job, nodes, crystal_path)
        hf2_jobs_new.append(bilayer_job)
    else:
        bilayer_job.status = 'finished'
        hf2_jobs_finished.append(bilayer_job)
    if not HF2.if_cal_finish(upper_job):
        inp = HF2.LayerInp(upper_job,
                           project_name,
                           system_type,
                           group_type,
                           bs_type=bs,
                           layertype='upperlayer',
                           fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(upper_job, nodes, crystal_path)
        hf2_jobs_new.append(upper_job)
    else:
        upper_job.status = 'finished'
        hf2_jobs_finished.append(upper_job)
    if not HF2.if_cal_finish(under_job):
        inp = HF2.LayerInp(under_job,
                           project_name,
                           system_type,
                           group_type,
                           bs_type=bs,
                           layertype='underlayer',
                           fixed_atoms=fixed_atoms)
        inp.gen_input()
        HF2.copy_submit_scr(under_job, nodes, crystal_path)
        hf2_jobs_new.append(under_job)
    else:
        under_job.status = 'finished'
        hf2_jobs_finished.append(under_job)

    # copy files and submit jobs
    if len(hf2_jobs_new) > 0:
        new_finished_jobs = HF2.submit(hf2_jobs_new)
        hf2_jobs_finished += new_finished_jobs

    # read and record results
    HF2.read_record_results(path, hf2_jobs_finished)

    rec = 'HF2 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #18
def submit(jobs):

    total_num = len(jobs)
    count = 0
    submitted_jobs = []
    finished_jobs = []
    max_calculations_dict = {'12': 5, '28': 3}

    def test_finished(jobs):
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                rec = job.path + '\n'
                rec += job.method + '\n'
                rec += 'calculation finished...\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1
                count_dict[job.parameter['node']] -= 1

    # check whether some jobs are already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)

    # categorize jobs according to their node count
    jobs_dict = {}
    count_dict = {}
    nodes_list = []
    for job in jobs:
        node = job.parameter['node']
        if node not in nodes_list:
            nodes_list.append(node)
            jobs_dict[node] = [job]
            count_dict[node] = 0
        else:
            jobs_dict[node].append(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)  # update finished_jobs and submitted_jobs
        if len(finished_jobs) == total_num and len(submitted_jobs) == 0:
            break
        else:
            for node in nodes_list:
                if (count_dict[node] < max_calculations_dict[node]
                        and len(jobs_dict[node]) > 0):
                    new_job = jobs_dict[node].pop()
                    os.chdir(new_job.path)
                    rename_file(new_job.path, '{}.out'.format(new_job.method))
                    out = submit_job(new_job)
                    count += 1
                    count_dict[node] += 1
                    submitted_jobs.append(new_job)
                    rec = new_job.path + '\n'
                    rec += new_job.method + '\n'
                    rec += 'job submitted...'
                    rec += '\n' + out + '\n'
                    rec += '---' * 25
                    record(new_job.root_path, rec)
                    print(rec)
                else:
                    time.sleep(500)
                    j += 1
                    if j > 8:
                        rec = 'nothing changes.\n'
                        record(submitted_jobs[0].root_path, rec)
                        j = 0
                    continue

    return finished_jobs
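
The node bookkeeping above (nodes_list, jobs_dict, count_dict) can be written more compactly with collections.defaultdict; a behavior-equivalent sketch:

from collections import defaultdict

jobs_dict = defaultdict(list)   # node -> jobs still waiting to run
count_dict = defaultdict(int)   # node -> jobs currently running

for job in jobs:
    jobs_dict[job.parameter['node']].append(job)
nodes_list = list(jobs_dict)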
Example #19
def hf1(path):

    rec = 'First Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    Ini = IniReader()
    (project_name, system_type, group_type, lattice_parameter,
     number_atoms, geometry, fixed_atoms) = Ini.get_basic_info()
    bs, nodes, crystal_path = Ini.get_hf1()
    record_data_json(path, 'basis_set', bs, section='hf1')
    record_data_json(path, 'nodes', nodes, section='hf1')

    # generation of INPUT
    bilayer_path = os.path.join(path, 'hf1')
    job = Job(bilayer_path)
    hf1_jobs = []
    hf1_jobs_finished = []
    if not HF1.if_cal_finish(job):
        Inp = HF1.Input(job,
                        project_name,
                        system_type,
                        group_type,
                        bs,
                        fiexed_atoms=fixed_atoms)
        Inp.gen_input()
        hf1_jobs.append(job)
        HF1.copy_submit_scr(job, nodes, crystal_path)
    else:
        job.status = 'finished'
        hf1_jobs_finished.append(job)
    upper_path = os.path.join(bilayer_path, 'upperlayer')
    upper_job = Job(upper_path)
    if not HF1.if_cal_finish(upper_job):
        Inp = HF1.Layer_Inp(upper_job,
                            project_name,
                            system_type,
                            group_type,
                            bs,
                            fiexed_atoms=fixed_atoms,
                            layertype='upperlayer')
        Inp.gen_input()
        hf1_jobs.append(upper_job)
        HF1.copy_submit_scr(upper_job, nodes, crystal_path)
    else:
        upper_job.status = 'finished'
        hf1_jobs_finished.append(upper_job)
    under_path = os.path.join(bilayer_path, 'underlayer')
    under_job = Job(under_path)
    if not HF1.if_cal_finish(under_job):
        Inp = HF1.Layer_Inp(under_job,
                            project_name,
                            system_type,
                            group_type,
                            bs,
                            fiexed_atoms=fixed_atoms,
                            layertype='underlayer')
        Inp.gen_input()
        hf1_jobs.append(under_job)
        HF1.copy_submit_scr(under_job, nodes, crystal_path)
    else:
        under_job.status = 'finished'
        hf1_jobs_finished.append(under_job)

    # copy files and submit jobs
    new_finished_jobs = HF1.submit(hf1_jobs)
    hf1_jobs_finished += new_finished_jobs

    # read and record the results
    HF1.read_record_results(path, hf1_jobs_finished)

    rec = 'HF1 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #20
def results(path):

    # get jobs
    correction_jobs, root_jobs = get_jobs(path)
    Ini = IniReader()
    unit = Ini.get_unit()

    # read results of correction if not found in 'results.json'
    results_file = os.path.join(path, 'results.json')
    if if_read_record_results_needed(correction_jobs, results_file):
        read_all_results(path, correction_jobs)

    # read formal results
    with open(results_file, 'r') as f:
        data = json.load(f)
    results_dict = {
        job.method: Results.Result(job,
                                   data['correction']['energy'][job.method][0],
                                   data['correction']['energy'][job.method][1])
        for job in correction_jobs
    }

    # get extrapolation values
    extrap_method_error = {}
    extrap_iext1_rpa = {}
    basis_set_correction = {}
    # method error correction
    if 'avdz_rpa_cc' in results_dict and 'avtz_rpa_cc' in results_dict:
        avdtz = Results.get_extrapolated_correction(results_dict['avdz_rpa_cc'], results_dict['avtz_rpa_cc'], 2, 3)
        avdtz.bs = 'avdtz'
        extrap_method_error['avdtz'] = avdtz
    if 'avtz_rpa_cc' in results_dict and 'avqz_rpa_cc' in results_dict:
        avtqz = Results.get_extrapolated_correction(results_dict['avtz_rpa_cc'], results_dict['avqz_rpa_cc'], 3, 4)
        avtqz.bs = 'avtqz'
        extrap_method_error['avtqz'] = avtqz
    for key, value in extrap_method_error.items():
        value.record_data_json(['method error', key])
    # basis set correction
    # iext1
    if 'avdz_iext1_rpa' in results_dict and 'avtz_iext1_rpa' in results_dict:
        avdtz = Results.get_extrapolated_correction(results_dict['avdz_iext1_rpa'], results_dict['avtz_iext1_rpa'], 2, 3)
        avdtz.bs = 'avdtz'
        extrap_iext1_rpa['avdtz'] = avdtz
    if 'avtz_iext1_rpa' in results_dict and 'avqz_iext1_rpa' in results_dict:
        avtqz = Results.get_extrapolated_correction(results_dict['avtz_iext1_rpa'], results_dict['avqz_iext1_rpa'], 3, 4)
        avtqz.bs = 'avtqz'
        extrap_iext1_rpa['avtqz'] = avtqz
    # get basis set correction
    if 'avdtz' in extrap_iext1_rpa:
        avdtz = extrap_iext1_rpa['avdtz'] - results_dict['per_bas_rpa_iext1']
        basis_set_correction['avdtz'] = avdtz
    if 'avtqz' in extrap_iext1_rpa:
        avtqz = extrap_iext1_rpa['avtqz'] - results_dict['per_bas_rpa_iext1']
        basis_set_correction['avtqz'] = avtqz
    for key, value in basis_set_correction.items():
        value.record_data_json(['basis set error', key])

    # HF
    hf_e, hf_unit = read_data_from_json(results_file, ['hf2', 'energy', 'layer_energy'])
    hf_job = Job(os.path.join(path, 'hf2'))
    hf_result = Results.Result(hf_job, energy=hf_e, unit=hf_unit)
    # embedded fragment LdrCCD (RPA)
    rpa_e, rpa_unit = read_data_from_json(results_file, ['rpa', 'energy', 'layer_energy'])
    rpa_job = Job(os.path.join(path, 'rpa'))
    rpa_result = Results.Result(rpa_job, rpa_e, rpa_unit)

    # final results
    final_data = {}
    if 'avdtz' in extrap_method_error and 'avdtz' in basis_set_correction:
        avdtz = hf_result + rpa_result + extrap_method_error['avdtz'] + basis_set_correction['avdtz']
        final_data['avdtz'] = avdtz
    if 'avtqz' in extrap_method_error and 'avtqz' in basis_set_correction:
        avtqz = hf_result + rpa_result + extrap_method_error['avtqz'] + basis_set_correction['avtqz']
        final_data['avtqz'] = avtqz
    for key, value in final_data.items():
        value.record_data_json(['final result', key])

    # if needed, convert units
    curr_unit = find_current_unit(final_data)
    if curr_unit.lower() != unit.lower():
        for value in final_data.values():
            value.energy = unit_transform(value.energy, curr_unit, unit)
            value.unit = unit
        for key, value in final_data.items():
            value.record_data_json(['final result', key])

    rec = 'Data processing finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
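
Results.get_extrapolated_correction is called with consecutive basis-set cardinal numbers, (2, 3) and (3, 4), which suggests the standard two-point inverse-cubic extrapolation of correlation energies. If that is indeed what it implements, the combination rule would be as follows (a sketch of the assumed behavior, not the real code):

def extrapolate_cbs(e_x, e_y, x, y):
    # Two-point CBS extrapolation: E = (x^3*E_x - y^3*E_y) / (x^3 - y^3)
    return (x ** 3 * e_x - y ** 3 * e_y) / (x ** 3 - y ** 3)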
Example #21
def correction(path):

    rec = 'Correction begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read infos from input.ini file
    Ini = IniReader()
    project_name, *_ = Ini.get_basic_info()
    nodes, memorys, bs, molpro_path, molpro_key, atoms = Ini.get_correction()
    record_data_json(path, 'memorys', memorys, section='correction')
    record_data_json(path, 'nodes', nodes, section='correction')
    # resolve missions and their per-method resources
    cluster_path = os.path.join(path, 'cluster')
    missions, nodes, memorys = get_missions(memorys, nodes)

    # prepare input
    inputs = ['{}.inp'.format(m) for m in missions]
    inputs_files = [os.path.join(cluster_path, inp) for inp in inputs]
    correction_jobs = []
    correction_jobs_finished = []
    for inp in inputs_files:
        inp_inp = os.path.join(path, os.path.split(inp)[-1])
        job = Job(cluster_path)
        job.method = os.path.split(inp)[-1].split('.')[0]
        job.input = inp
        if not Correction.if_cal_finish(job):
            correction_jobs.append(job)
            if not os.path.exists(inp) and not os.path.exists(inp_inp):
                print('{} file not found.'.format(inp))
                print('Program will generate the input automatically.')
                if job.method.startswith('per'):
                    Inp = Correction.InputPerRPA(job,
                                                 project_name,
                                                 memorys[job.method],
                                                 uc_atoms=atoms)
                    Inp.gen_inp()
                elif job.method.endswith('rpa_cc'):
                    Inp = Correction.InputRPACC(job,
                                                project_name,
                                                memorys[job.method],
                                                uc_atoms=atoms)
                    Inp.gen_inp()
                elif job.method.endswith('iext1_rpa'):
                    Inp = Correction.InputIext1RPA(job,
                                                   project_name,
                                                   memorys[job.method],
                                                   uc_atoms=atoms)
                    Inp.gen_inp()
            elif not os.path.exists(inp):
                shutil.copy(inp_inp, inp)

        else:
            job.status = 'finished'
            correction_jobs_finished.append(job)

    # generate scr
    for job in correction_jobs:
        Src = Correction.Script(job, nodes[job.method], molpro_key,
                                molpro_path)
        Src.write_scr()

    # submit jobs
    if len(correction_jobs) > 0:
        new_finished_jobs = Correction.submit(correction_jobs)
        correction_jobs_finished += new_finished_jobs

    # read and record all results
    if len(correction_jobs_finished) > 0:
        Correction.read_all_results(path, correction_jobs_finished)

    rec = 'Correction finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #22
def submit(jobs):
    job_num = len(jobs)
    max_parallel = 12
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        """
        test jobs which have benn submittdt is finished or not
        if a job finished, add it to list finished_jobs, and delete it from list submitted_jobs
        :param submitted_jobs:
        :return:
        """
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                rec = job.path
                rec += '\n'
                rec += 'calculation finished...\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1

    # check whether some jobs are already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)  # update finished_jobs and submitted_jobs
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            if count < max_parallel and len(jobs) > 0:  # cap the number of running jobs
                new_job = jobs.pop()
                copy_fort80(new_job.path)
                copy_fort9_fort78(new_job.path)
                out = submit_job(new_job, 'lmp2')
                count += 1
                submitted_jobs.append(new_job)
                rec = new_job.path + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---' * 25
                record(new_job.root_path, rec)
                print(rec)
            else:
                time.sleep(500)
                j += 1
                if j > 15:
                    rec = 'nothing changes.'
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
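
Finally, the various submitters used throughout (submit_hf1_job, submit_loc_job, submit_geo_opt_job, submit_job) all appear to shell out to the queueing system from the job's directory and return its stdout for the log; their signatures vary between examples. A minimal sketch under that assumption (the scheduler command and script name are placeholders, not the real implementation):

import subprocess


def submit_job(job, method=None, script='job.scr'):
    # Submit the job script from the current working directory and hand
    # the scheduler's stdout back to the caller for logging.
    out = subprocess.run(['sbatch', script], capture_output=True, text=True)
    return out.stdout.strip()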