Example #1
def menu():

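    # locate input.ini next to this script and read the project settings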
    ini_path = os.path.dirname(os.path.realpath(__file__))
    ini_path = os.path.join(ini_path, 'input.ini')

    Ini = ReadIni(ini_path)
    path = Ini.project_path
    start = Ini.start
    end = Ini.end
    test_begin(end, start)

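    # create the project directory and log the start of the run to the record file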
    now = datetime.now()
    now = now.strftime("%b %d %Y %H:%M:%S")
    mkdir(path)
    rec = 'Project begins.'
    rec += '\n' + '***'*25
    rename_file(path, 'record')
    record(path, rec, init=True)
    print('***'*25)
    print(now)
    print(rec)
    try:
        shutil.copy(ini_path, os.path.join(path, 'input.ini'))
    except Exception as e:
        print(e)

    Pipeline.pipeline(path, start, end)
Example #2
 def write_xyz_with_layernumber(self, cluster=None):
     # use None instead of a mutable default list; fall back to the atoms
     # selected by get_cluster()
     if len(self.choosed_atoms) == 0:
         self.get_cluster()
     if not cluster:
         cluster = self.choosed_atoms
     file_name = '{}_Cluster.xyz'.format(self.name)
     file_path = os.path.join(self.cluster_path, file_name)
     if not os.path.exists(self.cluster_path):
         mkdir(self.cluster_path)
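     # XYZ layout: first line is the atom count, second a comment line,
     # then one "ElementLayer  x  y  z" row per atom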
     with open(file_path, 'w') as f:
         f.write(str(len(cluster)) + '\n')
         f.write('{}_Cluster'.format(self.name) + '\n')
         for atom in cluster:
             ele = periodic_table_rev[int(atom.nat)]
             f.write((str(ele) + str(atom.layer)).center(6) + ' ')
             f.write('{:.12E}'.format(float(atom.x)).rjust(19) + ' ')
             f.write('{:.12E}'.format(float(atom.y)).rjust(19) + ' ')
             f.write('{:.12E}'.format(float(atom.z)).rjust(19))
             f.write('\n')
         rec = str(self.cluster_job) + '\n'
         rec += 'Geometry file generated.\n'
         rec += 'Number of atoms in cluster: {}\n'.format(len(cluster))
         rec += '---' * 25
         print(rec)
         record(self.cluster_job.root_path, rec)
Example #3
def cluster(path):

    rec = 'Cluster Cutting begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read parameters from ini file
    Ini = ReadIni()
    (name, slab_or_molecule, group, lattice_parameter, number_of_atoms,
     geometry, fixed_atoms) = Ini.get_basic_info()
    (center_atoms, factors, deleted_atoms, coord, add_h,
     out_layer_number) = Ini.get_cluster()
    cutting_setting = [coord, add_h]
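    # persist the cutting parameters to the project's JSON record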
    record_data_json(path, 'central atoms', center_atoms, section='cluster')
    record_data_json(path, 'cutting factors', factors, section='cluster')
    record_data_json(path, 'deleted atoms', deleted_atoms, section='cluster')
    cutting_setting_dict = {
        'coord': coord,
        'add_h': add_h,
        'out_layer_number': out_layer_number
    }
    record_data_json(path,
                     'cutting setting',
                     cutting_setting_dict,
                     section='cluster')

    # get bilayer jobs
    rpa_jobs = get_jobs(path)
    cluster_jobs = [job for job in rpa_jobs if job.layertype == 'bilayer']
    for job in cluster_jobs:
        if 'rpa' in job.path:
            job.path = job.path.replace('rpa', 'cluster')
        elif 'geo_opt' in job.path:
            job.path = job.path.replace('geo_opt', 'cluster')
        job.method = 'cluster'

    # generate clusters
    cluster_path = os.path.join(path, 'cluster')
    mkdir(cluster_path)
    Cluster.creat_json_file(cluster_path)
    for job in cluster_jobs:
        Clu = Cluster.ClusterCutter(job,
                                    center=center_atoms,
                                    name=name,
                                    fixed_atoms=fixed_atoms,
                                    factors=factors,
                                    cutting_setting=cutting_setting,
                                    deleted_atoms=deleted_atoms)
        if not Cluster.if_cluster_already_generated(job):
            Clu.get_cluster()
            if out_layer_number:
                Clu.write_xyz_with_layernumber()
            else:
                Clu.write_xyz()

    rec = 'Cluster Cutting finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #4
def end_programm(path):
    now = datetime.now()
    now = now.strftime("%b %d %Y %H:%M:%S")
    rec = 'Program End.\n'
    rec += '***'*25
    print(now)
    print(rec)
    record(path, rec)
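    # note: sys.exit() raises SystemExit, which does not inherit from Exception,
    # so the handler below never fires and the process exits with status 1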
    try:
        sys.exit(1)
    except Exception:
        print('Program exit.')
Example #5
 def test_finished(jobs):
     nonlocal count
     for job in jobs[:]:
         if if_loc_finish(job):
             finished_jobs.append(job)
             rec = str(job)
             rec += '\n'
             rec += 'Localization finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #6
 def test_finished(jobs):
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             num = str(len(finished_jobs)) + '/' + str(job_num)
             rec = str(job)
             rec += '\n'
             rec += num + '  calculation finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #7
 def test_finished(jobs):
     # 'count' is reassigned below, so it must be declared nonlocal
     # (otherwise: UnboundLocalError: local variable 'count' referenced before assignment)
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             num = str(len(finished_jobs)) + '/' + str(job_numbers)
             rec = job.path
             rec += '\n'
             rec += num + '   calculation finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #8
 def test_finished(jobs):
     nonlocal count
     nonlocal count_dict
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             num = str(len(finished_jobs)) + '/' + str(total_num)
             rec = str(job) + '\n'
             rec += num + '  calculation finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
             count_dict[job.parameter['node']] -= 1
Example #9
 def test_finished(jobs):
     """
     test whether the submitted jobs have finished
     if a job has finished, add it to finished_jobs and remove it from submitted_jobs
     :param jobs:
     :return:
     """
     nonlocal count
     for job in jobs[:]:
         if if_cal_finish(job):
             finished_jobs.append(job)
             num = str(len(finished_jobs)) + '/' + str(job_num)
             rec = str(job)
             rec += '\n'
             rec += num + '  calculation finished.\n'
             rec += '---' * 25
             print(rec)
             record(job.root_path, rec)
             jobs.remove(job)
             count -= 1
Example #10
def localization(path, moni):

    rec = 'Localization begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = ReadIni()
    nodes, crystal_path = Ini.get_loc()
    if nodes == '' or nodes == 'default':
        nodes = 1
    hf1_jobs = Localization.get_jobs(path)

    # copy input file of localization
    loc_jobs = []
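    # prepare input and run scripts only for jobs whose localization
    # has not finished yet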
    if len(hf1_jobs) != 0:
        try:
            for job in hf1_jobs:
                if not Localization.if_loc_finish(job):
                    Localization.copy_inp_file(job)
                    Localization.copy_loc_scr(job, nodes, crystal_path)
                    loc_jobs.append(job)
        except Exception as e:
            print(e)
    else:
        print('There are no appropriate Hartree-Fock calculation results!')
        print('The program will exit; correct the error and restart from the '
              'localization step!')
        sys.exit(1)

    # submit all jobs
    if len(loc_jobs) > 0:
        loc_finished_job = Localization.submit(loc_jobs, moni)

    rec = 'Localization finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #11
def submit(jobs, nodes, crystal_path, moni):
    job_numbers = len(jobs)
    max_parallel = 5
    count = 0
    submitted_jobs = []
    finished_jobs = []

    # find and submit the initial job
    loc = 0
    for job in jobs:
        if job.x == '0' and job.z == '0':
            break
        loc += 1
    if loc < len(jobs):
        job_init = jobs.pop(loc)
        os.chdir(job_init.path)
        if not if_cal_finish(job_init):
            copy_submit_scr(job_init, nodes, crystal_path)
            rename_file(job_init.path, 'geo_opt.out')
            out = submit_job(job_init, 'geo_opt')
            submitted_jobs.append(job_init)
            moni.insert_new_job(job_init, out)
            rec = job_init.path
            print(rec)
            rec += '\n'
            rec += 'job submitted...'
            rec += '\n' + out + '\n'
            rec += '---' * 25
            record(job_init.root_path, rec)
            r = 0
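            # poll every 500 s; after 15 unchanged polls, note in the record
            # file that the initial job is still running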
            while True:
                moni.update_status()
                if if_cal_finish(job_init):
                    rec = job_init.path
                    rec += '\n'
                    rec += 'calculation finished...'
                    record(job_init.root_path, rec)
                    submitted_jobs.remove(job_init)
                    finished_jobs.append(job_init)
                    break
                else:
                    time.sleep(500)
                    r += 1
                    # test function
                    # test_init_job(job_init, r)
                    if r > 15:
                        rec = job_init.path
                        rec += '\n'
                        rec += 'initial calculation still not finished...'
                        record(job_init.root_path, rec)
                        r = 0
                    continue
        else:
            finished_jobs.append(job_init)

    # test if there is some job which is already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            # print('Job already finished: ', job)
            finished_jobs.append(job)
            jobs.remove(job)
    # test if there is some jobs which are already submitted but not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    def test_finished(jobs):
        # 'count' is reassigned below, so it must be declared nonlocal
        # (otherwise: UnboundLocalError: local variable 'count' referenced before assignment)
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(job_numbers)
                rec = job.path
                rec += '\n'
                rec += num + '   calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1

    if len(jobs) == 0:
        return finished_jobs
    else:
        i = 0
        j = 0
        while True:
            test_finished(submitted_jobs)
            moni.update_status()
            if len(finished_jobs) == job_numbers and len(submitted_jobs) == 0:
                break
            else:
                if count < max_parallel and i < len(jobs):
                    print(jobs[i].path)
                    nearest_job = obtain_nearest_job(jobs[i])
                    os.chdir(jobs[i].path)
                    copy_submit_scr(jobs[i], nodes, crystal_path, nearest_job)
                    # copy_fort9(jobs[i])
                    rename_file(jobs[i].path, 'geo_opt.out')
                    rename_file(jobs[i].path, 'fort.9')
                    out = submit_job(jobs[i], 'geo_opt')
                    count += 1
                    submitted_jobs.append(jobs[i])
                    moni.insert_new_job(jobs[i], out)
                    rec = jobs[i].path + '\n'
                    rec += 'job submitted.'
                    rec += '\n' + out + '\n'
                    rec += '---' * 25
                    record(jobs[i].root_path, rec)
                    i += 1
                else:
                    time.sleep(500)
                    j += 1
                    # j = test_calculation(j, submitted_jobs)     # test function
                    if j > 20:
                        rec = 'nothing changes.\n'
                        rec += '---' * 25
                        # print(rec)
                        record(submitted_jobs[0].root_path, rec)
                        j = 0
                    continue

        return finished_jobs
Example #12
def submit(jobs, moni):

    total_num = len(jobs)
    count = 0
    submitted_jobs = []
    finished_jobs = []
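    # concurrency cap keyed by node count (at most 5 jobs on 12 nodes,
    # 3 jobs on 28 nodes)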
    max_calculations_dict = {'12': 5, '28': 3}

    def test_finished(jobs):
        nonlocal count
        nonlocal count_dict
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(total_num)
                rec = str(job) + '\n'
                rec += num + '  calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1
                count_dict[job.parameter['node']] -= 1

    # test if there is some job which is already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)
    # test if there is some jobs which are already submitted but not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    # categorize jobs according to the nodes number
    jobs_dict = {}
    count_dict = {}
    nodes_list = []
    for job in jobs:
        node = job.parameter['node']
        if node not in nodes_list:
            nodes_list.append(node)
            jobs_dict[node] = [job]
            count_dict[node] = 0
        else:
            jobs_dict[node].append(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)
        moni.update_status()
        if len(finished_jobs) == total_num and len(submitted_jobs) == 0:
            break
        else:
            for node in nodes_list:
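                # assumes every node count has an entry in max_calculations_dict;
                # an unlisted node count would raise KeyError here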
                if (count_dict[node] < max_calculations_dict[node]
                        and len(jobs_dict[node]) > 0):
                    new_job = jobs_dict[node].pop()
                    os.chdir(new_job.path)
                    rename_file(new_job.path, '{}.out'.format(new_job.method))
                    out = submit_job(new_job, new_job.method)
                    count += 1
                    count_dict[node] += 1
                    submitted_jobs.append(new_job)
                    moni.insert_new_job(new_job, out)
                    rec = new_job.path + '\n'
                    rec += new_job.method + '\n'
                    rec += 'job submitted.'
                    rec += '\n' + out + '\n'
                    rec += '---' * 25
                    record(new_job.root_path, rec)
                    print(rec)
                else:
                    # time.sleep(0.001)
                    time.sleep(500)
                    # test_calculation(j, jobs, finished_jobs)
                    j += 1
                    if j > 8:
                        rec = 'nothing changes.\n'
                        rec += '---' * 25
                        record(submitted_jobs[0].root_path, rec)
                        j = 0
                    continue

    return finished_jobs
Example #13
def submit(jobs, nodes, crystal_path, moni):
    job_num = len(jobs)
    max_parallel = 5
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(job_num)
                rec = str(job)
                rec += '\n'
                rec += num + '  calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                count -= 1
                jobs.remove(job)

    # test if there is some job which is already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)
    # test if there is some jobs which are already submitted but not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    # find and submit the initial job
    # print('number of jobs: ', len(jobs))
    init_jobs = []
    for job in jobs[:]:
        if job.x == '0' and job.z == '0':
            init_jobs.append(job)
            jobs.remove(job)
    for job in init_jobs[:]:
        if not if_cal_finish(job):
            os.chdir(job.path)
            rename_file(job.path, 'hf.out')
            out = submit_job(job, 'hf')
            count += 1
            submitted_jobs.append(job)
            moni.insert_new_job(job, out)
            rec = str(job)
            print(rec)
            rec += '\n'
            rec += 'job submitted.'
            rec += '\n' + out + '\n'
            rec += '---' * 25
            record(job.root_path, rec)
        else:
            finished_jobs.append(job)
    # detect if init jobs finished
    r = 0
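    # block here until every initial job has finished: the remaining jobs reuse
    # a neighbouring finished job (obtain_nearest_job) for their starting guess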
    while True:
        # test_finished(submitted_jobs)      # test function
        moni.update_status()
        if len(submitted_jobs) == 0:
            break
        else:
            time.sleep(500)
            r += 1
            if r > 15:
                rec = 'initial calculation still not finished.\n'
                rec += '---' * 25
                record(submitted_jobs[0].root_path, rec)
                r = 0

    # submit and detect the other jobs
    j = 0
    while True:
        test_finished(submitted_jobs)
        moni.update_status()
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            if count < max_parallel and len(jobs) != 0:
                new_job = jobs.pop()
                os.chdir(new_job.path)
                nearest_job = obtain_nearest_job(new_job)
                rename_file(new_job.path, 'hf.out')
                rename_file(new_job.path, 'fort.9')
                # copy_fort9(new_job)
                copy_submit_scr(new_job, nodes, crystal_path, nearest_job)
                out = submit_job(new_job, 'hf')
                count += 1
                submitted_jobs.append(new_job)
                moni.insert_new_job(new_job, out)
                rec = str(new_job) + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---' * 25
                record(new_job.root_path, rec)
                print(rec)
            else:
                # time.sleep(10)
                time.sleep(500)
                j += 1
                # test_calculation(j, jobs, submitted_jobs, finished_jobs)    # test function
                if j > 15:
                    rec = 'nothing changes.\n'
                    rec += '---' * 25
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
Example #14
def submit(jobs, moni):
    job_num = len(jobs)
    max_parallel = 5
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        nonlocal count
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(job_num)
                rec = str(job)
                rec += '\n'
                rec += num + '  calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1

    # test if there is some job which is already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)
    # test if there is some jobs which are already submitted but not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)
        moni.update_status()
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            if count < max_parallel and len(jobs) > 0:
                new_job = jobs.pop()
                os.chdir(new_job.path)
                rename_file(new_job.path, 'hf2.out')
                out = submit_job(new_job, 'hf2')
                count += 1
                submitted_jobs.append(new_job)
                moni.insert_new_job(new_job, out)
                rec = str(new_job) + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---' * 25
                record(new_job.root_path, rec)
                print(rec)
            else:
                time.sleep(500)
                # time.sleep(200)
                j += 1
                # test_calculation(j, jobs, submitted_jobs, finished_jobs)    # test function
                if j > 15:
                    rec = 'nothing changes.\n'
                    rec += '---' * 25
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
Example #15
def hf2(path, moni):

    rec = 'Second Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    init_dist = read_init_dis(path)
    # read basic computation information
    jobs_HF1 = HF2.get_jobs(path)
    Ini = ReadIni()
    (name, slab_or_molecule, group, lattice_parameter, number_of_atoms,
     geometry, fixed_atoms) = Ini.get_basic_info()
    bs_type, nodes, crystal_path = Ini.get_hf2()
    cal_parameters = Ini.get_cal_parameters('HF2')
    aos = Ini.get_aos()
    if nodes == '' or nodes == 'default':
        nodes = 12
    record_data_json(path, 'basis_set', bs_type, section='hf2')
    record_data_json(path, 'nodes', nodes, section='hf2')

    # categorization
    bilayer = []
    singlelayer = []
    for job in jobs_HF1:
        if job.layertype == 'bilayer':
            bilayer.append(job)
        elif job.layertype == 'underlayer' or job.layertype == 'upperlayer':
            singlelayer.append(job)

    # generation of all input files
    hf2_jobs = []
    hf2_jobs_finished = []
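    # re-point each finished HF1 job at the corresponding hf2 directory and
    # generate input there unless the HF2 calculation is already done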
    for job in bilayer:
        new_path = job.path
        new_path = new_path.replace('hf1', 'hf2')
        new_job = Job(new_path)
        if not HF2.if_cal_finish(new_job):
            Inp = HF2.Input(job,
                            name,
                            slab_or_molecule,
                            group,
                            bs_type=bs_type,
                            layertype='bilayer',
                            fixed_atoms=fixed_atoms,
                            cal_parameters=cal_parameters,
                            aos=aos)
            Inp.gen_input()
            HF2.copy_submit_scr(job, nodes, crystal_path)
            # HF2.copy_fort9(job)
            hf2_jobs.append(new_job)
        else:
            hf2_jobs_finished.append(new_job)
    for job in singlelayer:
        new_path = job.path
        new_path = new_path.replace('hf1', 'hf2')
        new_job = Job(new_path)
        if not HF2.if_cal_finish(new_job):
            Inp = HF2.Layer_Inp(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type=bs_type,
                                layertype=job.layertype,
                                fixed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters,
                                aos=aos)
            Inp.gen_input()
            HF2.copy_submit_scr(job, nodes, crystal_path)
            # HF2.copy_fort9(job)
            hf2_jobs.append(new_job)
        else:
            hf2_jobs_finished.append(new_job)

    # submit the jobs
    if len(hf2_jobs) > 0:
        new_finished_jobs = HF2.submit(hf2_jobs, moni)
        hf2_jobs_finished += new_finished_jobs

    # read calculation results
    if len(hf2_jobs_finished) > 0:
        HF2.read_all_results_hf2(hf2_jobs_finished, init_dist=init_dist)

    rec = 'HF2 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #16
def hf1(path, moni):

    rec = 'First Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from input.ini file
    init_dist = HF1.read_init_dis(path)
    Ini = ReadIni()
    (name, slab_or_molecule, group, lattice_parameter, number_of_atoms,
     geometry, fixed_atoms) = Ini.get_basic_info()
    geometry = Geometry(geometry=geometry)
    bs_type, nodes, crystal_path = Ini.get_hf1()
    cal_parameters = Ini.get_cal_parameters('HF1')
    if nodes == '' or nodes == 'default':
        nodes = 12
    record_data_json(path, 'basis_set', bs_type, section='hf1')
    record_data_json(path, 'nodes', nodes, section='hf1')

    jobs_GeoOpt = HF1.select_jobs(path)
    jobs_HF1 = []
    new_jobs = []
    hf1_jobs_finished = []
    # input for the whole system
    # print('number Geo Opt', len(jobs_GeoOpt))
    for job in jobs_GeoOpt:
        path_GeoOpt = job.path
        # Bilayer
        path_HF1 = path_GeoOpt.replace('geo_opt', 'hf1')
        new_job = Job(path_HF1)
        if not HF1.if_cal_finish(new_job):
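            # 'fiexed_atoms' (sic) below presumably matches the keyword
            # spelling expected by HF1's input classes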
            Inp = HF1.Input(job,
                            name,
                            slab_or_molecule,
                            group,
                            bs_type,
                            layertype='bilayer',
                            fiexed_atoms=fixed_atoms,
                            cal_parameters=cal_parameters,
                            geometry=geometry,
                            lattice_parameters=lattice_parameter)
            Inp.gen_input()
            HF1.copy_submit_scr(new_job, nodes, crystal_path)
            new_jobs.append(new_job)
        else:
            hf1_jobs_finished.append(new_job)
        jobs_HF1.append(new_job)
        # upperlayer
        path_upper = os.path.join(path_HF1, 'upperlayer')
        new_job = Job(path_upper)
        if not HF1.if_cal_finish(new_job):
            Inp = HF1.Layer_Inp(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type,
                                layertype='upperlayer',
                                fiexed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters)
            Inp.gen_input()
            HF1.copy_submit_scr(new_job, nodes, crystal_path)
            new_jobs.append(new_job)
        else:
            hf1_jobs_finished.append(new_job)
        jobs_HF1.append(new_job)
        # underlayer
        path_under = os.path.join(path_HF1, 'underlayer')
        new_job = Job(path_under)
        if not HF1.if_cal_finish(new_job):
            Inp = HF1.Layer_Inp(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type,
                                layertype='underlayer',
                                fiexed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters)
            Inp.gen_input()
            HF1.copy_submit_scr(new_job, nodes, crystal_path)
            new_jobs.append(new_job)
        else:
            hf1_jobs_finished.append(new_job)
        jobs_HF1.append(new_job)

    # Submit the calculation job
    hf1_jobs_finished_new = HF1.submit(new_jobs, nodes, crystal_path, moni)
    hf1_jobs_finished += hf1_jobs_finished_new

    # read calculation results
    HF1.read_all_results_hf1(hf1_jobs_finished, init_dist)

    # deal with not-converged jobs
    # jobs_not_converged = [job for job in hf1_jobs_finished if job.status == 'not converged']
    # hf1_jobs_finished = [job for job in hf1_jobs_finished if job.status != 'not converged']
    # # try to not use GUESSP
    # for job in jobs_not_converged:
    #     HF1.delete_guessp(job)
    # new_jobs_finished = HF1.submit(jobs_not_converged)
    # hf1_jobs_finished += new_jobs_finished
    # jobs_not_converged = [job for job in hf1_jobs_finished if job.status == 'not converged']
    # hf1_jobs_finished = [job for job in hf1_jobs_finished if job.status != 'not converged']
    # # if still not converged, try to change some parameters
    # while len(jobs_not_converged) > 0:
    #     HF1.change_parameters(jobs_not_converged)
    #     new_jobs_finished = HF1.submit(jobs_not_converged)
    #     hf1_jobs_finished += hf1_jobs_finished_new
    #     jobs_not_converged = [job for job in hf1_jobs_finished if job.status == 'not converged']
    #     hf1_jobs_finished = [job for job in hf1_jobs_finished if job.status != 'not converged']

    rec = 'HF1 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #17
def hf1_start(path, moni):

    rec = 'First Hartree Fock Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # read info from the input.ini file
    Ini = ReadIni()
    (name, slab_or_molecule, group, lattice_parameter, number_atoms,
     geometry, fixed_atoms) = Ini.get_basic_info()
    distance_series, shift_series = Ini.get_series()
    cal_parameters = Ini.get_cal_parameters('Geo_Opt')
    record_data_json(path, 'project_name', name)
    record_data_json(path, 'system_type', slab_or_molecule)
    record_data_json(path, 'lattice_parameter', lattice_parameter)
    record_data_json(path, 'geometry', geometry)
    record_data_json(path, 'fixed_atoms', fixed_atoms)
    if isinstance(fixed_atoms, list) and len(fixed_atoms) == 2:
        geometry = Geometry(geometry=geometry, fixed_atoms=fixed_atoms)
    else:
        geometry = Geometry(geometry=geometry)
    original_geometry = deepcopy(geometry)
    bs_type, nodes, crystal_path = Ini.get_hf1()
    cal_parameters = Ini.get_cal_parameters('HF1')
    if nodes == '' or nodes == 'default':
        nodes = 12
    record_data_json(path, 'basis_set', bs_type, section='hf1')
    record_data_json(path, 'nodes', nodes, section='hf1')

    jobs_HF1 = []
    new_jobs = []
    hf1_jobs_finished = []
    # generation of the first INPUT
    dirname = 'x_0/z_0'
    job = os.path.join(path, 'hf1')
    job = os.path.join(job, dirname)
    job = Job(job)
    jobs_finished = []
    GeoOPt.write_init_dist(geometry, path)

    job_geo_dict = {job: geometry}
    # Generate jobs with different layer distances
    diff_distances = GeoOPt.Range_of_Distances(geometry, distance_series)
    geo_with_diff_distance = diff_distances.get_geo_series()
    init_distance = diff_distances.init_distance
    for distance, geometry in geo_with_diff_distance.items():
        new_job = deepcopy(job)
        new_z_dirname = 'z_{0:.3f}'.format(distance)
        new_job.reset('z_dirname', new_z_dirname)
        job_geo_dict[new_job] = geometry

    # Generate jobs with different displacements, producing
    # ((0.1, 0), (0.25, 0), (0.35, 0), (0.5, 0))
    range_of_displacement = GeoOPt.Range_of_Displacement(
        original_geometry, shift_series)
    geo_with_diff_displacement = range_of_displacement.get_geo_series()
    job_geo_dict_dis = {}
    for displacement, geometry in geo_with_diff_displacement.items():
        new_job = deepcopy(job)
        new_x_dirname = 'x_{0:.2f}'.format(displacement)
        new_job.reset('x_dirname', new_x_dirname)
        job_geo_dict_dis[new_job] = geometry
        job_geo_dict[new_job] = geometry

    # generate jobs with various distances under different relative shifts
    for shift, geometry in geo_with_diff_displacement.items():
        Geo_with_diff_Dis_diff_Distance = GeoOPt.Range_of_Distances(
            geometry, distance_series)
        geo_with_diff_dis_diff_distance = Geo_with_diff_Dis_diff_Distance.get_geo_series()
        distances = sorted(geo_with_diff_dis_diff_distance.keys())
        # locate where the distance series crosses zero (start at 1 so that
        # distances[i - 1] does not wrap around to the last element)
        loc = 3
        for i in range(1, len(distances)):
            if distances[i - 1] <= 0 and distances[i] >= 0:
                loc = i
        # Select some of the distance values
        for key in list(geo_with_diff_dis_diff_distance.keys()):
            if key not in distances[loc - 2:loc + 2]:
                del geo_with_diff_dis_diff_distance[key]
        # print(list(geo_with_diff_dis_diff_distance.keys()))
        for distance, geo in geo_with_diff_dis_diff_distance.items():
            dirname = 'x_{0:.2f}/z_{1:.3f}'.format(shift, distance)
            new_path = os.path.join(path, os.path.join('geo_opt', dirname))
            new_job = Job(new_path)
            # print(new_job)
            job_geo_dict[new_job] = geo

    # generate all INPUT files
    for job, geometry in job_geo_dict.items():
        if not HF1.if_cal_finish(job):
            # print('JOB not finished yet: ', job)
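            # 'fiexed_atoms' (sic) below presumably matches the keyword
            # spelling expected by HF1's input classes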
            HF1_Inp = HF1.Input(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type,
                                layertype='bilayer',
                                fiexed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters,
                                geometry=geometry,
                                lattice_parameters=lattice_parameter)
            HF1_Inp.gen_input()
            HF1.copy_submit_scr(job, nodes, crystal_path)
            new_jobs.append(job)
        else:
            jobs_finished.append(job)
        jobs_HF1.append(job)
        # deal with layers
        # upperlayer
        path_upper = os.path.join(job.path, 'upperlayer')
        upper_job = Job(path_upper)
        if not HF1.if_cal_finish(upper_job):
            Inp = HF1.Layer_Inp(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type,
                                layertype='upperlayer',
                                fiexed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters,
                                geometry=geometry,
                                lattice_parameters=lattice_parameter)
            Inp.gen_input()
            HF1.copy_submit_scr(upper_job, nodes, crystal_path)
            new_jobs.append(upper_job)
        else:
            hf1_jobs_finished.append(upper_job)
        jobs_HF1.append(upper_job)
        # underlayer
        path_under = os.path.join(job.path, 'underlayer')
        under_job = Job(path_under)
        if not HF1.if_cal_finish(under_job):
            Inp = HF1.Layer_Inp(job,
                                name,
                                slab_or_molecule,
                                group,
                                bs_type,
                                layertype='underlayer',
                                fiexed_atoms=fixed_atoms,
                                cal_parameters=cal_parameters,
                                geometry=geometry,
                                lattice_parameters=lattice_parameter)
            Inp.gen_input()
            HF1.copy_submit_scr(under_job, nodes, crystal_path)
            new_jobs.append(under_job)
        else:
            hf1_jobs_finished.append(under_job)
        jobs_HF1.append(under_job)

    # Submit the calculation job
    hf1_jobs_finished_new = HF1.submit(new_jobs, nodes, crystal_path, moni)
    hf1_jobs_finished += hf1_jobs_finished_new

    # read calculation results
    init_dist = HF1.read_init_dis(path)
    HF1.read_all_results_hf1(hf1_jobs_finished, init_dist)

    rec = 'HF1 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #18
def rpa(path, moni):

    rec = 'LRPA begins.\n'
    rec += '---'*25
    print(rec)
    record(path, rec)

    # read basic computation information
    init_dist = read_init_dis(path)
    lmp2_jobs = RPA.get_jobs(path)
    Ini = ReadIni()
    nodes_rpa_b, memory_b, nodes_rpa_s, memory_s, molpro_path, molpro_key = Ini.get_rpa()

    # categorization
    bilayer = []
    singlelayer = []
    for job in lmp2_jobs:
        if job.layertype == 'bilayer':
            bilayer.append(job)
        elif job.layertype == 'underlayer' or job.layertype == 'upperlayer':
            singlelayer.append(job)

    # generate inp file and scr file
    rpa_jobs = []
    rpa_jobs_finished = []
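    # bilayer and single-layer jobs get separate node counts and memory limits,
    # taken from the rpa section of input.ini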
    for job in bilayer:
        new_path = job.path
        new_path = new_path.replace('lmp2', 'rpa')
        new_job = Job(new_path)
        new_job.parameter['nodes'] = nodes_rpa_b
        if not RPA.if_cal_finish(new_job):
            Inp = RPA.RPA_Input(job, memory_b)
            Inp.generate_input()
            rpa_jobs.append(new_job)
            Scr = RPA.Scr(new_job, nodes_rpa_b, molpro_key, molpro_path)
            Scr.gen_scr()
        else:
            new_job.status = 'finished'
            rpa_jobs_finished.append(new_job)
    for job in singlelayer:
        new_path = job.path
        new_path = new_path.replace('lmp2', 'rpa')
        new_job = Job(new_path)
        new_job.parameter['nodes'] = nodes_rpa_s
        if not RPA.if_cal_finish(new_job):
            Inp = RPA.RPA_Input(job, memory_s)
            Inp.generate_input()
            Scr = RPA.Scr(new_job, nodes_rpa_s, molpro_key, molpro_path)
            Scr.gen_scr()
            rpa_jobs.append(new_job)
        else:
            new_job.status = 'finished'
            rpa_jobs_finished.append(new_job)

    # submit the jobs
    if len(rpa_jobs) > 0:
        new_finished_jobs = RPA.submit(rpa_jobs, moni)
        rpa_jobs_finished += new_finished_jobs
    # read calculation results
    if len(rpa_jobs_finished) > 0:
        RPA.read_and_record_all_results(rpa_jobs_finished, init_dist)

    rec = 'LRPA finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
Example #19
def geo_opt(path, moni):

    rec = 'Geometry Optimization begins.'
    print(rec)
    record(path, rec)
    GeoOPt.creat_geo_lat_json(path)     # might be deleted

    # read info from the input.ini file
    Ini = ReadIni()
    name, slab_or_molecule, group, lattice_parameter, number_atoms, geometry, fixed_atoms = Ini.get_basic_info()
    distance_series, shift_series = Ini.get_series()
    cal_parameters = Ini.get_cal_parameters('Geo_Opt')
    record_data_json(path, 'project_name', name)
    record_data_json(path, 'system_type', slab_or_molecule)
    record_data_json(path, 'lattice_parameter', lattice_parameter)
    record_data_json(path, 'geometry', geometry)
    record_data_json(path, 'fixed_atoms', fixed_atoms)
    if isinstance(fixed_atoms, list) and len(fixed_atoms) == 2:
        geometry = Geometry(geometry=geometry, fixed_atoms=fixed_atoms)
    else:
        geometry = Geometry(geometry=geometry)
    original_geometry = deepcopy(geometry)
    bs_type, functional, nodes, crystal_path = Ini.get_geo_opt()
    record_data_json(path, 'basis_set', bs_type, section='geo_opt')
    record_data_json(path, 'functional', functional, section='geo_opt')
    record_data_json(path, 'nodes', nodes, section='geo_opt')
    # test_ini_read(group, lattice_parameter, number_atoms, slab_or_molecule)

    jobs = []
    new_jobs = []
    # generation of the first INPUT
    dirname = 'x_0/z_0'
    job = os.path.join(path, 'geo_opt')
    job = os.path.join(job, dirname)
    job = Job(job)
    jobs_finished = []
    if not if_cal_finish(job):
        Geo_Inp = GeoOPt.Geo_Opt_Input(
            job,
            name,
            slab_or_molecule,
            group,
            lattice_parameter,
            geometry,
            bs_type,
            functional,
            cal_parameters)
        Geo_Inp.gen_input()
        new_jobs.append(job)
    else:
        jobs_finished.append(job)
    jobs.append(job)
    GeoOPt.write_init_dist(geometry, path)

    job_geo_dict = {}
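    # job_geo_dict maps each Job to the geometry it should run with; all INPUT
    # files below are generated from this mapping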
    # Generate jobs with different layer distances
    diff_distances = GeoOPt.Range_of_Distances(geometry, distance_series)
    geo_with_diff_distance = diff_distances.get_geo_series()
    init_distance = diff_distances.init_distance
    for distance, geometry in geo_with_diff_distance.items():
        new_job = deepcopy(job)
        new_z_dirname = 'z_{0:.3f}'.format(distance)
        new_job.reset('z_dirname', new_z_dirname)
        job_geo_dict[new_job] = geometry

    # Generate jobs with different displacements, producing
    # ((0.1, 0), (0.25, 0), (0.35, 0), (0.5, 0))
    range_of_displacement = GeoOPt.Range_of_Displacement(
        original_geometry, shift_series)
    geo_with_diff_displacement = range_of_displacement.get_geo_series()
    job_geo_dict_dis = {}
    for displacement, geometry in geo_with_diff_displacement.items():
        new_job = deepcopy(job)
        new_x_dirname = 'x_{0:.2f}'.format(displacement)
        new_job.reset('x_dirname', new_x_dirname)
        job_geo_dict_dis[new_job] = geometry
        job_geo_dict[new_job] = geometry

    # generate jobs with various distances under different relative shifts
    for shift, geometry in geo_with_diff_displacement.items():
        Geo_with_diff_Dis_diff_Distance = GeoOPt.Range_of_Distances(geometry, distance_series)
        geo_with_diff_dis_diff_distance = Geo_with_diff_Dis_diff_Distance.get_geo_series()
        distances = list(geo_with_diff_dis_diff_distance.keys())
        distances.sort()
        # locate where the distance series crosses zero (start at 1 so that
        # distances[i - 1] does not wrap around to the last element)
        loc = 3
        for i in range(1, len(distances)):
            if distances[i - 1] <= 0 and distances[i] >= 0:
                loc = i
        # Select some of the distance values
        for key in list(geo_with_diff_dis_diff_distance.keys()):
            if key not in distances[loc-2:loc+2]:
                del geo_with_diff_dis_diff_distance[key]
        # print(list(geo_with_diff_dis_diff_distance.keys()))
        for distance, geo in geo_with_diff_dis_diff_distance.items():
            dirname = 'x_{0:.2f}/z_{1:.3f}'.format(shift, distance)
            new_path = os.path.join(path, os.path.join('geo_opt', dirname))
            new_job = Job(new_path)
            # print(new_job)
            job_geo_dict[new_job] = geo

    # generate all remaining INPUT files (the first one was created above)
    for job, geometry in job_geo_dict.items():
        if not if_cal_finish(job):
            # print('JOB not finished yet: ', job)
            Geo_Inp = GeoOPt.Geo_Opt_Input(
                job,
                name,
                slab_or_molecule,
                group,
                lattice_parameter,
                geometry,
                bs_type,
                functional,
                cal_parameters)
            Geo_Inp.gen_input()
            new_jobs.append(job)
        else:
            jobs_finished.append(job)
        jobs.append(job)
    # Copy files and Submit the calculation job above
    new_jobs_finished = GeoOPt.submit(
        new_jobs, nodes, crystal_path, moni)
    jobs_finished += new_jobs_finished

    # # Select the optimal distance of each x point
    # para = [
    #     name,
    #     slab_or_molecule,
    #     group,
    #     lattice_parameter,
    #     bs_type,
    #     functional,
    #     nodes,
    #     crystal_path]
    # # x_10
    # x_10 = {job: geometry for job, geometry in job_geo_dict_dis.items()
    #         if job.x == '0.10'}
    # jobs_10 = [job for job in job_geo_dict_dis.keys() if job.x == '0.10']
    # init_job_10 = jobs_10[0]
    # jobs_10, x_10, min_job_10, jobs_10_finished = GeoOPt.select_optimal_dist(
    #     x_10, 0, para)
    # jobs += jobs_10
    # # x_25
    # x_25 = {job: geometry for job, geometry in job_geo_dict_dis.items()
    #         if job.x == '0.25'}
    # jobs_25 = [job for job in job_geo_dict_dis.keys() if job.x == '0.25']
    # init_job_25 = jobs_25[0]
    # pos_min_10 = look_for_in_list(jobs_10, min_job_10)
    # pos_init_10 = look_for_in_list(jobs_10, init_job_10)
    # diff = pos_min_10 - pos_init_10
    # jobs_25, x_25, min_job_25, jobs_25_finished = GeoOPt.select_optimal_dist(
    #     x_25, diff, para)
    # jobs += jobs_25
    # # x_35
    # x_35 = {job: geometry for job, geometry in job_geo_dict_dis.items()
    #         if job.x == '0.35'}
    # init_job_35 = list(x_35.keys())[0]
    # pos_min_25 = look_for_in_list(jobs_25, min_job_25)
    # pos_init_25 = look_for_in_list(jobs_25, init_job_25)
    # diff = pos_min_25 - pos_init_25
    # jobs_35, x_35, min_job_35, jobs_35_finished = GeoOPt.select_optimal_dist(
    #     x_35, diff, para)
    # jobs += jobs_35
    # # x_50
    # x_50 = {job: geometry for job, geometry in job_geo_dict_dis.items()
    #         if job.x == '0.50'}
    # init_job_50 = list(x_50.keys())[0]
    # pos_min_35 = look_for_in_list(jobs_35, min_job_35)
    # pos_init_35 = look_for_in_list(jobs_35, init_job_35)
    # diff = pos_min_35 - pos_init_35
    # jobs_50, x_50, min_job_50, jobs_50_finished = GeoOPt.select_optimal_dist(
    #     x_50, diff, para)
    # jobs += jobs_50
    #
    # # read calculation results
    # jobs_finished += jobs_10_finished
    # jobs_finished += jobs_25_finished
    # jobs_finished += jobs_35_finished
    # jobs_finished += jobs_50_finished
    GeoOPt.read_all_results(jobs_finished, init_distance)

    rec = 'Geometry optimization finished!\n'
    rec += '***'*25
    print(rec)
    record(path, rec)
Example #20
def results(path):

    # get jobs
    correction_jobs, root_jobs = get_jobs(path)
    correction_jobs_dict = {}

    # read results of correction
    init_dist = read_init_dis(path)
    if not if_results_json_exits(path) and len(correction_jobs) != 0:
        Correction.read_all_results(correction_jobs, init_dist)
    correction_results = []
    for job in correction_jobs:
        CoRe = Results.CorrectionResult(job)
        correction_results.append(CoRe)

    # choose different correction results and categorize by basis set
    method_error_jobs = []
    method_error_dict = {}
    iext1_rpa_dict = {}
    bas_rpa_iext1_dict = {}
    for Res in correction_results:
        # if Res.step == 'rpa_cc':
        if '_cc' in Res.step:
            method_error_jobs.append(Res)
            if Res.bs not in method_error_dict:
                method_error_dict[Res.bs] = {Res.coord: Res}
            else:
                method_error_dict[Res.bs][Res.coord] = Res
        # elif Res.step == 'bas_rpa_iext1':
        elif 'bas_' in Res.step:
            if Res.bs not in bas_rpa_iext1_dict:
                bas_rpa_iext1_dict[Res.bs] = {Res.coord: Res}
            else:
                bas_rpa_iext1_dict[Res.bs][Res.coord] = Res
        # elif Res.step == 'iext1_rpa':
        elif Res.step == 'iext1_rpa' or Res.step == 'iext1_lmp2':
            if Res.bs not in iext1_rpa_dict:
                iext1_rpa_dict[Res.bs] = {Res.coord: Res}
            else:
                iext1_rpa_dict[Res.bs][Res.coord] = Res

    coord_set = {job.coord for job in method_error_jobs}
    coord_list = list(coord_set)

    # get extrapolation values
    extrap_method_error = {'avdtz': {}, 'avtqz': {}}
    extrap_iext1_rpa = {'avdtz': {}, 'avtqz': {}}
    basis_set_correction = {'avdtz': {}, 'avtqz': {}}
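    # two-point basis-set extrapolations: av(d/t)z from cardinal numbers (2, 3)
    # and av(t/q)z from (3, 4)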
    for coord in coord_list:
        # method error correction
        if 'avdz' in method_error_dict and 'avtz' in method_error_dict:
            if (coord in method_error_dict['avdz']
                    and coord in method_error_dict['avtz']):
                avdz = method_error_dict['avdz'][coord]
                avtz = method_error_dict['avtz'][coord]
                avdtz = deepcopy(avtz)
                avdtz.set_extrapolation_energy(avdz, avtz, [2, 3])
                avdtz.bs = 'av(d/t)z'
                extrap_method_error['avdtz'][coord] = avdtz
            # else:
            #     print(coord)
        if 'avtz' in method_error_dict and 'avqz' in method_error_dict:
            if (coord in method_error_dict['avtz']
                    and coord in method_error_dict['avqz']):
                avtz = method_error_dict['avtz'][coord]
                avqz = method_error_dict['avqz'][coord]
                avtqz = deepcopy(avqz)
                avtqz.set_extrapolation_energy(avtz, avqz, [3, 4])
                avtqz.bs = 'av(t/q)z'
                extrap_method_error['avtqz'][coord] = avtqz
            # else:
            #     print(coord)
        # basis set correction
        # iext1
        if 'avdz' in iext1_rpa_dict and 'avtz' in iext1_rpa_dict:
            if (coord in iext1_rpa_dict['avdz']
                    and coord in iext1_rpa_dict['avtz']):
                avdz = iext1_rpa_dict['avdz'][coord]
                avtz = iext1_rpa_dict['avtz'][coord]
                avdtz = deepcopy(avtz)
                avdtz.set_extrapolation_energy(avdz, avtz, [2, 3])
                avdtz.bs = 'av(d/t)z'
                extrap_iext1_rpa['avdtz'][coord] = avdtz
        if 'avtz' in iext1_rpa_dict and 'avqz' in iext1_rpa_dict:
            if (coord in iext1_rpa_dict['avtz']
                    and coord in iext1_rpa_dict['avqz']):
                avtz = iext1_rpa_dict['avtz'][coord]
                avqz = iext1_rpa_dict['avqz'][coord]
                avtqz = deepcopy(avqz)
                avtqz.set_extrapolation_energy(avtz, avqz, [3, 4])
                avtqz.bs = 'av(t/q)z'
                extrap_iext1_rpa['avtqz'][coord] = avtqz
        # get basis set correction
        if len(extrap_iext1_rpa['avdtz']) != 0:
            if (coord in extrap_iext1_rpa['avdtz']
                    and coord in bas_rpa_iext1_dict['per']):
                bs_correction = extrap_iext1_rpa['avdtz'][coord] - \
                    bas_rpa_iext1_dict['per'][coord]
                bs_correction.bs = 'av(d/t)z'
                basis_set_correction['avdtz'][coord] = bs_correction
        if len(extrap_iext1_rpa['avtqz']) != 0:
            if (coord in extrap_iext1_rpa['avtqz']
                    and coord in bas_rpa_iext1_dict['per']):
                bs_correction = extrap_iext1_rpa['avtqz'][coord] - \
                    bas_rpa_iext1_dict['per'][coord]
                bs_correction.bs = 'av(t/q)z'
                basis_set_correction['avtqz'][coord] = bs_correction
    # record above data
    results_file = os.path.join(path, 'final_results.json')
    record_correction_results(basis_set_correction, coord_list,
                              extrap_iext1_rpa, extrap_method_error, init_dist,
                              results_file)

    # get Hartree Fock values
    hf2_json_file = os.path.join(path, 'hf2')
    hf2_json_file = os.path.join(hf2_json_file, 'hf2.json')
    hf2_coords = set()
    hf2_jobs = []
    for job in correction_jobs:
        if job.coord not in hf2_coords:
            job_path = job.path
            new_job_path = job_path.replace('cluster', 'hf2')
            new_job = Job(new_job_path)
            hf2_jobs.append(new_job)
            hf2_coords.add(new_job.coord)
    for job in hf2_jobs[:]:
        job_path = job.path
        under_path = os.path.join(job_path, 'underlayer')
        upper_path = os.path.join(job_path, 'upperlayer')
        under_job = Job(under_path)
        upper_job = Job(upper_path)
        hf2_jobs.append(under_job)
        hf2_jobs.append(upper_job)
    if not os.path.exists(hf2_json_file) and len(hf2_jobs) != 0:
        HF2.read_all_results_hf2(hf2_jobs, init_dist=init_dist)
    hf2 = {}
    for job in hf2_jobs:
        coord = str(job.coord)
        Res = Results.FResult(job)
        Res.read_info_from_json()
        if coord not in hf2:
            hf2[coord] = {}
        hf2[coord][job.layertype] = Res
    # calculate layer energy
    for coord in coord_list:
        layer_Res = hf2[coord]['bilayer'] - \
            hf2[coord]['upperlayer'] - hf2[coord]['underlayer']
        layer_Res.record_data('layer energy')
        layer_Res.record_data('hf', results_file)
        hf2[coord]['layer energy'] = layer_Res

    # get embedded fragment LdrCCD (RPA) values
    rpa_jobs = []
    for job in hf2_jobs:
        job_path = job.path
        new_job_path = job_path.replace('hf2', 'rpa')
        new_job = Job(new_job_path)
        rpa_jobs.append(new_job)
    rpa_json_file = os.path.join(path, 'rpa')
    rpa_json_file = os.path.join(rpa_json_file, 'rpa.json')
    if not os.path.exists(rpa_json_file) and len(rpa_jobs) != 0:
        RPA.read_and_record_all_results(rpa_jobs)
    rpa = {}
    for job in rpa_jobs:
        coord = str(job.coord)
        Res = Results.FResult(job)
        Res.read_info_from_json()
        if coord not in rpa:
            rpa[coord] = {}
        rpa[coord][job.layertype] = Res
    # calculate layer energy
    for coord in coord_list:
        layer_Res = rpa[coord]['bilayer'] - \
            rpa[coord]['upperlayer'] - rpa[coord]['underlayer']
        rpa[coord]['layer energy'] = layer_Res
        layer_Res.record_data('layer energy')
        layer_Res.record_data('rpa', results_file)

    # get final results
    final_data = {'avdtz': {}, 'avtqz': {}}
    for coord in coord_list:
        if len(extrap_method_error['avdtz']) != 0:
            final_data['avdtz'][coord] = hf2[coord]['layer energy'] + rpa[coord]['layer energy'] + \
                extrap_method_error['avdtz'][coord] + extrap_iext1_rpa['avdtz'][coord]
        if len(extrap_method_error['avtqz']) != 0:
            final_data['avtqz'][coord] = hf2[coord]['layer energy'] + rpa[coord]['layer energy'] + \
                extrap_method_error['avtqz'][coord] + extrap_iext1_rpa['avtqz'][coord]
    # record data
    for coord in coord_list:
        try:
            Results.record_data_json(final_data['avdtz'][coord],
                                     'final result avdtz', results_file,
                                     init_dist)
        except Exception as e:
            print(e)
        try:
            Results.record_data_json(final_data['avtqz'][coord],
                                     'final result avtqz', results_file,
                                     init_dist)
        except Exception as e:
            print(e)

    rec = 'Data processing finished.\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
Example #21
def submit(jobs, moni, if_finish_func, method='lmp2'):
    job_num = len(jobs)
    count = 0
    submitted_jobs = []
    finished_jobs = []
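    # concurrency cap keyed by node count, e.g. at most 6 single-node jobs
    # but only 3 jobs on 28 nodes at a time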
    max_calculations_dict = {'1': 6, '6': 2, '12': 5, '28': 3}

    def test_finished(jobs):
        """
        test whether the submitted jobs have finished
        if a job has finished, add it to finished_jobs and remove it from submitted_jobs
        :param jobs:
        :return:
        """
        nonlocal count
        nonlocal count_dict
        for job in jobs[:]:
            if if_finish_func(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(job_num)
                rec = str(job)
                rec += '\n'
                rec += num + '  calculation finished.\n'
                rec += '---'*25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1
                count_dict[job.parameter['nodes']] -= 1

    # test if there is some job which is already finished
    for job in jobs[:]:
        if if_finish_func(job):
            finished_jobs.append(job)
            jobs.remove(job)
    # test if there is some jobs which are already submitted but not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    # categorize jobs according to the nodes number
    jobs_dict = {}      # jobs dict according to the specific node
    count_dict = {}     # number of submitted jobs for each specific node
    nodes_list = []
    for job in jobs:
        node = job.parameter['nodes']
        if node not in nodes_list:
            nodes_list.append(node)
            jobs_dict[node] = [job]
            count_dict[node] = 0
        else:
            jobs_dict[node].append(job)

    # submit and detect all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)   # update list finished_jobs and list submitted_jobs
        moni.update_status()
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            # test_calculation(j, jobs, finished_jobs)
            for node in nodes_list:
                if count_dict[node] < max_calculations_dict[node] and len(jobs_dict[node]) > 0:
                    new_job = jobs_dict[node].pop()
                    os.chdir(new_job.path)
                    rename_file(new_job.path, '{}.out'.format(method))
                    out = submit_job(new_job, '{}'.format(method))
                    count += 1
                    count_dict[node] += 1
                    submitted_jobs.append(new_job)
                    moni.insert_new_job(new_job, out)
                    rec = str(new_job) + '\n'
                    rec += 'job submitted.'
                    rec += '\n' + out + '\n'
                    rec += '---'*25
                    record(new_job.root_path, rec)
                    print(rec)
                else:
                    # time.sleep(0.01)
                    time.sleep(500)
                    j += 1
                    # after 15 consecutive idle polls, log a heartbeat to the record file
                    if j > 15:
                        rec = 'nothing changes.\n'
                        rec += '---'*25
                        record(submitted_jobs[0].root_path, rec)
                        j = 0
                    continue

    return finished_jobs
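
A minimal driver sketch for the per-node scheduler above (hypothetical wiring: it assumes the LMP2.get_jobs and LMP2.if_cal_finish helpers used in Example #23 below, plus a monitor object exposing the get_running_jobs, update_status and insert_new_job methods that submit calls):

def run_lmp2_batch(path, moni):
    # collect the LMP2 jobs under `path` and hand them to the scheduler;
    # submit() blocks until every job reports finished
    jobs = LMP2.get_jobs(path)
    return submit(jobs, moni, LMP2.if_cal_finish, method='lmp2')
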
Example #22
def submit(jobs, moni):
    job_num = len(jobs)
    max_parallel = 8
    # max_parallel = 75
    count = 0
    submitted_jobs = []
    finished_jobs = []

    def test_finished(jobs):
        """
        test jobs which have benn submittdt is finished or not
        if a job finished, add it to list finished_jobs, and delete it from list submitted_jobs
        :param jobs:
        :return:
        """
        nonlocal count
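        # iterate over a shallow copy so finished jobs can be removed in place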
        for job in jobs[:]:
            if if_cal_finish(job):
                finished_jobs.append(job)
                num = str(len(finished_jobs)) + '/' + str(job_num)
                rec = str(job)
                rec += '\n'
                rec += num + '  calculation finished.\n'
                rec += '---' * 25
                print(rec)
                record(job.root_path, rec)
                jobs.remove(job)
                count -= 1

    # check for jobs that have already finished
    for job in jobs[:]:
        if if_cal_finish(job):
            finished_jobs.append(job)
            jobs.remove(job)
    # check for jobs that were already submitted but have not finished
    running_jobs = moni.get_running_jobs()
    for job in jobs[:]:
        if job in running_jobs:
            submitted_jobs.append(job)
            jobs.remove(job)

    # submit and monitor all jobs
    j = 0
    while True:
        test_finished(submitted_jobs)  # update list finished_jobs and list submitted_jobs
        moni.update_status()
        if len(finished_jobs) == job_num and len(submitted_jobs) == 0:
            break
        else:
            # throttle: allow at most max_parallel jobs to run at once
            if count < max_parallel and len(jobs) > 0:
                new_job = jobs.pop()
                os.chdir(new_job.path)
                out = submit_job(new_job, 'lmp2')
                count += 1
                submitted_jobs.append(new_job)
                moni.insert_new_job(new_job, out)
                rec = str(new_job) + '\n'
                rec += 'job submitted.'
                rec += '\n' + out + '\n'
                rec += '---' * 25
                record(new_job.root_path, rec)
                print(rec)
            else:
                time.sleep(500)
                # time.sleep(1)
                j += 1
                # test_calculation(j, jobs, submitted_jobs, finished_jobs)    # test function
                # after 15 consecutive idle polls, log a heartbeat to the record file
                if j > 15:
                    rec = 'nothing changes.\n'
                    rec += '---' * 25
                    record(submitted_jobs[0].root_path, rec)
                    j = 0
                continue

    return finished_jobs
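
Compared with Example #21, this variant throttles on a single global max_parallel limit instead of a per-node cap; both versions poll in a loop, sleep 500 s when no slot is free, and write a 'nothing changes.' heartbeat to the record file after 15 idle polls.
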
Example #23
def lmp2(path, moni):

    rec = 'LMP2 Calculation begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    init_dist = read_init_dis(path)
    # read basic computation information
    hf2_jobs = LMP2.get_jobs(path)
    Ini = ReadIni()
    nodes, cryscor_path = Ini.get_lmp2()
    cal_parameters = Ini.get_cal_parameters('LMP2')
    ll = Ini.ll
    if nodes == '' or nodes == 'default':
        nodes = 1
    record_data_json(path, 'nodes', nodes, section='lmp2')

    # categorization
    bilayer = []
    singlelayer = []
    for job in hf2_jobs:
        if job.layertype == 'bilayer':
            bilayer.append(job)
        elif job.layertype == 'underlayer' or job.layertype == 'upperlayer':
            singlelayer.append(job)

    # generate all input files and copy the needed files
    lmp2_jobs = []
    lmp2_jobs_finished = []
    for job in bilayer:
        new_path = job.path
        new_path = new_path.replace('hf2', 'lmp2')
        new_job = Job(new_path)
        if not LMP2.if_cal_finish(new_job):
            Inp = LMP2.Lmp2Input(job, ll, cal_parameters)
            Inp.write_input()
            LMP2.copy_files(new_job, nodes, cryscor_path)
            lmp2_jobs.append(new_job)
        else:
            lmp2_jobs_finished.append(new_job)
    for job in singlelayer:
        new_path = job.path
        new_path = new_path.replace('hf2', 'lmp2')
        new_job = Job(new_path)
        if not LMP2.if_cal_finish(new_job):
            Inp = LMP2.Lmp2InputLayer(job, cal_parameters)
            Inp.write_input()
            LMP2.copy_files(new_job, nodes, cryscor_path)
            lmp2_jobs.append(new_job)
        else:
            lmp2_jobs_finished.append(new_job)

    # submit the jobs
    if len(lmp2_jobs) > 0:
        new_finished_jobs = LMP2.submit(lmp2_jobs, moni)
        lmp2_jobs_finished += new_finished_jobs
    # read calculation results
    # if len(lmp2_jobs_finished) > 0:
    #     LMP2.read_all_results_lmp2(lmp2_jobs_finished, init_distance=init_dist)

    rec = 'LMP2 finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
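
For reference, a plausible shape of the LMP2.if_cal_finish check used above (a hedged sketch only: the output file name and the end-of-run marker below are assumptions, not the codebase's actual convention):

import os

def if_cal_finish(job):
    # hypothetical: treat the job as done when its output file exists
    # and contains a recognizable end-of-run marker
    out_file = os.path.join(job.path, 'lmp2.out')   # assumed output name
    if not os.path.isfile(out_file):
        return False
    with open(out_file) as f:
        return 'calculation finished' in f.read()   # assumed marker string
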
Example #24
def correction(path, moni):

    rec = 'Correction begins.\n'
    rec += '---' * 25
    print(rec)
    record(path, rec)

    # get jobs
    cluster_jobs = get_jobs(path)
    init_dist = read_init_dis(path)

    # read settings from the input.ini file
    Ini = ReadIni()
    project_name, *_ = Ini.get_basic_info()
    nodes, memorys, bs, molpro_path, molpro_key, atoms = Ini.get_correction()
    ll = Ini.ll
    record_data_json(path, 'memorys', memorys, section='correction')
    record_data_json(path, 'nodes', nodes, section='correction')

    # prepare input
    cluster_path = os.path.join(path, 'cluster')
    missions, nodes, memorys = get_missions(memorys, nodes)

    inputs = list(missions)
    inputs = [inp + '.inp' for inp in inputs]
    inputs_files = [os.path.join(cluster_path, inp) for inp in inputs]

    inputs, nodes, memorys = compare_inp_files(cluster_path, inputs, nodes,
                                               memorys)
    inputs_dict = {
        inp.split('.')[0]: os.path.join(cluster_path, inp)
        for inp in inputs
    }  # the input file may already exist in the cluster/ directory
    inputs = [inp.split('.')[0] for inp in inputs]

    # generate input files
    correction_jobs = []
    correction_jobs_finished = []
    correction_jobs_dict = {inp: [] for inp in inputs}
    for job in cluster_jobs:
        for inp in inputs:
            new_job = deepcopy(job)
            new_job.method = inp
            new_job.parameter['node'] = nodes[inp]
            new_job.parameter['memory'] = memorys[inp]
            new_job.parameter['original_input_file'] = inputs_dict[inp]
            if not Correction.if_cal_finish(new_job):
                try:
                    print(str(new_job))
                    if not os.path.exists(inputs_dict[inp]):
                        print('{} file not found.'.format(inp))
                        print('Program will generate the input automatically.')
                        if new_job.method.startswith('per'):
                            Inp = Correction.InputPerRPA(
                                new_job,
                                project_name,
                                memorys[new_job.method],
                                uc_atoms=atoms,
                                ll=ll)
                            Inp.gen_inp()
                        # elif new_job.method.endswith('rpa_cc'):
                        elif '_cc' in new_job.method:
                            Inp = Correction.InputRPACC(
                                new_job,
                                project_name,
                                memorys[new_job.method],
                                uc_atoms=atoms,
                                ll=ll)
                            Inp.gen_inp()
                        # elif new_job.method.endswith('iext1_rpa'):
                        elif 'z_iext1' in new_job.method:
                            Inp = Correction.InputIext1RPA(
                                new_job,
                                project_name,
                                memorys[new_job.method],
                                uc_atoms=atoms,
                                ll=ll)
                            Inp.gen_inp()

                    else:
                        if new_job.method.startswith('per'):
                            inp_name = new_job.method + '.inp'
                            MB = Correction.Molpro_Bs(new_job, inp_name)
                            MB.get_molpro_bs()
                            MB.write_bs()
                        else:
                            Correction.generation_input(new_job)
                    correction_jobs.append(new_job)
                    correction_jobs_dict[inp].append(new_job)
                    print('Input generated.')
                except Exception as e:
                    print(e)
                    print('Input failed to generate.')
                print('---' * 25)
            else:
                new_job.status = 'finished'
                correction_jobs_finished.append(new_job)

    # generate job scripts
    for job_list in correction_jobs_dict.values():
        for job in job_list:
            Scr = Correction.Script(job, molpro_key, molpro_path)
            Scr.write_scr()

    # submit jobs
    if len(correction_jobs) > 0:
        new_finished_jobs = Correction.submit(correction_jobs, moni)
        correction_jobs_finished += new_finished_jobs
    # read and record all results
    if len(correction_jobs_finished) != 0:
        Correction.read_all_results(correction_jobs_finished,
                                    init_distance=init_dist,
                                    ll=ll)

    rec = 'Correction finished!\n'
    rec += '***' * 25
    print(rec)
    record(path, rec)
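
A minimal sketch of how the two stages above might be chained (hypothetical driver: both functions take the project path and the shared job monitor used throughout these examples):

def run_correlation_stages(path, moni):
    # run the local-MP2 step first, then the cluster-based corrections
    lmp2(path, moni)
    correction(path, moni)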