Example #1
def make_property(confs, inter_param, property_list):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                do_refine = True
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                do_refine = False
                suffix = 'reprod'
            else:
                do_refine = False
                suffix = '00'
            # generate working directory like mp-xxx/eos_00 if jj['type'] == 'eos'
    # handle the case where the working directory already exists
            # ...

            # determine the suffix: from scratch or refine
            # ...

            property_type = jj['type']
            path_to_equi = os.path.join(ii, 'relaxation', 'relax_task')
            path_to_work = os.path.join(ii, property_type + '_' + suffix)

            if os.path.exists(path_to_work):
                dlog.warning('%s already exists' % path_to_work)
            else:
                os.makedirs(path_to_work)

            prop = make_property_instance(jj)
            task_list = prop.make_confs(path_to_work, path_to_equi, do_refine)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            for kk in task_list:
                poscar = os.path.join(kk, 'POSCAR')
                inter = make_calculator(inter_param_prop, poscar)
                inter.make_potential_files(kk)
                dlog.debug(prop.task_type())  ### debug
                inter.make_input_file(kk, prop.task_type(), prop.task_param())

            prop.post_process(
                task_list
            )  # e.g. generate the same KPOINTS file for elastic calculations when using VASP
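A minimal usage sketch, assuming a dpgen-autotest-style parameter layout. The glob pattern, interaction settings, and property entries below are hypothetical placeholders for illustration, not values taken from the example above:

# Hypothetical call; the keys mimic the autotest parameter file, the values are made up.
confs = ["confs/mp-*"]                    # glob patterns for configuration directories
inter_param = {
    "type": "vasp",                       # calculator type
    "incar": "vasp_input/INCAR",
    "potcar_prefix": "vasp_input",
}
property_list = [
    {"type": "eos"},                      # built from scratch, suffix "00"
    {"type": "elastic", "skip": True},    # filtered out by the jj.get("skip") check
]
make_property(confs, inter_param, property_list)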
Example #2
File: reaction.py  Project: zhuhui-in/dpgen
def gen_init_reaction(args):
    try:
        import ruamel
        from monty.serialization import loadfn, dumpfn
        warnings.simplefilter('ignore',
                              ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
        jdata = loadfn(args.PARAM)
        if args.MACHINE is not None:
            mdata = loadfn(args.MACHINE)
    except Exception:
        with open(args.PARAM, 'r') as fp:
            jdata = json.load(fp)
        if args.MACHINE is not None:
            with open(args.MACHINE, "r") as fp:
                mdata = json.load(fp)

    record = "record.reaction"
    iter_rec = -1
    numb_task = 7
    if os.path.isfile(record):
        with open(record) as frec:
            for line in frec:
                iter_rec = int(line.strip())
        dlog.info("continue from task %02d" % iter_rec)
    for ii in range(numb_task):
        sepline(str(ii), '-')
        if ii <= iter_rec:
            continue
        elif ii == 0:
            link_reaxff(jdata)
        elif ii == 1:
            dispatcher = make_dispatcher(mdata["reaxff_machine"])
            run_reaxff(jdata, mdata, dispatcher)
        elif ii == 2:
            link_trj(jdata)
        elif ii == 3:
            dispatcher = make_dispatcher(mdata["build_machine"])
            run_build_dataset(jdata, mdata, dispatcher)
        elif ii == 4:
            link_fp_input()
        elif ii == 5:
            dispatcher = make_dispatcher(mdata["fp_machine"])
            run_fp(jdata, mdata, dispatcher)
        elif ii == 6:
            convert_data(jdata)
        with open(record, "a") as frec:
            frec.write(str(ii) + '\n')
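The record file gives gen_init_reaction a simple resume mechanism: each finished task index is appended to record.reaction, and on restart every index up to the last recorded one is skipped. A self-contained sketch of the same pattern, with placeholder task callables:

import os

def run_with_record(tasks, record="record.reaction"):
    # Read the index of the last completed task, if a record file exists.
    last_done = -1
    if os.path.isfile(record):
        with open(record) as frec:
            for line in frec:
                last_done = int(line.strip())
    for idx, task in enumerate(tasks):
        if idx <= last_done:
            continue                          # finished in a previous run
        task()                                # run this step
        with open(record, "a") as frec:
            frec.write(str(idx) + "\n")       # checkpoint after each step

run_with_record([lambda: print("step 0"), lambda: print("step 1")])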
Example #3
def run_property(confs, inter_param, property_list, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    processes = len(property_list)
    pool = Pool(processes=processes)
    print("Submit job via %d processes" % processes)
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    task_list = []
    work_path_list = []
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            # determine the suffix: from scratch or refine
            # ...
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                suffix = 'reprod'
            else:
                suffix = '00'

            property_type = jj['type']
            path_to_work = os.path.abspath(
                os.path.join(ii, property_type + '_' + suffix))

            work_path_list.append(path_to_work)
            tmp_task_list = glob.glob(
                os.path.join(path_to_work, 'task.[0-9]*[0-9]'))
            tmp_task_list.sort()
            task_list.append(tmp_task_list)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            # dispatch the tasks
            # the POSCAR path here is only a placeholder; it is not actually read
            virtual_calculator = make_calculator(inter_param_prop, "POSCAR")
            forward_files = virtual_calculator.forward_files(property_type)
            forward_common_files = virtual_calculator.forward_common_files(
                property_type)
            backward_files = virtual_calculator.backward_files(property_type)
            #    backward_files += logs
            # ...
            inter_type = inter_param_prop['type']
            # vasp
            if inter_type == "vasp":
                mdata = decide_fp_machine(mdata)
            elif inter_type in lammps_task_type:
                mdata = decide_model_devi_machine(mdata)
            else:
                raise RuntimeError("unknown task %s, something wrong" %
                                   inter_type)

            work_path = path_to_work
            all_task = tmp_task_list
            run_tasks = util.collect_task(all_task, inter_type)
            if len(run_tasks) == 0:
                return
            else:
                ret = pool.apply_async(worker, (
                    work_path,
                    all_task,
                    forward_common_files,
                    forward_files,
                    backward_files,
                    mdata,
                    inter_type,
                ))
            # run_tasks = [os.path.basename(ii) for ii in all_task]
            # machine, resources, command, group_size = util.get_machine_info(mdata, inter_type)
            # disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
            # disp.run_jobs(resources,
            #               command,
            #               work_path,
            #               run_tasks,
            #               group_size,
            #               forward_common_files,
            #               forward_files,
            #               backward_files,
            #               outlog='outlog',
            #               errlog='errlog')
    pool.close()
    pool.join()
    if ret.successful():
        print('finished')
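Note that only the AsyncResult of the last submitted job is bound to ret here, so the final successful() check covers a single job (and ret is unbound if nothing was submitted); Example #5 below keeps every result and checks them all. The underlying multiprocessing pattern, reduced to a minimal sketch with a placeholder payload:

from multiprocessing import Pool

def square(x):
    return x * x                              # placeholder payload

if __name__ == "__main__":
    pool = Pool(processes=4)
    results = [pool.apply_async(square, (n,)) for n in range(8)]
    pool.close()
    pool.join()
    for i, res in enumerate(results):
        if not res.successful():              # only valid once the job has finished
            raise RuntimeError("Job %d failed" % i)
    print("%d jobs are finished" % len(results))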
Example #4
def run_iter(param_file, machine_file):
    """ init (iter 0): init_pick

    tasks (iter > 0):
    00 make_train (same as generator)
    01 run_train (same as generator)
    02 post_train (same as generator)
    03 make_model_devi
    04 run_model_devi
    05 post_model_devi
    06 make_fp
    07 run_fp (same as generator)
    08 post_fp (same as generator)
    """
    # TODO: function of handling input json should be combined as one function
    try:
        import ruamel
        from monty.serialization import loadfn, dumpfn
        warnings.simplefilter('ignore',
                              ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
        jdata = loadfn(param_file)
        mdata = loadfn(machine_file)
    except Exception:
        with open(param_file, 'r') as fp:
            jdata = json.load(fp)
        with open(machine_file, 'r') as fp:
            mdata = json.load(fp)

    if jdata.get('pretty_print', False):
        fparam = SHORT_CMD+'_' + \
            param_file.split('.')[0]+'.'+jdata.get('pretty_format', 'json')
        dumpfn(jdata, fparam, indent=4)
        fmachine = SHORT_CMD+'_' + \
            machine_file.split('.')[0]+'.'+jdata.get('pretty_format', 'json')
        dumpfn(mdata, fmachine, indent=4)

    if mdata.get('handlers', None):
        if mdata['handlers'].get('smtp', None):
            que = queue.Queue(-1)
            queue_handler = logging.handlers.QueueHandler(que)
            smtp_handler = logging.handlers.SMTPHandler(
                **mdata['handlers']['smtp'])
            listener = logging.handlers.QueueListener(que, smtp_handler)
            dlog.addHandler(queue_handler)
            listener.start()

    max_tasks = 10000
    numb_task = 9
    record = "record.dpgen"
    iter_rec = [0, -1]
    if os.path.isfile(record):
        with open(record) as frec:
            for line in frec:
                iter_rec = [int(x) for x in line.split()]
        dlog.info("continue from iter %03d task %02d" %
                  (iter_rec[0], iter_rec[1]))

    cont = True
    ii = -1
    while cont:
        ii += 1
        iter_name = make_iter_name(ii)
        sepline(iter_name, '=')
        for jj in range(numb_task):
            if ii * max_tasks + jj <= iter_rec[0] * max_tasks + iter_rec[1]:
                continue
            task_name = "task %02d" % jj
            sepline("{} {}".format(iter_name, task_name), '-')
            jdata['model_devi_jobs'] = [{} for _ in range(ii + 1)]
            if ii == 0 and jj < 6:
                if jj == 0:
                    log_iter("init_pick", ii, jj)
                    init_pick(ii, jdata, mdata)
                dlog.info("first iter, skip step 1-5")
            elif jj == 0:
                log_iter("make_train", ii, jj)
                make_train(ii, jdata, mdata)
            elif jj == 1:
                log_iter("run_train", ii, jj)
                mdata = decide_train_machine(mdata)
                disp = make_dispatcher(mdata['train_machine'])
                run_train(ii, jdata, mdata)
            elif jj == 2:
                log_iter("post_train", ii, jj)
                post_train(ii, jdata, mdata)
            elif jj == 3:
                log_iter("make_model_devi", ii, jj)
                cont = make_model_devi(ii, jdata, mdata)
                if not cont or ii >= jdata.get("stop_iter", ii + 1):
                    break
            elif jj == 4:
                log_iter("run_model_devi", ii, jj)
                mdata = decide_model_devi_machine(mdata)
                disp = make_dispatcher(mdata['model_devi_machine'])
                run_model_devi(ii, jdata, mdata, disp)
            elif jj == 5:
                log_iter("post_model_devi", ii, jj)
                post_model_devi(ii, jdata, mdata)
            elif jj == 6:
                log_iter("make_fp", ii, jj)
                make_fp(ii, jdata, mdata)
            elif jj == 7:
                log_iter("run_fp", ii, jj)
                if jdata.get("labeled", False):
                    dlog.info("already have labeled data, skip run_fp")
                else:
                    mdata = decide_fp_machine(mdata)
                    disp = make_dispatcher(mdata['fp_machine'])
                    run_fp(ii, jdata, mdata)
            elif jj == 8:
                log_iter("post_fp", ii, jj)
                if jdata.get("labeled", False):
                    dlog.info("already have labeled data, skip post_fp")
                else:
                    post_fp(ii, jdata)
            else:
                raise RuntimeError("unknown task %d, something wrong" % jj)
            record_iter(record, ii, jj)
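The record.dpgen checkpoint stores one "iteration task" pair per line, and the resume test above compares ii * max_tasks + jj against the last recorded pair. A minimal sketch of that bookkeeping; this record_iter is a stand-in written from the usage above, not the dpgen implementation:

def record_iter(record, ii, jj):
    # Append the (iteration, task) pair that just finished.
    with open(record, "a") as frec:
        frec.write("%d %d\n" % (ii, jj))

def already_done(ii, jj, iter_rec, max_tasks=10000):
    # A pair is skipped when it does not come after the last recorded pair.
    return ii * max_tasks + jj <= iter_rec[0] * max_tasks + iter_rec[1]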
Example #5
def run_property(confs, inter_param, property_list, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    processes = len(property_list)
    pool = Pool(processes=processes)
    print("Submit job via %d processes" % processes)
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    task_list = []
    work_path_list = []
    multiple_ret = []
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            # determine the suffix: from scratch or refine
            # ...
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                suffix = 'reprod'
            else:
                suffix = '00'

            property_type = jj['type']
            path_to_work = os.path.abspath(
                os.path.join(ii, property_type + '_' + suffix))

            work_path_list.append(path_to_work)
            tmp_task_list = glob.glob(
                os.path.join(path_to_work, 'task.[0-9]*[0-9]'))
            tmp_task_list.sort()
            task_list.append(tmp_task_list)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            # dispatch the tasks
            # the POSCAR path here is only a placeholder; it is not actually read
            virtual_calculator = make_calculator(inter_param_prop, "POSCAR")
            forward_files = virtual_calculator.forward_files(property_type)
            forward_common_files = virtual_calculator.forward_common_files(
                property_type)
            backward_files = virtual_calculator.backward_files(property_type)
            #    backward_files += logs
            # ...
            inter_type = inter_param_prop['type']
            # vasp
            if inter_type == "vasp":
                mdata = convert_mdata(mdata, ["fp"])
            elif inter_type in lammps_task_type:
                mdata = convert_mdata(mdata, ["model_devi"])
            else:
                raise RuntimeError("unknown task %s, something wrong" %
                                   inter_type)

            work_path = path_to_work
            all_task = tmp_task_list
            run_tasks = util.collect_task(all_task, inter_type)
            if len(run_tasks) == 0:
                continue
            else:
                ret = pool.apply_async(worker, (
                    work_path,
                    all_task,
                    forward_common_files,
                    forward_files,
                    backward_files,
                    mdata,
                    inter_type,
                ))
                multiple_ret.append(ret)
    pool.close()
    pool.join()
    for ii in range(len(multiple_ret)):
        if not multiple_ret[ii].successful():
            raise RuntimeError("Job %d is not successful!" % ii)
    print('%d jobs are finished' % len(multiple_ret))
Example #6
File: VASP.py  Project: picodase/dpgen
    def make_input_file(self, output_dir, task_type, task_param):
        sepline(ch=output_dir)
        dumpfn(task_param, os.path.join(output_dir, 'task.json'), indent=4)

        assert (os.path.exists(self.incar)), 'no INCAR file for relaxation'
        relax_incar_path = os.path.abspath(self.incar)
        incar_relax = incar_upper(Incar.from_file(relax_incar_path))

        # deal with relaxation

        cal_type = task_param['cal_type']
        cal_setting = task_param['cal_setting']

        # user input INCAR for property calculation
        if 'input_prop' in cal_setting and os.path.isfile(
                cal_setting['input_prop']):
            incar_prop = os.path.abspath(cal_setting['input_prop'])
            incar = incar_upper(Incar.from_file(incar_prop))

        # revise INCAR based on the INCAR provided in the "interaction"
        else:
            incar = incar_relax
            if cal_type == 'relaxation':
                relax_pos = cal_setting['relax_pos']
                relax_shape = cal_setting['relax_shape']
                relax_vol = cal_setting['relax_vol']
                if [relax_pos, relax_shape, relax_vol] == [True, False, False]:
                    isif = 2
                elif [relax_pos, relax_shape, relax_vol] == [True, True, True]:
                    isif = 3
                elif [relax_pos, relax_shape,
                      relax_vol] == [True, True, False]:
                    isif = 4
                elif [relax_pos, relax_shape,
                      relax_vol] == [False, True, False]:
                    isif = 5
                elif [relax_pos, relax_shape,
                      relax_vol] == [False, True, True]:
                    isif = 6
                elif [relax_pos, relax_shape,
                      relax_vol] == [False, False, True]:
                    isif = 7
                elif [relax_pos, relax_shape,
                      relax_vol] == [False, False, False]:
                    nsw = 0
                    isif = 2
                    if not ('NSW' in incar and incar.get('NSW') == nsw):
                        dlog.info("%s setting NSW to %d" %
                                  (self.make_input_file.__name__, nsw))
                        incar['NSW'] = nsw
                else:
                    raise RuntimeError(
                        "not supported calculation setting for VASP")

                if not ('ISIF' in incar and incar.get('ISIF') == isif):
                    dlog.info("%s setting ISIF to %d" %
                              (self.make_input_file.__name__, isif))
                    incar['ISIF'] = isif

            elif cal_type == 'static':
                nsw = 0
                if not ('NSW' in incar and incar.get('NSW') == nsw):
                    dlog.info("%s setting NSW to %d" %
                              (self.make_input_file.__name__, nsw))
                    incar['NSW'] = nsw

            else:
                raise RuntimeError("not supported calculation type for VASP")

            if 'ediff' in cal_setting:
                dlog.info(
                    "%s setting EDIFF to %s" %
                    (self.make_input_file.__name__, cal_setting['ediff']))
                incar['EDIFF'] = cal_setting['ediff']

            if 'ediffg' in cal_setting:
                dlog.info(
                    "%s setting EDIFFG to %s" %
                    (self.make_input_file.__name__, cal_setting['ediffg']))
                incar['EDIFFG'] = cal_setting['ediffg']

            if 'encut' in cal_setting:
                dlog.info(
                    "%s setting ENCUT to %s" %
                    (self.make_input_file.__name__, cal_setting['encut']))
                incar['ENCUT'] = cal_setting['encut']

            if 'kspacing' in cal_setting:
                dlog.info(
                    "%s setting KSAPCING to %s" %
                    (self.make_input_file.__name__, cal_setting['kspacing']))
                incar['KSAPCING'] = cal_setting['kspacing']

            if 'kgamma' in cal_setting:
                dlog.info(
                    "%s setting KGAMMA to %s" %
                    (self.make_input_file.__name__, cal_setting['kgamma']))
                incar['KGAMMA'] = cal_setting['kgamma']

        # Incar.get() returns None instead of raising KeyError, so check explicitly.
        if 'KSPACING' not in incar:
            raise RuntimeError("KSPACING must be given in INCAR")
        kspacing = incar.get('KSPACING')

        if 'KGAMMA' in incar:
            kgamma = incar.get('KGAMMA')
        else:
            kgamma = False

        incar.write_file(os.path.join(output_dir, '../INCAR'))
        cwd = os.getcwd()
        os.chdir(output_dir)
        if not os.path.islink('INCAR'):
            os.symlink('../INCAR', 'INCAR')
        elif os.readlink('INCAR') != '../INCAR':
            os.remove('INCAR')
            os.symlink('../INCAR', 'INCAR')
        os.chdir(cwd)
        ret = vasp.make_kspacing_kpoints(self.path_to_poscar, kspacing, kgamma)
        kp = Kpoints.from_string(ret)
        kp.write_file(os.path.join(output_dir, "KPOINTS"))
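The chain of elif branches above encodes the standard VASP ISIF values (which degrees of freedom are relaxed). As a compact reference, the same mapping can be written as a lookup table; this dict only restates the combinations handled above:

# (relax_pos, relax_shape, relax_vol) -> ISIF, mirroring the branches above
ISIF_MAP = {
    (True, False, False): 2,    # ions only
    (True, True, True): 3,      # ions + cell shape + volume
    (True, True, False): 4,     # ions + shape, fixed volume
    (False, True, False): 5,    # shape only
    (False, True, True): 6,     # shape + volume
    (False, False, True): 7,    # volume only
    (False, False, False): 2,   # nothing relaxed; NSW is also set to 0
}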