Code example #1
    def compute(self, output_file, print_file, path_to_work):
        """
        Postprocess the finished tasks to compute the property.
        Output the result to a json database

        Parameters
        ----------
        output_file:
                The file to which the property is written in JSON format
        print_file:
                The file to which the property is written in plain-text format
        path_to_work:
                The working directory where the computational tasks are located.
        """
        path_to_work = os.path.abspath(path_to_work)
        task_dirs = glob.glob(os.path.join(path_to_work, 'task.[0-9]*[0-9]'))
        task_dirs.sort()
        all_res = []
        for ii in task_dirs:
            with open(os.path.join(ii, 'inter.json')) as fp:
                idata = json.load(fp)
            poscar = os.path.join(ii, 'POSCAR')
            task = make_calculator(idata, poscar)
            res = task.compute(ii)
            dumpfn(res, os.path.join(ii, 'result_task.json'), indent=4)
            # all_res.append(res)
            all_res.append(os.path.join(ii, 'result_task.json'))

        # cwd = os.getcwd()
        # os.chdir(path_to_work)
        res, ptr = self._compute_lower(output_file, task_dirs, all_res)
        #        with open(output_file, 'w') as fp:
        #            json.dump(fp, res, indent=4)
        with open(print_file, 'w') as fp:
            fp.write(ptr)
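In dpgen's auto_test workflow this compute() method is normally reached through a property object rather than called directly. The snippet below is a minimal, hypothetical driver: make_property_instance appears in code example #2, but the parameter dictionary and the paths here are illustrative placeholders, not values taken from the project.

# Hypothetical usage sketch; prop_param and the paths are placeholders.
import os

prop_param = {"type": "eos"}               # assumed minimal property definition
path_to_work = "confs/mp-100/eos_00"       # holds task.0000, task.0001, ...

prop = make_property_instance(prop_param)
prop.compute(os.path.join(path_to_work, "result.json"),  # JSON output
             os.path.join(path_to_work, "result.out"),   # plain-text output
             path_to_work)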
Code example #2
File: common_prop.py Project: picodase/dpgen
def make_property(confs, inter_param, property_list):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                do_refine = True
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                do_refine = False
                suffix = 'reprod'
            else:
                do_refine = False
                suffix = '00'
            # generate working directory like mp-xxx/eos_00 if jj['type'] == 'eos'
            # handle the exception that the working directory exists
            # ...

            # determine the suffix: from scratch or refine
            # ...

            property_type = jj['type']
            path_to_equi = os.path.join(ii, 'relaxation', 'relax_task')
            path_to_work = os.path.join(ii, property_type + '_' + suffix)

            if os.path.exists(path_to_work):
                dlog.warning('%s already exists' % path_to_work)
            else:
                os.makedirs(path_to_work)

            prop = make_property_instance(jj)
            task_list = prop.make_confs(path_to_work, path_to_equi, do_refine)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            for kk in task_list:
                poscar = os.path.join(kk, 'POSCAR')
                inter = make_calculator(inter_param_prop, poscar)
                inter.make_potential_files(kk)
                dlog.debug(prop.task_type())  ### debug
                inter.make_input_file(kk, prop.task_type(), prop.task_param())

            prop.post_process(
                task_list
            )  # generate same KPOINTS file for elastic when doing VASP
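The keys that make_property reads from each property_list entry suggest the following shape. This is a hypothetical illustration assembled only from the accesses above (type, skip, init_from_suffix/output_suffix, reproduce, cal_setting.overwrite_interaction); it is not a complete schema.

# Hypothetical property_list entries, inferred from the keys accessed above.
property_list = [
    {"type": "eos"},                    # from scratch -> working dir eos_00
    {"type": "elastic",
     "init_from_suffix": "00",          # both suffix keys present -> refine
     "output_suffix": "01"},            # working dir becomes elastic_01
    {"type": "eos",
     "reproduce": True,                 # working dir becomes eos_reprod
     "cal_setting": {
         "overwrite_interaction": {"type": "vasp"}  # overrides inter_param for this property
     }},
    {"type": "surface", "skip": True},  # entry is skipped entirely
]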
Code example #3
def run_equi(confs,
             inter_param,
             mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    # generate a list of task names like mp-xxx/relaxation/relax_task
    # ...
    work_path_list = []
    for ii in conf_dirs:
        work_path_list.append(os.path.abspath(os.path.join(ii, 'relaxation')))
    all_task = []
    for ii in work_path_list:
        all_task.append(os.path.join(ii, 'relax_task'))

    inter_type = inter_param['type']
    # vasp
    if inter_type == "vasp":
        mdata = decide_fp_machine(mdata)
    elif inter_type in lammps_task_type:
        mdata = decide_model_devi_machine(mdata)
    else:
        raise RuntimeError("unknown task %s, something wrong" % inter_type)

    # dispatch the tasks
    # POSCAR here is useless
    virtual_calculator = make_calculator(inter_param, "POSCAR")
    forward_files = virtual_calculator.forward_files()
    forward_common_files = virtual_calculator.forward_common_files()
    backward_files = virtual_calculator.backward_files()
    #    backward_files += logs
    # ...
    run_tasks = util.collect_task(all_task, inter_type)
    if len(run_tasks) == 0:
        return
    else:
        run_tasks = [os.path.basename(ii) for ii in all_task]
        machine, resources, command, group_size = util.get_machine_info(mdata, inter_type)
        print('%d tasks will be submitted' % len(run_tasks))
        for ii in range(len(work_path_list)):
            work_path = work_path_list[ii]
            disp = make_dispatcher(machine, resources, work_path, [run_tasks[ii]], group_size)
            print("%s --> Runing... "%(work_path))
            disp.run_jobs(resources,
                          command,
                          work_path,
                          [run_tasks[ii]],
                          group_size,
                          forward_common_files,
                          forward_files,
                          backward_files,
                          outlog='outlog',
                          errlog='errlog')
Code example #4
def post_equi(confs, inter_param):
    # find all POSCARs and their name like mp-xxx
    # ...
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    task_dirs = []
    for ii in conf_dirs:
        task_dirs.append(os.path.abspath(os.path.join(ii, 'relaxation', 'relax_task')))
    task_dirs.sort()

    # generate a list of task names like mp-xxx/relaxation
    # ...

    # dump the relaxation result.
    for ii in task_dirs:
        poscar = os.path.join(ii, 'POSCAR')
        inter = make_calculator(inter_param, poscar)
        res = inter.compute(ii)
        dumpfn(res, os.path.join(ii, 'result.json'), indent=4)
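Code examples #3, #4, and #6 together form the relaxation stage: make_equi prepares mp-xxx/relaxation/relax_task, run_equi dispatches the jobs, and post_equi writes result.json for each task. A minimal, hypothetical call sequence follows; the glob patterns and parameter dictionaries are placeholders, not project defaults.

# Hypothetical end-to-end driver for the relaxation stage.
confs = ["confs/mp-*", "confs/std-*"]            # glob patterns, as consumed above
inter_param = {"type": "vasp",
               "potcars": {"Al": "POTCAR.Al"}}   # placeholder interaction settings
relax_param = {}                                 # cal_setting defaults are filled in by make_equi
mdata = {}                                       # machine settings; site-specific

make_equi(confs, inter_param, relax_param)   # build relax_task directories and input files
run_equi(confs, inter_param, mdata)          # dispatch the relaxation jobs
post_equi(confs, inter_param)                # dump result.json for every relax_task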
Code example #5
File: common_prop.py Project: picodase/dpgen
def run_property(confs, inter_param, property_list, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    processes = len(property_list)
    pool = Pool(processes=processes)
    print("Submit job via %d processes" % processes)
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    task_list = []
    work_path_list = []
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            # determine the suffix: from scratch or refine
            # ...
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                suffix = 'reprod'
            else:
                suffix = '00'

            property_type = jj['type']
            path_to_work = os.path.abspath(
                os.path.join(ii, property_type + '_' + suffix))

            work_path_list.append(path_to_work)
            tmp_task_list = glob.glob(
                os.path.join(path_to_work, 'task.[0-9]*[0-9]'))
            tmp_task_list.sort()
            task_list.append(tmp_task_list)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            # dispatch the tasks
            # POSCAR here is useless
            virtual_calculator = make_calculator(inter_param_prop, "POSCAR")
            forward_files = virtual_calculator.forward_files(property_type)
            forward_common_files = virtual_calculator.forward_common_files(
                property_type)
            backward_files = virtual_calculator.backward_files(property_type)
            #    backward_files += logs
            # ...
            inter_type = inter_param_prop['type']
            # vasp
            if inter_type == "vasp":
                mdata = decide_fp_machine(mdata)
            elif inter_type in lammps_task_type:
                mdata = decide_model_devi_machine(mdata)
            else:
                raise RuntimeError("unknown task %s, something wrong" %
                                   inter_type)

            work_path = path_to_work
            all_task = tmp_task_list
            run_tasks = util.collect_task(all_task, inter_type)
            if len(run_tasks) == 0:
                continue
            else:
                ret = pool.apply_async(worker, (
                    work_path,
                    all_task,
                    forward_common_files,
                    forward_files,
                    backward_files,
                    mdata,
                    inter_type,
                ))
            # run_tasks = [os.path.basename(ii) for ii in all_task]
            # machine, resources, command, group_size = util.get_machine_info(mdata, inter_type)
            # disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
            # disp.run_jobs(resources,
            #               command,
            #               work_path,
            #               run_tasks,
            #               group_size,
            #               forward_common_files,
            #               forward_files,
            #               backward_files,
            #               outlog='outlog',
            #               errlog='errlog')
    pool.close()
    pool.join()
    if ret.successful():
        print('finished')
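The worker passed to pool.apply_async is not shown on this page. Judging from the commented-out dispatcher calls above, it presumably wraps make_dispatcher and run_jobs for a single work path; the sketch below is a guess along those lines, not the project's actual implementation.

# Hypothetical worker, reconstructed from the commented-out dispatcher code above.
def worker(work_path, all_task, forward_common_files, forward_files,
           backward_files, mdata, inter_type):
    run_tasks = [os.path.basename(ii) for ii in all_task]
    machine, resources, command, group_size = util.get_machine_info(mdata, inter_type)
    disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size)
    disp.run_jobs(resources,
                  command,
                  work_path,
                  run_tasks,
                  group_size,
                  forward_common_files,
                  forward_files,
                  backward_files,
                  outlog='outlog',
                  errlog='errlog')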
Code example #6
def make_equi(confs, inter_param, relax_param):
    # find all POSCARs and their name like mp-xxx
    # ...
    dlog.debug('debug info make equi')
    if 'type_map' in inter_param:
        ele_list = [key for key in inter_param['type_map'].keys()]
    else:
        ele_list = [key for key in inter_param['potcars'].keys()]
    # ele_list = inter_param['type_map']
    dlog.debug("ele_list %s" % ':'.join(ele_list))
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()

    # generate a list of task names like mp-xxx/relaxation/relax_task
    # ...
    cwd = os.getcwd()
    # generate poscar for single element crystal
    if len(ele_list) == 1:
        for ii in conf_dirs:
            os.chdir(ii)
            crys_type = ii.split('/')[-1]
            dlog.debug('crys_type: %s' % crys_type)
            dlog.debug('pwd: %s' % os.getcwd())
            if crys_type == 'std-fcc':
                if not os.path.exists('POSCAR'):
                    crys.fcc1(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-hcp':
                if not os.path.exists('POSCAR'):
                    crys.hcp(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-dhcp':
                if not os.path.exists('POSCAR'):
                    crys.dhcp(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-bcc':
                if not os.path.exists('POSCAR'):
                    crys.bcc(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-diamond':
                if not os.path.exists('POSCAR'):
                    crys.diamond(ele_list[0]).to('POSCAR', 'POSCAR')
            elif crys_type == 'std-sc':
                if not os.path.exists('POSCAR'):
                    crys.sc(ele_list[0]).to('POSCAR', 'POSCAR')

            os.chdir(cwd)
    task_dirs = []
    # make task directories like mp-xxx/relaxation/relax_task
    # if the task directory already exists, print a warning and reuse it.
    # ...
    for ii in conf_dirs:
        crys_type = ii.split('/')[-1]
        dlog.debug('crys_type: %s' % crys_type)

        if 'mp-' in crys_type and not os.path.exists(os.path.join(
                ii, 'POSCAR')):
            get_structure(crys_type).to('POSCAR', os.path.join(ii, 'POSCAR'))

        poscar = os.path.abspath(os.path.join(ii, 'POSCAR'))
        if not os.path.exists(poscar):
            raise FileNotFoundError('no configuration for autotest')
        relax_dirs = os.path.abspath(
            os.path.join(ii, 'relaxation', 'relax_task')
        )  # to be consistent with property in make dispatcher
        if os.path.exists(relax_dirs):
            dlog.warning('%s already exists' % relax_dirs)
        else:
            os.makedirs(relax_dirs)
        task_dirs.append(relax_dirs)
        os.chdir(relax_dirs)
        # copy POSCARs to mp-xxx/relaxation/relax_task
        # ...
        if os.path.isfile('POSCAR'):
            os.remove('POSCAR')
        os.symlink(os.path.relpath(poscar), 'POSCAR')
        os.chdir(cwd)
    task_dirs.sort()
    # generate task files
    relax_param['cal_type'] = 'relaxation'
    if 'cal_setting' not in relax_param:
        relax_param['cal_setting'] = {
            "relax_pos": True,
            "relax_shape": True,
            "relax_vol": True
        }
    else:
        # fill in each missing flag independently; an elif chain would only set the first missing one
        relax_param['cal_setting'].setdefault('relax_pos', True)
        relax_param['cal_setting'].setdefault('relax_shape', True)
        relax_param['cal_setting'].setdefault('relax_vol', True)

    for ii in task_dirs:
        poscar = os.path.join(ii, 'POSCAR')
        dlog.debug('task_dir %s' % ii)
        inter = make_calculator(inter_param, poscar)
        inter.make_potential_files(ii)
        inter.make_input_file(ii, 'relaxation', relax_param)
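make_equi obtains the element list from either inter_param['type_map'] or inter_param['potcars'], and run_equi branches on inter_param['type']. The dictionaries below are hypothetical minimal examples built only from those key accesses; the POTCAR path, the type index, and the assumption that "deepmd" belongs to lammps_task_type are placeholders.

# Hypothetical inter_param dictionaries, based only on the keys read above.
inter_param_vasp = {
    "type": "vasp",                    # routed to the fp machine settings
    "potcars": {"Al": "POTCAR.Al"},    # element -> POTCAR file (placeholder path)
}

inter_param_deepmd = {
    "type": "deepmd",                  # assumed to be a member of lammps_task_type
    "type_map": {"Al": 0},             # element -> type index (placeholder)
}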
Code example #7
def run_equi(confs, inter_param, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    # generate a list of task names like mp-xxx/relaxation/relax_task
    # ...
    work_path_list = []
    for ii in conf_dirs:
        work_path_list.append(os.path.abspath(os.path.join(ii, 'relaxation')))
    all_task = []
    for ii in work_path_list:
        all_task.append(os.path.join(ii, 'relax_task'))

    inter_type = inter_param['type']
    # vasp
    if inter_type == "vasp":
        mdata = convert_mdata(mdata, ["fp"])
    elif inter_type in lammps_task_type:
        mdata = convert_mdata(mdata, ["model_devi"])
    else:
        raise RuntimeError("unknown task %s, something wrong" % inter_type)

    # dispatch the tasks
    # POSCAR here is useless
    virtual_calculator = make_calculator(inter_param, "POSCAR")
    forward_files = virtual_calculator.forward_files()
    forward_common_files = virtual_calculator.forward_common_files()
    backward_files = virtual_calculator.backward_files()
    #    backward_files += logs
    # ...
    run_tasks = util.collect_task(all_task, inter_type)
    if len(run_tasks) == 0:
        return
    else:
        # if LooseVersion()
        run_tasks = [os.path.basename(ii) for ii in all_task]
        machine, resources, command, group_size = util.get_machine_info(
            mdata, inter_type)
        print('%d tasks will be submitted' % len(run_tasks))
        api_version = mdata.get('api_version', '0.9')
        for ii in range(len(work_path_list)):
            work_path = work_path_list[ii]
            print("%s --> Running..." % work_path)

            if LooseVersion(api_version) < LooseVersion('1.0'):
                warnings.warn(
                    "the dpdispatcher will be updated to a new version, "
                    "and the interface may change. Please check the documents for more details."
                )
                disp = make_dispatcher(machine, resources, work_path,
                                       [run_tasks[ii]], group_size)
                disp.run_jobs(resources,
                              command,
                              work_path, [run_tasks[ii]],
                              group_size,
                              forward_common_files,
                              forward_files,
                              backward_files,
                              outlog='outlog',
                              errlog='errlog')
            elif LooseVersion(api_version) >= LooseVersion('1.0'):
                submission = make_submission(
                    mdata_machine=machine,
                    mdata_resource=resources,
                    commands=[command],
                    work_path=work_path,
                    run_tasks=[run_tasks[ii]],
                    group_size=group_size,
                    forward_common_files=forward_common_files,
                    forward_files=forward_files,
                    backward_files=backward_files,
                    outlog='outlog',
                    errlog='errlog')
                submission.run_submission()
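This version switches on mdata['api_version']: below 1.0 it keeps the old make_dispatcher/run_jobs path, and from 1.0 on it builds a dpdispatcher submission via make_submission. A hypothetical fragment opting into the newer path; only the api_version key is taken from the code above, everything else in mdata is site-specific and omitted.

# Hypothetical machine-settings fragment selecting the dpdispatcher (>= 1.0) branch.
mdata = {
    "api_version": "1.0",
    # machine / resources / command entries are site-specific and omitted here
}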
Code example #8
def run_property(confs, inter_param, property_list, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    # conf_dirs = glob.glob(confs)
    # conf_dirs.sort()
    processes = len(property_list)
    pool = Pool(processes=processes)
    print("Submit job via %d processes" % processes)
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()
    task_list = []
    work_path_list = []
    multiple_ret = []
    for ii in conf_dirs:
        sepline(ch=ii, screen=True)
        for jj in property_list:
            # determine the suffix: from scratch or refine
            # ...
            if jj.get("skip", False):
                continue
            if 'init_from_suffix' in jj and 'output_suffix' in jj:
                suffix = jj['output_suffix']
            elif 'reproduce' in jj and jj['reproduce']:
                suffix = 'reprod'
            else:
                suffix = '00'

            property_type = jj['type']
            path_to_work = os.path.abspath(
                os.path.join(ii, property_type + '_' + suffix))

            work_path_list.append(path_to_work)
            tmp_task_list = glob.glob(
                os.path.join(path_to_work, 'task.[0-9]*[0-9]'))
            tmp_task_list.sort()
            task_list.append(tmp_task_list)

            inter_param_prop = inter_param
            if 'cal_setting' in jj and 'overwrite_interaction' in jj[
                    'cal_setting']:
                inter_param_prop = jj['cal_setting']['overwrite_interaction']

            # dispatch the tasks
            # POSCAR here is useless
            virtual_calculator = make_calculator(inter_param_prop, "POSCAR")
            forward_files = virtual_calculator.forward_files(property_type)
            forward_common_files = virtual_calculator.forward_common_files(
                property_type)
            backward_files = virtual_calculator.backward_files(property_type)
            #    backward_files += logs
            # ...
            inter_type = inter_param_prop['type']
            # vasp
            if inter_type == "vasp":
                mdata = convert_mdata(mdata, ["fp"])
            elif inter_type in lammps_task_type:
                mdata = convert_mdata(mdata, ["model_devi"])
            else:
                raise RuntimeError("unknown task %s, something wrong" %
                                   inter_type)

            work_path = path_to_work
            all_task = tmp_task_list
            run_tasks = util.collect_task(all_task, inter_type)
            if len(run_tasks) == 0:
                continue
            else:
                ret = pool.apply_async(worker, (
                    work_path,
                    all_task,
                    forward_common_files,
                    forward_files,
                    backward_files,
                    mdata,
                    inter_type,
                ))
                multiple_ret.append(ret)
    pool.close()
    pool.join()
    for ii in range(len(multiple_ret)):
        if not multiple_ret[ii].successful():
            raise RuntimeError("Job %d is not successful!" % ii)
    print('%d jobs are finished' % len(multiple_ret))
Code example #9
def run_equi(confs, inter_param, mdata):
    # find all POSCARs and their name like mp-xxx
    # ...
    conf_dirs = []
    for conf in confs:
        conf_dirs.extend(glob.glob(conf))
    conf_dirs.sort()

    processes = len(conf_dirs)
    pool = Pool(processes=processes)
    print("Submit job via %d processes" % processes)

    # generate a list of task names like mp-xxx/relaxation/relax_task
    # ...
    work_path_list = []
    for ii in conf_dirs:
        work_path_list.append(os.path.abspath(os.path.join(ii, 'relaxation')))
    all_task = []
    for ii in work_path_list:
        all_task.append(os.path.join(ii, 'relax_task'))

    inter_type = inter_param['type']
    # vasp
    if inter_type == "vasp":
        mdata = convert_mdata(mdata, ["fp"])
    elif inter_type in lammps_task_type:
        mdata = convert_mdata(mdata, ["model_devi"])
    else:
        raise RuntimeError("unknown task %s, something wrong" % inter_type)

    # dispatch the tasks
    # POSCAR here is useless
    virtual_calculator = make_calculator(inter_param, "POSCAR")
    forward_files = virtual_calculator.forward_files()
    forward_common_files = virtual_calculator.forward_common_files()
    backward_files = virtual_calculator.backward_files()
    #    backward_files += logs
    # ...
    run_tasks = util.collect_task(all_task, inter_type)
    if len(run_tasks) == 0:
        return
    else:
        run_tasks = [os.path.basename(ii) for ii in all_task]
        machine, resources, command, group_size = util.get_machine_info(
            mdata, inter_type)
        print('%d tasks will be submitted' % len(run_tasks))
        multiple_ret = []
        for ii in range(len(work_path_list)):
            work_path = work_path_list[ii]

            ret = pool.apply_async(worker, (
                work_path,
                run_tasks[ii],
                forward_common_files,
                forward_files,
                backward_files,
                mdata,
                inter_type,
            ))
            multiple_ret.append(ret)
        pool.close()
        pool.join()
        for ii in range(len(multiple_ret)):
            if not multiple_ret[ii].successful():
                raise RuntimeError(
                    "Task %d is not successful! work_path: %s " %
                    (ii, work_path_list[ii]))
        print('finished')