Example #1
def run_enhc(iter_index,
             json_file,
             machine_json,
             base_dir='./'):
    json_file = os.path.abspath(json_file)
    base_dir = os.path.abspath(base_dir) + "/"
    iter_name = make_iter_name(iter_index)
    work_path = base_dir + iter_name + "/" + enhc_name + "/"

    fp = open(json_file, 'r')
    jdata = json.load(fp)
    fp.close()
    gmx_prep = jdata["gmx_prep"]
    gmx_run = jdata["gmx_run"]
    enhc_thread = jdata["enhc_thread"]
    gmx_run = gmx_run + (" -nt %d" % enhc_thread)
    gmx_prep_log = "gmx_grompp.log"
    gmx_run_log = "gmx_mdrun.log"
    # look for trained graph files (*.pb) in the first walker directory
    # (assuming at least one walker)
    graph_files = glob.glob(work_path + (make_walker_name(0)) + "/*.pb")
    if len(graph_files) != 0:
        # trained networks are available: use the biased PLUMED input
        gmx_run = gmx_run + " -plumed " + enhc_plm
    else:
        # no trained networks yet: fall back to the enhc_bf_plm input
        gmx_run = gmx_run + " -plumed " + enhc_bf_plm
    gmx_prep_cmd = cmd_append_log(gmx_prep, gmx_prep_log)
    gmx_run_cmd = cmd_append_log(gmx_run, gmx_run_log)
    numb_walkers = jdata["numb_walkers"]

    all_task = list(
        filter(lambda x: os.path.isdir(x),
               glob.glob(work_path + "/[0-9]*[0-9]")))
    all_task.sort()

    all_task_basedir = [os.path.relpath(ii, work_path) for ii in all_task]
    print('run_enhc:work_path', work_path)
    print('run_enhc:gmx_prep_cmd:', gmx_prep_cmd)
    print('run_enhc:gmx_run_cmd:', gmx_run_cmd)
    print('run_enhc:all_task:', all_task)
    print('run_enhc:all_task_basedir:', all_task_basedir)

    machine = set_machine(machine_json, target="enhcMD")
    resources = set_resource(machine_json, target="enhcMD")

    gmx_prep_task = [Task(command=gmx_prep_cmd, task_work_path=ii,
                          outlog='gmx_grompp.log', errlog='gmx_grompp.log') for ii in all_task_basedir]
    gmx_prep_submission = Submission(
        work_base=work_path, machine=machine, resources=resources, task_list=gmx_prep_task)

    gmx_prep_submission.run_submission()

    gmx_run_task = [Task(command=gmx_run_cmd, task_work_path=ii,
                         outlog='gmx_mdrun.log', errlog='gmx_mdrun.log') for ii in all_task_basedir]
    gmx_run_submission = Submission(
        work_base=work_path, machine=machine, resources=resources, task_list=gmx_run_task)
    gmx_run_submission.run_submission()
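
The walker directories that run_enhc globs over ("[0-9]*[0-9]") come from naming helpers defined elsewhere in the package. A minimal sketch, assuming zero-padded names consistent with that glob pattern and the "%03d" task directories used in these examples (the exact formats are an assumption, not taken from the package):

# Minimal sketch of plausible directory-name helpers; the padding widths are assumptions.
def make_iter_name(iter_index):
    return "iter.%06d" % iter_index    # e.g. "iter.000000"

def make_walker_name(walker_index):
    return "%03d" % walker_index       # e.g. "000", "001", ...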
Example #2
def post_enhc(iter_index, json_file, machine_json, base_dir="./"):
    base_dir = os.path.abspath(base_dir) + "/"
    iter_name = make_iter_name(iter_index)
    work_path = base_dir + iter_name + "/" + enhc_name + "/"
    json_file = os.path.abspath(json_file)
    fp = open(json_file, 'r')
    jdata = json.load(fp)
    fp.close()
    gmx_split = jdata["gmx_split_traj"]
    gmx_split_log = "gmx_split.log"
    gmx_split_cmd = cmd_append_log(gmx_split, gmx_split_log)

    all_task = list(
        filter(lambda x: os.path.isdir(x),
               glob.glob(work_path + "/[0-9]*[0-9]")))
    all_task.sort()

    cwd = os.getcwd()
    numb_walkers = jdata["numb_walkers"]
    for ii in range(numb_walkers):
        walker_path = work_path + make_walker_name(ii) + "/"
        os.chdir(walker_path)
        if os.path.isdir("confs"):
            shutil.rmtree("confs")
        os.makedirs("confs")
        os.chdir(cwd)

    print('rid.py:post_enhc:gmx_split_cmd', gmx_split_cmd)
    print('rid.py:post_enhc:work path', work_path)

    machine = set_machine(machine_json, target="post_enhc")
    resources = set_resource(machine_json, target="post_enhc")
    all_task_relpath = [os.path.relpath(ii, work_path) for ii in all_task]
    gmx_split_task = [
        Task(command=gmx_split_cmd,
             task_work_path=ii,
             outlog='gmx_split.log',
             errlog='gmx_split.log') for ii in all_task_relpath
    ]
    gmx_split_submission = Submission(work_base=work_path,
                                      resources=resources,
                                      machine=machine,
                                      task_list=gmx_split_task)
    gmx_split_submission.run_submission()

    for ii in range(numb_walkers):
        walker_path = work_path + make_walker_name(ii) + "/"
        # drop the first (time) column of the PLUMED output, keeping only the CV values
        angles = np.loadtxt(walker_path + enhc_out_plm)
        np.savetxt(walker_path + enhc_out_angle, angles[:, 1:], fmt="%.6f")
    print("Post process of enhanced sampling finished.")
Example #3
def run_res(iter_index, json_file, machine_json, base_dir="./"):
    json_file = os.path.abspath(json_file)
    fp = open(json_file, 'r')
    jdata = json.load(fp)
    fp.close()
    gmx_prep = jdata["gmx_prep"]
    gmx_run = jdata["gmx_run"]
    res_thread = jdata["res_thread"]
    gmx_run = gmx_run + (" -nt %d" % res_thread)
    gmx_run = gmx_run + " -plumed " + res_plm
    # gmx_cont_run = gmx_run + " -cpi state.cpt"
    gmx_prep_log = "gmx_grompp.log"
    gmx_run_log = "gmx_mdrun.log"
    gmx_prep_cmd = cmd_append_log(gmx_prep, gmx_prep_log)
    gmx_run_cmd = cmd_append_log(gmx_run, gmx_run_log)
    # gmx_cont_run_cmd = cmd_append_log (gmx_cont_run, gmx_run_log)

    base_dir = os.path.abspath(base_dir) + "/"
    iter_name = make_iter_name(iter_index)
    res_path = base_dir + iter_name + "/" + res_name + "/"

    if not os.path.isdir(res_path):
        raise RuntimeError("cannot find the restrained simulation directory (%s)." %
                           res_path)

    all_task = list(
        filter(lambda x: os.path.isdir(x),
               glob.glob(res_path + "/[0-9]*[0-9]")))
    print('run_res:all_task:', all_task)
    print('run_res:gmx_prep_cmd:', gmx_prep_cmd)
    print('run_res:gmx_run_cmd:', gmx_run_cmd)
    # print('run_res:gmx_cont_run_cmd:', gmx_cont_run_cmd)

    if len(all_task) == 0:
        return None
    all_task.sort()
    all_task_basedir = [os.path.relpath(ii, res_path) for ii in all_task]

    res_resources = set_resource(machine_json, target="resMD")
    machine = set_machine(machine_json, target="resMD")

    gmx_prep_task = [
        Task(command=gmx_prep_cmd,
             task_work_path=ii,
             outlog='gmx_grompp.log',
             errlog='gmx_grompp.log') for ii in all_task_basedir
    ]
    gmx_prep_submission = Submission(work_base=res_path,
                                     machine=machine,
                                     resources=res_resources,
                                     task_list=gmx_prep_task)
    gmx_prep_submission.run_submission()

    gmx_run_task = [
        Task(command=gmx_run_cmd,
             task_work_path=ii,
             outlog='gmx_mdrun.log',
             errlog='gmx_mdrun.log') for ii in all_task_basedir
    ]
    gmx_run_submission = Submission(work_base=res_path,
                                    machine=machine,
                                    resources=res_resources,
                                    task_list=gmx_run_task)
    gmx_run_submission.run_submission()
Example #4
    def test_cmd_append_log(self):
        cmd_log = utils.cmd_append_log("gmx mdrun -deffnm em", 'md.log')
        _cmd_log = "gmx mdrun -deffnm em 1> md.log 2> md.log"
        self.assertEqual(cmd_log, _cmd_log)
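
The test pins down the expected behaviour of cmd_append_log: both stdout and stderr of the command are redirected to the given log file. A minimal sketch consistent with the expected string above (the actual implementation in utils may differ):

# Minimal sketch consistent with the test's expected string; the real utils
# implementation may differ in detail.
def cmd_append_log(cmd, log_file):
    # redirect stdout and stderr of the command to the same log file
    return cmd + " 1> " + log_file + " 2> " + log_file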
Example #5
def run_train(iter_index, json_file, machine_json, cv_file, base_dir="./"):
    json_file = os.path.abspath(json_file)
    cv_file = os.path.abspath(cv_file)
    fp = open(json_file, 'r')
    jdata = json.load(fp)
    fp.close()
    numb_model = jdata["numb_model"]
    train_thread = jdata["train_thread"]
    res_iter = jdata["res_iter"]
    base_dir = os.path.abspath(base_dir) + "/"
    iter_name = make_iter_name(iter_index)
    train_path = base_dir + iter_name + "/" + train_name + "/"
    if check_new_data(iter_index, train_path, base_dir):
        return

    enhc_path = base_dir + iter_name + "/" + enhc_name + "/"
    _conf_file = enhc_path + "000/conf.gro"
    cv_dim_list = cal_cv_dim(_conf_file, cv_file)

    cwd = os.getcwd()
    neurons = jdata["neurons"]
    batch_size = jdata["batch_size"]
    if iter_index < res_iter:
        numb_epoches = jdata["numb_epoches"]
        starter_lr = jdata["starter_lr"]
        decay_steps = jdata["decay_steps"]
        decay_rate = jdata["decay_rate"]
        cmdl_args = ""
    else:
        numb_epoches = jdata["res_numb_epoches"]
        starter_lr = jdata["res_starter_lr"]
        decay_steps = jdata["res_decay_steps"]
        decay_rate = jdata["res_decay_rate"]
        old_ratio = jdata["res_olddata_ratio"]
        cmdl_args = " --restart --use-mix --old-ratio %f " % old_ratio

    if jdata["resnet"]:
        cmdl_args += " --resnet "
    cmdl_args += " -n "
    for nn in neurons:
        cmdl_args += "%d " % nn
    cmdl_args += " -c "
    for cv_dim in cv_dim_list:
        cmdl_args += "%d " % cv_dim
    cmdl_args += " -b " + str(batch_size)
    cmdl_args += " -e " + str(numb_epoches)
    cmdl_args += " -l " + str(starter_lr)
    cmdl_args += " --decay-steps " + str(decay_steps)
    cmdl_args += " --decay-rate " + str(decay_rate)

    train_cmd = "python3 {}/train.py -t {:d}".format(NN_PATH, train_thread)
    train_cmd += cmdl_args
    train_cmd = cmd_append_log(train_cmd, "train.log")
    freez_cmd = "python3 {}/freeze.py -o graph.pb".format(NN_PATH)
    freez_cmd = cmd_append_log(freez_cmd, "freeze.log")
    task_dirs = [("%03d" % ii) for ii in range(numb_model)]

    print('lib.modeling.run_train:train_cmd:', train_cmd)
    print('lib.modeling.run_train:freez_cmd:', freez_cmd)
    print('lib.modeling.run_train:train_path:', train_path)
    print('lib.modeling.run_train:task_dirs:', task_dirs)

    resources = set_resource(machine_json, target="train")
    machine = set_machine(machine_json, target="train")

    train_task = [
        Task(command=train_cmd,
             task_work_path=ii,
             outlog='train.log',
             errlog='train.log') for ii in task_dirs
    ]
    train_submission = Submission(work_base=train_path,
                                  machine=machine,
                                  resources=resources,
                                  task_list=train_task)
    train_submission.run_submission()

    freez_task = [
        Task(command=freez_cmd,
             task_work_path=ii,
             outlog='freeze.log',
             errlog='freeze.log') for ii in task_dirs
    ]
    freez_submission = Submission(work_base=train_path,
                                  machine=machine,
                                  resources=resources,
                                  task_list=freez_task)
    freez_submission.run_submission()

    os.chdir(train_path)
    # expose each model's frozen graph at the top of the training directory
    for ii in range(numb_model):
        os.symlink("%03d/graph.pb" % ii, "graph.%03d.pb" % ii)
    os.chdir(cwd)

    print("Training finished!")