'aevsize': aevsize,        # NOTE(review): this line completes a dict literal (netdict?) opened before this chunk — confirm against the preceding lines
            'num_nets': Nnets,
            'atomtyp': ['H', 'C', 'N', 'O']}  # element types covered by the model — presumably must match the training data; verify

## Train the ensemble ##
# Build an ensemble trainer, populate its striped data cache from the HDF5
# store, then train all Nnets networks on the given GPU(s).
aet = alt.alaniensembletrainer(netdir, netdict, h5stor, Nnets)
aet.build_strided_training_cache(Nblock, Nbvald, Nbtest, False)
aet.train_ensemble(GPU)

# NOTE(review): source was whitespace-mangled; the indentation below is
# reconstructed. It assumes sampling + QM-data generation are both guarded by
# `if i < 5` (skip data generation on the final iterations) — confirm against
# the original script. `i` is presumably the active-learning cycle index from
# an enclosing loop not visible here.
if i < 5:
    ldtdir = root_dir  # local data directories

    # Create the per-cycle data directory (e.g. <datdir>01, <datdir>02, ...)
    # if it does not already exist.
    if not os.path.exists(root_dir + datdir + str(i + 1).zfill(2)):
        os.mkdir(root_dir + datdir + str(i + 1).zfill(2))

    ## Run active learning sampling ##
    acs = alt.alconformationalsampler(ldtdir, datdir + str(i + 1).zfill(2), optlfile, fpatoms, netdict)

    # Alternative sampling strategies, disabled in this configuration:
    #acs.run_sampling_cluster(gcmddict, GPU)
    #acs.run_sampling_dimer(dmrparams, GPU)
    #acs.run_sampling_nms(nmsparams, GPU)
    #acs.run_sampling_md(mdsparams, perc=0.5, gpus=GPU)
    # Active strategy: transition-state sampling on half of the GPUs.
    acs.run_sampling_TS(tsparams, gpus=GPU, perc=0.5)
    #acs.run_sampling_dhl(dhparams, gpus=GPU+GPU)
    #acs.run_sampling_TS(tsparams, gpus=GPU)
    #exit(0)

    ## Submit jobs, return and pack data
    # NOTE(review): `ast` here is a project alias (it shadows the stdlib `ast`
    # module) — presumably a remote-server/job-submission helper; verify the
    # import. Submits QM jobs for the sampled conformations and packs the
    # results into the HDF5 store.
    ast.generateQMdata(hostname, username, swkdir, ldtdir, datdir + str(i + 1).zfill(2), h5stor, mae, jtime)
except FileExistsError:  # NOTE(review): completes a `try: os.mkdir(...)` opened before this chunk — directory already existing is fine
    pass

# Idempotent directory creation: ignore the error if it already exists.
try:
    os.mkdir(new_datdir)
except FileExistsError:
    pass

# Network/ensemble configuration consumed by the trainer and the sampler below.
netdict = {
    'iptfile': iptfile_path,    # input/training-parameter file
    'cnstfile': cstfile_path,   # constants file
    'saefile': saefile_path,    # self atomic energies file
    'nnfprefix': nnfprefix,     # network-directory prefix
    'aevsize': aevsize,         # atomic environment vector length
    'num_nets': Nnets,          # number of networks in the ensemble
    'atomtyp': ['H', 'C', 'N', 'O']  # element types covered by the model
}

# Train the ensemble
ani_ensemble_trainer = alt.alaniensembletrainer(netdir, netdict, h5dataset_path, Nnets)
ani_ensemble_trainer.build_strided_training_cache(Nblock, Nbvald, Nbtest, False)
ani_ensemble_trainer.train_ensemble(GPU)

local_data_dir = root_dir

# Run active learning sampling
# NOTE(review): this variant runs only divergent-sampling (dhl); `GPU + GPU`
# presumably doubles a GPU id list to spread the work — confirm GPU is a list.
ani_conformational_sampler = alt.alconformationalsampler(
    local_data_dir, dat, optlfile_path, fpatoms, netdict)
ani_conformational_sampler.run_sampling_dhl(dhparams, gpus=GPU + GPU)

# Submit jobs, return and pack data
# Submits QM jobs for the sampled conformations on the remote host and packs
# the returned results into the HDF5 dataset.
aniserver.generateQMdata(hostname, username, swkdir, local_data_dir, dat, h5dataset_path, mae, jtime)