Example no. 1
0
def main(args):
    """ """
    with open(args.param, "r") as f:
        param = json.load(f)
    print("use the env {} ".format(param["env_name"]))
    print(param)
    print("Start Programm in {}  mode".format(args.mode))
    env = gym.make(param["env_name"])
    print(env.action_space.n)
    if args.mode == "search":
        param["lr_pre"] = args.lr_pre
        param["lr"] = args.lr
        param["fc1_units"] = args.fc1_units
        param["fc2_units"] = args.fc2_units
        param["clip"] = args.clip
        param["buffer_path"] = args.buffer_path
    param["locexp"] = args.locexp
    mkdir(args.locexp, "")
    text = str(param)
    write_into_file(str(param["locexp"]) + "/hyperparameters", text)
    if args.mode == "iql":
        train(env, param)
    if args.mode == "eval":
        eval_policy(env, param)
    if args.mode == "vid":
        create_vid(env, param)
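
A minimal sketch of the command-line parser that main() above appears to expect; the flag names and defaults are assumptions inferred from the attributes read in the function (args.param, args.mode, args.lr, ...), not the original CLI.

import argparse

if __name__ == "__main__":
    # Hypothetical CLI matching the attributes read in main(); names and defaults are guesses.
    parser = argparse.ArgumentParser()
    parser.add_argument("--param", type=str, help="path to a JSON hyperparameter file")
    parser.add_argument("--mode", type=str, default="iql", help="iql | eval | vid | search")
    parser.add_argument("--lr_pre", type=float, default=1e-3)
    parser.add_argument("--lr", type=float, default=1e-3)
    parser.add_argument("--fc1_units", type=int, default=256)
    parser.add_argument("--fc2_units", type=int, default=256)
    parser.add_argument("--clip", type=float, default=1.0)
    parser.add_argument("--buffer_path", type=str, default="")
    parser.add_argument("--locexp", type=str, default="results")
    main(parser.parse_args())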
Example no. 2
0
    def _pdb2prmtop_mmpbsa(pdb_path, igb, out_path=None):
        '''
        A method to generate dry prmtop file for MMPBSA
        TODO generalize and combine with PDB2FF fragments
        '''
        radii = radii_map[str(igb)]
        if out_path is None:
            out_path = pdb_path[:-4] + '.prmtop'
        mkdir('./tmp')
        with open('./tmp/leap_pdb2prmtop.in', 'w') as of:
            of.write('source leaprc.protein.ff14SB' + line_feed)
            of.write('inp = loadpdb ' + pdb_path + line_feed)
            of.write('set default PBRadii ' + radii + line_feed)
            of.write('saveamberparm inp ' + out_path + ' ./tmp/tmp.inpcrd' +
                     line_feed)
            of.write('quit' + line_feed)

        try:
            run('tleap -s -f ./tmp/leap_pdb2prmtop.in',
                check=True,
                text=True,
                shell=True,
                capture_output=True)
        except CalledProcessError as err:
            raise TrajCalcERROR(err.stderr)
        # clean
        if Config.debug < 2:
            run('rm leap.log',
                check=0,
                text=True,
                shell=True,
                capture_output=True)

        return out_path
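
A hedged usage sketch for the helper above; the owning class name (Traj_calc, taken from the docstring of the calc_MMPBSA example below), the input file, and the mapping of igb=5 to the 'mbondi2' radii set are assumptions (the actual mapping lives in radii_map in the surrounding module).

# Hypothetical call; 'complex_dry.pdb' is a placeholder structure.
prmtop = Traj_calc._pdb2prmtop_mmpbsa('complex_dry.pdb', igb=5)
# Assuming radii_map['5'] == 'mbondi2', ./tmp/leap_pdb2prmtop.in would contain roughly:
#   source leaprc.protein.ff14SB
#   inp = loadpdb complex_dry.pdb
#   set default PBRadii mbondi2
#   saveamberparm inp complex_dry.prmtop ./tmp/tmp.inpcrd
#   quit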
Example no. 3
0
def make_NoisePulseHeightTable(path, results) :
	if not path.endswith('/') : path += '/'
	helper.mkdir(path)
	table_name = 'NoisePulseHeightTable.tex'
	print '[status] writing %s' % table_name
	with open(path + table_name, 'w') as file :
		timestamp = time.asctime()
		file.write('%!TEX root = ../../Dissertation.tex\n')
		file.write('\n\n')
		file.write('\\begin{tabular}{\n')
		file.write('\tl\n')
		file.write('\tS[table-number-alignment = center, table-format = -4.0, retain-explicit-plus]\n')
		file.write('\tS\n')
		file.write('\tS[table-number-alignment = center, table-format = 4.1]\n')
		file.write('}\n')
		file.write('\t\\toprule\n')
		file.write('\tRun   & {Voltage (\\si{\\volt})} & {Noise (ADC Counts)} & {Pulse Height Mean (ADC Counts)} \\\\\n')
		file.write('\t\\midrule\n')
		for run in results :
			file.write('\t%5d & %5s & %5.1f & %6.1f \\\\\n' % (
				run,
				results[run]['Voltage'],
				results[run]['Noise'],
				results[run]['PulseHeight']))
		file.write('\t\\bottomrule\n')
		file.write('\\end{tabular}')
Example no. 4
0
def clone(url: str, jina_dir: str = default_clone_dir()):
    if not is_git_installed():
        raise GitNotInstalled(
            'You don\'t have git installed. Are you even a developer?')

    repo_name = url.split('/')[-1]
    # trim a trailing '.git' suffix (rstrip would strip characters, not the suffix)
    if repo_name.endswith('.git'):
        repo_name = repo_name[:-len('.git')]
    repo_dir = os.path.join(jina_dir, repo_name)
    print(f'Directory to be created: {jina_dir}')
    mkdir(jina_dir)

    if is_valid_dir(repo_dir):
        while True:
            delete = input(
                f'You\'ve used {repo_name} before, do you want to download it again? (yes|no): '
            )
            if delete == 'yes':
                rmdir(repo_dir)
                break
            elif delete == 'no':
                return repo_dir
            else:
                print('Invalid option! Please choose from (yes|no)')

    try:
        subprocess.check_output(['git', 'clone', url],
                                cwd=jina_dir,
                                stderr=subprocess.STDOUT)
    except Exception as ex:
        rmdir(jina_dir)
        if 'not found' in ex.stdout.decode():
            raise InvalidRepo('Invalid repo!')
        raise

    return repo_dir
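
A hedged usage sketch for clone(); the URL is a placeholder and default_clone_dir() is assumed to return a writable directory.

if __name__ == '__main__':
    # Hypothetical invocation; replace the URL with a real repository.
    path = clone('https://github.com/example/repo.git')
    print(f'Repository available at {path}')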
Example no. 5
0
def main():
    # params file
    txt_path = sys.argv[1] if len(sys.argv) > 1 else None

    if not txt_path:
        print('Path of params .txt file is required!')
        sys.exit(0)

    params = get_params(txt_path=txt_path)

    anime_file_path = os.path.join('animes', f"{params.get('anime')}.txt")
    # verify anime path
    if not helper.path_exists(anime_file_path):
        print(f"Anime \"{params.get('anime')}\" doesn't exists, try again.")
        sys.exit(0)

    output_folder = params.get('output_folder')
    anime_name = params.get('name')

    # create output folder if not exists
    helper.mkdir(output_folder)
    # videos urls
    urls_data = get_urls(path=anime_file_path)
    # remove temp files
    remove_temp_files(output_folder=output_folder)

    for url_data in urls_data:
        download_video(
            url_data=url_data,
            output_folder=output_folder,
            anime_name=anime_name,
        )
Example no. 6
0
	def __init__(self, config_file, path, output_path, runlog_file) :
		self.config_file = config_file
		if not path.endswith('/') : path += '/'
		self.path = path
		if not output_path.endswith('/') : output_path += '/'
		self.output_path = output_path
		helper.mkdir(self.output_path)
		self.runlog = runlog.runlog(runlog_file)
Example no. 7
0
def mover(filePath, emotion):
	filename = filePath.rsplit("/",1)[1]
	speakerID = filename.split("_",1)[0]
	print filename, speakerID, emotion
	if not emotion == "xxx": 
		mkdir(os.path.join("CleanedIEMOCAP", emotion))
		mkdir(os.path.join("CleanedIEMOCAP", emotion, speakerID))
		shutil.move(filePath, os.path.join("CleanedIEMOCAP", emotion, speakerID, filename))
	def __init__(self, path, input_path, run_config) :
		if not path.endswith('/') : path += '/'
		self.path = path
		helper.mkdir(self.path)
		if not input_path.endswith('/') : input_path += '/'
		self.input_path = input_path
		self.runs = self.read_runs(run_config)
		self.runs_list = sorted(self.runs.keys())
Example no. 9
0
def _save_session(sess, checkpoint_dir):
    saver = tf.train.Saver()
    current_date_time = helper.get_current_date_time()
    current_date_time = current_date_time.replace(" ", "__").replace(
        "/", "_").replace(":", "_")
    helper.mkdir(checkpoint_dir)
    saver.save(
        sess,
        os.path.join(checkpoint_dir, "ckpt_{}.ckpt".format(current_date_time)))
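
A minimal restore sketch for the checkpoint written above, assuming the same TF1-style graph is rebuilt in the session first; the function name and checkpoint path are placeholders.

def _restore_session(sess, checkpoint_path):
    # Re-create the Saver against the current graph and load the saved variables.
    saver = tf.train.Saver()
    saver.restore(sess, checkpoint_path)

# e.g. _restore_session(sess, os.path.join(checkpoint_dir, "ckpt_01_01_2020__12_00_00.ckpt"))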
Example no. 10
0
    def saveParam(self, dir='tmp', mode=0):
        subdir = ''

        if mode == 1:
            subdir = 'history/%s/' % (dir)

        fulldir = 'data/%s/%s' % (self.dir, subdir)

        mkdir(fulldir)
        self.saver.save(self.sess, '%s%s' % (fulldir, self.ckptFile))
Example no. 11
0
def mover(filePath, emotion):
    filename = filePath.rsplit("/", 1)[1]
    speakerID = filename.split("_", 1)[0]
    print filename, speakerID, emotion
    if not emotion == "xxx":
        mkdir(os.path.join("CleanedIEMOCAP", emotion))
        mkdir(os.path.join("CleanedIEMOCAP", emotion, speakerID))
        shutil.move(
            filePath,
            os.path.join("CleanedIEMOCAP", emotion, speakerID, filename))
Example no. 12
0
def gitClone(repoCloneDir, repo):
    proName, repoName, gitAddr = repo
    helper.mkdir(repoCloneDir)
    try:
        git.Git(repoCloneDir).clone(gitAddr)
        helper.configSonarProperty(repoName)

        updateCloneStatus(proName, repoName)
    except git.exc.GitCommandError:
        # clone fails if the target directory already exists
        print("repo has already been cloned!!!")
Example no. 13
0
def _set_up_monitoring(env, config):
    """wrap the environment to allow rendering and set up a save directory"""
    helper.mkdir(os.path.join(".",
                              *config["monitor_dir"],
                              config["env_name"]))
    current_date_time = helper.get_current_date_time()
    current_date_time = current_date_time.replace(" ", "__").replace("/", "_").replace(":", "_")

    monitor_path = os.path.join(".", *config["monitor_dir"], config["env_name"], current_date_time)
    env = wrappers.Monitor(env, monitor_path)
    return env
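
A minimal usage sketch, assuming gym is imported in the surrounding module; the config values are placeholders and only illustrate that "monitor_dir" must be an iterable of path components, since it is unpacked with * into os.path.join above.

# Hypothetical config; the keys mirror those read by _set_up_monitoring.
config = {"monitor_dir": ["results", "monitor"], "env_name": "CartPole-v1"}
env = gym.make(config["env_name"])
env = _set_up_monitoring(env, config)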
Example no. 14
0
    def calc_MMPBSA(cls,
                    traj_list,
                    frag_str,
                    igb=5,
                    out_dir='',
                    in_file='',
                    use_parmed=1,
                    prepare_only=0):
        '''
        1. update_Radii
        2. make_dry_frags
        3. run_MMPBSA
        -----
        traj_list:  a list of Traj_calc objects
        frag_str:   a str that defines the two fragments
                (Grammar)
                - same as the pymol selection grammar (so that you can confirm the selection via pymol)
                - use ':' to separate the two fragments (in the order receptor : ligand; the order only matters for the correspondence in the MMPBSA output)
                * DO NOT use the original chain id in the pdb file. Count from A and from 1.
        igb:        igb method used for MMGBSA (related to the Radii change)
        out_dir:    output dir of the data file
        in_file:    MMPBSA.in
        use_parmed: whether to use parmed to change the Radii
        prepare_only: whether to only prepare the prmtop files. (With accre this is necessary since Amber and MMPBSA are installed with a lower version of Python, which kills everything that uses conda.)
        '''
        # clean out path
        if out_dir == '':
            out_dir = './'
        else:
            mkdir(out_dir)
        if out_dir[-1] != '/':
            out_dir = out_dir + '/'

        data_files = []

        for traj in traj_list:
            try:
                if use_parmed:
                    traj.update_Radii(igb=igb)
                    traj.make_dry_frags(frag_str, igb=igb)
                else:
                    traj.make_dry_frags(frag_str, igb=igb, if_sol=1)
                if not prepare_only:
                    data_file = traj.run_MMPBSA(in_file=in_file,
                                                out_path=out_dir + traj.name +
                                                '.dat')
                    data_files.append(data_file)
                else:
                    data_files.append(
                        (traj.name, traj.nc, traj.prmtop, traj.dl_prmtop,
                         traj.dr_prmtop, traj.dc_prmtop))
            except TrajCalcERROR as err:
                print('ERROR:', err)
        return data_files
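
A hedged invocation sketch; the trajectory objects and selection string are placeholders, and the class name Traj_calc is taken from the docstring above.

# Hypothetical call: receptor is chain A, ligand is chain B (PyMOL selection grammar, ':' separator).
data_files = Traj_calc.calc_MMPBSA(
    traj_list=[traj_wt, traj_mut],
    frag_str='chain A:chain B',
    igb=5,
    out_dir='./mmpbsa_out',
    in_file='./MMPBSA.in',
    use_parmed=1,
    prepare_only=0)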
Example no. 15
0
 def save_model(self):
     helper.mkdir(
         os.path.join(".", *self.config["general"]["checkpoint_path"],
                      self.config["general"]["env_name"]))
     current_date_time = helper.get_current_date_time()
     current_date_time = current_date_time.replace(" ", "__").replace(
         "/", "_").replace(":", "_")
     np.save(
         os.path.join(".", *self.config["general"]["checkpoint_path"],
                      self.config["general"]["env_name"],
                      "ckpt_" + current_date_time), self.weights)
    def save(self):
        """Save the network weights"""
        save_dir = os.path.join(".", *self.config["checkpoint_dir"],
                                self.config["env_name"])
        helper.mkdir(save_dir)
        current_date_time = helper.get_current_date_time()
        current_date_time = current_date_time.replace(" ", "__").replace(
            "/", "_").replace(":", "_")

        torch.save(self.model.state_dict(),
                   os.path.join(save_dir, "ckpt_" + current_date_time))
 def __init__(self, config_file, path, output_path, runlog_file, suffix="", run_config_file=""):
     self.config_file = config_file
     if not path.endswith("/"):
         path += "/"
     self.path = path
     if not output_path.endswith("/"):
         output_path += "/"
     self.output_path = output_path
     helper.mkdir(self.output_path)
     self.runlog = runlog.runlog(runlog_file)
     self.suffix = suffix
     self.run_config_file = run_config_file
Example no. 18
0
def refactor():
	mkdir("CleanedIEMOCAP")

	for i in range(1,6):
		sesh = "Session{}".format(i)
		for convos in listdir(os.path.join(currdir, sesh, "wav")):
			speakerID = convos.split("_")[0]
			
			transcriptionLoc = os.path.join(currdir, sesh, "Eval", convos+".txt")
			emotionDict = fileParser(transcriptionLoc)
			
			currLoc = os.path.join(currdir, sesh, "wav", convos)
			for utteranceWav in listdir(currLoc):	
				utteranceID = utteranceWav.rstrip(".wav")
				mover(os.path.join(currLoc, utteranceWav), emotionDict[utteranceID])
    def save(self):
        """Save the network weights"""
        save_dir = os.path.join(".", *self.config["checkpoint_dir"],
                                self.config["project_name"])
        helper.mkdir(save_dir)
        current_date_time = helper.get_current_date_time()
        current_date_time = current_date_time.replace(" ", "__").replace(
            "/", "_").replace(":", "_")

        torch.save(
            self.generator.state_dict(),
            os.path.join(save_dir, "generator_ckpt_" + current_date_time))
        torch.save(
            self.discriminator.state_dict(),
            os.path.join(save_dir, "discriminator_ckpt_" + current_date_time))
 def eval_policy(self,
                 record=False,
                 random_policy=False,
                 eval_episodes=2,
                 eval_policy=False,
                 steps=0):
     #env = wrappers.Monitor(self.env, str(self.vid_path) + "/{}".format(self.steps), video_callable=lambda episode_id: True, force=True)
     env = self.env
     average_reward = 0
     scores_window = deque(maxlen=100)
     s = 0
     for i_epiosde in range(eval_episodes):
         episode_reward = 0
         state = env.reset("mediumClassic")
         mkdir("vid/{}".format(i_epiosde), "")
         while True:
             s += 1
             if random_policy:
                 action = env.action_space.sample()
             else:
                 action = self.act(state)
             state, reward, done, info = env.step(action)
             cv2.imwrite("{}/{}/pic-{}.png".format("vid", i_epiosde, s),
                         np.array(state))
             episode_reward += reward
             if done:
                 #cv2.imwrite("{}/{}/pic-{}.png".format("vid", i_epiosde, episode_reward), np.array(info))
                 break
         scores_window.append(episode_reward)
     if record:
         return
     average_reward = np.mean(scores_window)
     min_reward = np.min(scores_window)
     max_reward = np.max(scores_window)
     if eval_policy:
         self.writer.add_scalar('Eval_reward', average_reward, steps)
         self.writer.add_scalar('Eval_min', min_reward, steps)
         self.writer.add_scalar('Eval_max', max_reward, steps)
         print("Eval Episode {}  average Reward {} ".format(
             eval_episodes, average_reward))
         return
     if random_policy:
         print("Random policy Eval Episode {}  average Reward {} ".format(
             eval_episodes, average_reward))
     else:
         print("Eval Episode {}  average Reward {} ".format(
             eval_episodes, average_reward))
     self.writer.add_scalar('Eval_reward', average_reward, self.steps)
Example no. 21
0
def start():
    pull.PullProcess()
    sourcePathBase = os.getcwd() + "/" + cf.get("server", "gitCloneAddr")
    targetPathBase = os.getcwd() + "/" + cf.get("server", "sonarTempAddr")
    for repo in pull.getCloneRepos():
        proName, repoName, gitAddr, projId = repo
        sourcePath = sourcePathBase + "/" + repoName
        targetPath = targetPathBase + "/" + repoName
        helper.mkdir(targetPath)
        helper.copyFiles(sourcePath, targetPath)

        sonarScan.runSonarScanner(targetPath)

        os.system('rmdir /S /Q "{}"'.format(targetPath))
        addSonarResult(sonarResultAnalysis.getIssueNumberOfRepo(repoName),
                       sonarResultAnalysis.getMetricsOfRepo(repoName), projId,
                       repoName)
    def save(self, filename):
        """
        """
        mkdir("", filename)
        torch.save(self.predicter.state_dict(), filename + "_predicter.pth")
        torch.save(self.optimizer_pre.state_dict(),
                   filename + "_predicter_optimizer.pth")
        torch.save(self.qnetwork_local.state_dict(), filename + "_q_net.pth")
        torch.save(self.optimizer.state_dict(),
                   filename + "_q_net_optimizer.pth")
        torch.save(self.R_local.state_dict(), filename + "_r_net.pth")
        torch.save(self.q_shift_local.state_dict(),
                   filename + "_q_shift_net.pth")
        # save also vae and mdrnn
        torch.save(self.model.state_dict(), filename + "_model_net.pth")

        print("save models to {}".format(filename))
Example no. 23
0
def refactor():
    mkdir("CleanedIEMOCAP")

    for i in range(1, 6):
        sesh = "Session{}".format(i)
        for convos in listdir(os.path.join(currdir, sesh, "wav")):
            speakerID = convos.split("_")[0]

            transcriptionLoc = os.path.join(currdir, sesh, "Eval",
                                            convos + ".txt")
            emotionDict = fileParser(transcriptionLoc)

            currLoc = os.path.join(currdir, sesh, "wav", convos)
            for utteranceWav in listdir(currLoc):
                utteranceID = utteranceWav.rstrip(".wav")
                mover(os.path.join(currLoc, utteranceWav),
                      emotionDict[utteranceID])
	def __init__(self, ped_path, path = 'output') :
#		self.raw_path     = raw_path
		self.ped_path     = ped_path
#		self.raw_file     = ROOT.TFile.Open(self.raw_path, 'READ')
		self.ped_file     = ROOT.TFile.Open(self.ped_path, 'READ')
#		self.raw_tree     = self.raw_file.Get('rawTree')
		self.ped_tree     = self.ped_file.Get('pedestalTree')
#		if self.raw_tree.GetEntries() != self.ped_tree.GetEntries() :
#			print '[ERROR] check trees!'
#			sys.exit(1)
#		self.ped_tree.GetListOfFriends().Clear()
#		friend = self.ped_tree.GetListOfFriends().FindObject('rawTree')
#		self.ped_tree.GetListOfFriends().Remove(friend)
		self.nevents = int(self.ped_tree.GetEntries())
		self.path = path
		if not self.path.endswith('/') : self.path += '/'
		helper.mkdir(self.path)
	def __init__(self, config_file, path, output_path, run_no, position, histo_type, run_config_file = '', event_number = 0) :
		self.config_file = config_file
		self.config = ConfigParser.ConfigParser()
		self.config.optionxform = str # case sensitive options
		self.config.read(config_file)
		self.run_no = int(run_no)
		self.position = position
		self.histo_type = histo_type
		if not path.endswith('/') : path += '/'
		self.path = '%s%s' % (path, self.run_no)
		if self.position != '' :
			self.path += '/%s/' % self.position
		else : self.path += '/'
		if not output_path.endswith('/') : output_path += '/'
		self.output_path = '%s%s/' % (output_path, self.run_no)
		self.event_number = event_number
		rd42Style()

		for key, value in self.config.items(histo_type) :
			value = value.replace('EVENTNUMBER', '%d' % self.event_number)
			setattr(self, key, value)
		if not hasattr(self, 'name') :
			self.name = self.histo_name
		if not hasattr(self, 'variable') :
			self.variable = histo_type
		if not hasattr(self, 'nstrips') :
			self.nstrips = 0
		else :
			self.nstrips = int(self.nstrips)
		if hasattr(self, 'output_dir') :
			self.output_path += self.output_dir
			if not self.output_path.endswith('/') : self.output_path += '/'
		helper.mkdir(self.output_path)
		if hasattr(self, 'rebin') :
			self.rebin = int(self.rebin)
		self.root_file = self.root_file.replace('RUNNUMBER', '.%d' % self.run_no)
		self.file_path = self.path + self.root_file
		self.rand = ROOT.TRandom3(0)

		self.run_config_file = run_config_file
		if self.run_config_file != '' :
			self.run_config = ConfigParser.ConfigParser()
			self.run_config.optionxform = str # case sensitive options
			self.run_config.read(run_config_file)
Example no. 26
0
    def update_Radii(self, igb=5):
        '''
        Update Radii for the MMGBSA calculation
        ---
        igb:        gb method used

        update self.prmtop to new_parm_path

        TODO support 3A MMPBSA
        '''
        # get radii
        radii = radii_map[str(igb)]

        mkdir('./tmp')
        prmtop_path = self.prmtop
        new_prmtop_path = prmtop_path[:-7] + '_' + radii + '.prmtop'
        # change Radii
        with open('./tmp/parmed.in', 'w') as of:
            of.write('changeRadii ' + radii + line_feed)
            of.write('parmout ' + new_prmtop_path + line_feed)
        try:
            run('parmed -p ' + prmtop_path + ' -i ./tmp/parmed.in',
                check=True,
                text=True,
                shell=True,
                capture_output=True)
        except CalledProcessError as err:
            raise TrajCalcERROR(err.stderr)
        self.prmtop = new_prmtop_path
        # clean
        if Config.debug < 2:
            try:
                run('rm parmed.log',
                    check=True,
                    text=True,
                    shell=True,
                    capture_output=True)
            except CalledProcessError:
                pass

        return new_prmtop_path
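
A hedged usage sketch; traj is a placeholder trajectory object, and the igb=5 -> 'mbondi2' mapping is assumed from Amber's convention (the real values come from radii_map).

# Hypothetical call: for traj.prmtop == 'complex.prmtop' this writes ./tmp/parmed.in containing
#   changeRadii mbondi2
#   parmout complex_mbondi2.prmtop
# runs parmed, and points traj.prmtop at the new file.
new_prmtop = traj.update_Radii(igb=5)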
Example no. 27
0
    def save_memory(self, filename):
        """
        Use numpy save function to store the data in a given file
        """
        mkdir("", filename)

        with open(filename + '/obses.npy', 'wb') as f:
            np.save(f, self.obses)

        with open(filename + '/actions.npy', 'wb') as f:
            np.save(f, self.actions)

        with open(filename + '/next_obses.npy', 'wb') as f:
            np.save(f, self.next_obses)

        with open(filename + '/not_dones.npy', 'wb') as f:
            np.save(f, self.not_dones)

        with open(filename + '/index.txt', 'w') as f:
            f.write("{}".format(self.idx))
        print("save buffer to {}".format(filename))
Example no. 28
0
	def __init__(self, config_file, path, output_path, run_no, position, histo_type) :
		self.config = ConfigParser.ConfigParser()
		self.config.optionxform = str # case sensitive options
		self.config.read(config_file)
		self.run_no = run_no
		self.position = position
		self.histo_type = histo_type
		if not path.endswith('/') : path += '/'
		self.path = '%s%s' % (path, self.run_no)
		if self.position != '' :
			self.path += '/%s/' % self.position
		else : self.path += '/'
		if not output_path.endswith('/') : output_path += '/'
		self.output_path = '%s%s/' % (output_path, self.run_no)
		helper.mkdir(self.output_path)
		rd42Style()

		for key, value in self.config.items(histo_type) :
			setattr(self, key, value)
		self.file_path = self.path + self.root_file
		self.rand = ROOT.TRandom3(0)
Example no. 29
0
            def build_MMPBSA_in(cls, out_path=''):
                '''
                build MMPBSA.in in out_path
                '''
                if out_path == '':
                    mkdir('./tmp')
                    out_path = './tmp/MMPBSA.in'

                # make lines
                frame_line = '  '
                for i in ('startframe', 'endframe', 'interval'):
                    if cls.conf_in[i] is not None:
                        frame_line = frame_line + i + '=' + str(
                            cls.conf_in[i]) + ', '
                output_line = '  verbose=' + str(
                    cls.conf_in['verbose']) + ', keep_files=' + str(
                        cls.conf_in['keep_files']) + ','
                gb_line = '  igb=' + str(
                    cls.conf_in['igb']) + ', saltcon=' + str(
                        cls.conf_in['saltcon']) + ','
                pb_line = '  istrng=' + str(
                    cls.conf_in['istrng']) + ', fillratio=' + str(
                        cls.conf_in['fillratio'])

                with open(out_path, 'w') as of:
                    print('GB and PB calculation', end=line_feed, file=of)
                    print('&general', end=line_feed, file=of)
                    print(frame_line, end=line_feed, file=of)
                    print(output_line, end=line_feed, file=of)
                    print('/', end=line_feed, file=of)
                    print('&gb', end=line_feed, file=of)
                    print(gb_line, end=line_feed, file=of)
                    print('/', end=line_feed, file=of)
                    print('&pb', end=line_feed, file=of)
                    print(pb_line, end=line_feed, file=of)
                    print('/', end=line_feed, file=of)

                return out_path
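
A hedged usage sketch; the owning class name and the conf_in values shown in the comment are placeholders, so the rendered file is illustrative only.

# Hypothetical call (class name assumed):
in_path = Traj_calc.build_MMPBSA_in('./mmpbsa_out/MMPBSA.in')
# With conf_in values startframe=1, endframe=100, interval=1, verbose=1, keep_files=0,
# igb=5, saltcon=0.15, istrng=0.15, fillratio=4.0 the file would read roughly:
#   GB and PB calculation
#   &general
#     startframe=1, endframe=100, interval=1,
#     verbose=1, keep_files=0,
#   /
#   &gb
#     igb=5, saltcon=0.15,
#   /
#   &pb
#     istrng=0.15, fillratio=4.0
#   /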
Example no. 30
0
def _run_train_with_summary(sess, model, x_train, y_train, x_eval, y_eval,
                            train_config):
    saver = tf.train.Saver()
    helper.mkdir(train_config.SUMMARY_DIR)
    summary_writer = tf.summary.FileWriter(logdir=train_config.SUMMARY_DIR)
    summary_writer.add_graph(sess.graph)
    for epoch in range(train_config.EPOCHS):
        # train
        for batch_x, batch_y in helper.get_batch(x_train,
                                                 y_train,
                                                 step=train_config.BATCH_SIZE):
            summary, _ = model.train(
                sess,
                batch_x,
                batch_y,
                dropout_keep_prob=train_config.DROPOUT_KEEP_PROB)
            summary_writer.add_summary(summary)
        # eval on the held-out set
        summary, loss = model.evaluate_loss(sess,
                                            x_eval,
                                            y_eval,
                                            dropout_keep_prob=1.0)
        summary_writer.add_summary(summary)
        _print_eval_results(loss, epoch)
Example no. 31
0
    def ProcessStartup(self, target=""):  # db connection stays open
        if not self.db:
            return
        session_table = self.db['sessions']
        data = dict(
            machine_id=sp.machine_id,
            analysis_id=sp.analysis_id,
            tag=config.tag,  # test if properly escaped
            version=config.version,
            target=target)
        session_id = session_table.insert(data)
        self._setDbSessionId(session_id)
        l.debug('INSERTING: {}'.format(data))  # use pprint

    def ProcessHook(self, ev):
        if not self.db:
            return
        hook_table = self.db['hooks']  # BUG: sqlite3 driver tries to create...
        data = dict(pid=ev.pid,
                    tid=ev.tid,
                    function=ev.function,
                    payload=buffer(ev.payload),
                    exact_param=ev.exact_param,
                    session_id=self._getDbSessionId(),
                    category=ev.category)
        hook_table.insert(data)  # BUG: sqlite3 driver tries to create existing
        l.debug('INSERTING: {}'.format(data))


mkdir(config.log_dir)  # todo: remove this function.
Example no. 32
0
def main():
    # get mutant list
    MutaFlags = []
    with open('Single_Mut_list.txt') as f:
        for line in f:
            MutaFlag = line.strip()
            if MutaFlag != '':
                MutaFlags.append(MutaFlag)
    # deploy mutation
    for Flag in MutaFlags:
        mkdir(Flag)
        with open('./' + Flag + '/HPO4-MD-gen.py', 'w') as of:
            of.write('''import datetime
from Class_PDB import *
from Class_Conf import *
from helper import line_feed

# settings
Config.n_cores = 1
Config.max_core = 2000
Config.PC_cmd = 'srun'
wkflow_log_path = './MD-gen.log'

# MD settings
Config.Amber.box_type = 'oct'
Config.Amber.conf_min['ntr'] = '0'
Config.Amber.conf_heat['restraintmask'] = "':MOL'"
Config.Amber.conf_heat['restraint_wt'] = "100.0"
Config.Amber.conf_heat['tempi'] = '100.0'
Config.Amber.conf_heat['temp0'] = '296.15'

Config.Amber.conf_equi['ntr'] = '0'
Config.Amber.conf_equi['nstlim'] = 500000  # 1ns
Config.Amber.conf_equi['temp0'] = '296.15'

Config.Amber.conf_prod['nstlim'] = 30000000 # 60ns
Config.Amber.conf_prod['ntwx'] = '25000'
Config.Amber.conf_prod['temp0'] = '296.15'

def main():
        starttime = datetime.datetime.now()
        of = open(wkflow_log_path, 'wb', buffering=0)
        # deploy mutation
        of.write(('Working on: '+ ''' + repr(Flag) +
                     ''' +line_feed).encode('utf-8'))
        # --- Preparation ---
        pdb_obj = PDB('WT-HPO4.pdb')
        of.write(('Preparation: '+str(datetime.datetime.now()- starttime)+line_feed).encode('utf-8'))

        # --- Operation ---
        # Mutation
        pdb_obj.Add_MutaFlag(''' + repr(Flag) + ''')
        pdb_obj.PDB2PDBwLeap()
        of.write(('Mutation: p2pwl: '+str(datetime.datetime.now()- starttime)+line_feed).encode('utf-8'))

        # use minimization to relax each mutated PDB
        pdb_obj.PDB2FF(local_lig=0)
        pdb_obj.PDBMin(engine='Amber_GPU')
        of.write(('Mutation: PDBMin: '+str(datetime.datetime.now()- starttime)+line_feed).encode('utf-8'))

        # --- Sample with MD ---
        pdb_obj.PDB2FF(local_lig=0, ifsavepdb=1)
        pdb_obj.PDBMD(engine='Amber_GPU')
        of.write(('MD: '+str(datetime.datetime.now()- starttime)+line_feed).encode('utf-8'))			
			
        endtime = datetime.datetime.now()
        print(endtime - starttime)
        of.close()

if __name__ == "__main__":
        main()''')

        with open('./' + Flag + '/sub-EnzyHTP.cmd', 'w') as of:
            of.write('''#!/bin/bash
#SBATCH --job-name=MD_''' + Flag +
                     '''          # Assign an 8-character name to your job
#SBATCH --account=cla296
#SBATCH --partition=gpu-shared
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
#SBATCH --gpus=1
#SBATCH --mem=50G
#SBATCH --no-requeue
#SBATCH --time=24:00:00         # Total run time limit (HH:MM:SS)

module purge
module load gpu
module load openmpi
module load slurm
module load amber/20

#EnzyHTP
source ~/bin/miniconda3/bin/activate
conda activate MutaGen
export PYTHONPATH=$PYTHONPATH:~/enzyme_workflow

python -u HPO4-MD-gen.py > HPO4-MD-gen.py.out''')
        os.system('cp ./*pdb ' + Flag)
from gym import wrappers
from collections import namedtuple, deque
from models_inverse import QNetwork, Predicter
from torch.utils.tensorboard import SummaryWriter
from torch.autograd import Variable
from datetime import datetime
from helper import mkdir
import gym_pacman
import cv2
from os.path import join, exists
from PIL import Image
from models import E2C, NormalDistribution

now = datetime.now()
dt_string = now.strftime("%d_%m_%Y_%H:%M:%S")
mkdir("", "log_files")
logging.basicConfig(filename="log_files/{}.log".format(dt_string),
                    level=logging.DEBUG)


class Agent():
    def __init__(self, state_size, action_size, config):
        self.seed = config["seed"]
        torch.manual_seed(self.seed)
        np.random.seed(seed=self.seed)
        random.seed(self.seed)
        self.env = gym.make(config["env_name"])
        self.env.seed(self.seed)
        self.state_size = state_size
        self.action_size = action_size
        self.clip = config["clip"]
Example no. 34
0
def gitPull(repoPullDir):
    helper.mkdir(repoPullDir)
    repo = git.Repo(repoPullDir)
    o = repo.remotes.origin
    o.pull()
Example no. 35
0
            print('-l <num>: load ckpt file from <dir>/history/<num>')
            print('-o: command line output result')
            print('-t: testing mode, default: training mode')
            sys.exit()
        elif opt in ('-l'):
            FLAG['ckptFile'] = 'history/%s/train.ckpt' % (arg)
            FLAG['loadHisNum'] = int(arg)
        elif opt in ('-o'):
            FLAG['isTrain'] = False
            FLAG['episodes'] = 1
            FLAG['cliOutput'] = True
        elif opt in ('-t'):
            FLAG['isTrain'] = False
            FLAG['episodes'] = 1

    mkdir('data')
    mkdir('data/%s' % (FLAG['dir']))
    mkdir('data/%s/history' % (FLAG['dir']))

    if not FLAG['isTrain']:
        with open('data/%s/param.csv' % (FLAG['dir'])) as csv_file:
            r = csv.DictReader(csv_file)
            for row in r:
                for column, value in row.items():
                    if value is not None and column not in FLAG['noLoadHeader']:
                        if isinstance(FLAG[column], int):
                            FLAG[column] = int(value)
                        elif isinstance(FLAG[column], float):
                            FLAG[column] = float(value)
                        else:
                            FLAG[column] = value
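
The loop above restores typed FLAG values from data/<dir>/param.csv, so a training run presumably writes that file. A hypothetical writer sketch (not part of the original code) that would produce a file the DictReader loop can read back:

# Hypothetical: persist FLAG so the loop above can restore it later.
import csv
with open('data/%s/param.csv' % (FLAG['dir']), 'w') as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=list(FLAG.keys()))
    writer.writeheader()
    writer.writerow(FLAG)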