Example #1
def test_imagenet_vgg(device='cuda', arch='vgg'):
    # get dataset
    from torchvision import models
    imnet_dict = pkl.load(
        open('../dsets/imagenet/imnet_dict.pkl',
             'rb'))  # contains 6 images (keys: 9, 10, 34, 20, 36, 32)

    # get model and image
    if arch == 'vgg':
        model = models.vgg16(pretrained=True).to(device).eval()
    elif arch == 'alexnet':
        model = models.alexnet(pretrained=True).to(device).eval()
    im_torch = torch.randn(1, 3, 224, 224).to(device)

    # check that full image mask = prediction
    preds = model(im_torch).cpu().detach().numpy()
    cd_score, irrel_scores = cd.cd(np.ones((1, 3, 224, 224)),
                                   im_torch,
                                   model,
                                   device=device)
    cd_score = cd_score.cpu().detach().numpy()
    irrel_scores = irrel_scores.cpu().detach().numpy()
    assert (np.allclose(cd_score, preds, atol=1e-2))
    assert (np.allclose(irrel_scores, irrel_scores * 0, atol=1e-2))

    # check that rel + irrel = prediction for another subset
    # preds = preds - model.hidden_to_label.bias.detach().numpy()
    mask = np.zeros((1, 3, 224, 224))
    mask[:, :, :14] = 1  # keep only the top rows, mirroring the MNIST test below
    cd_score, irrel_scores = cd.cd(mask, im_torch, model, device=device)
    cd_score = cd_score.cpu().detach().numpy()
    irrel_scores = irrel_scores.cpu().detach().numpy()
    assert (np.allclose(cd_score + irrel_scores, preds, atol=1e-2))
Example #2
 def test_reentrant(self):
     directory = mkdtemp()
     directory2 = mkdtemp()
     original = os.getcwd()
     with cd(directory):
         with cd(directory2):
             self.assertEqual(abspath(os.getcwd()), abspath(directory2))
         self.assertEqual(abspath(os.getcwd()), abspath(directory))
     self.assertEqual(abspath(os.getcwd()), abspath(original))
Example #3
 def test_directory_not_deleted_afterward(self):
     directory = self.get_temp_dir()
     with cd(directory):
         self.assertTrue(exists(directory), "given directory was deleted!")
     self.assertTrue(exists(directory), "given directory was deleted!")
     with cd(directory):
         with open('hello.txt', mode='wt') as f:
             f.write('hello!')
         filename = abspath('hello.txt')
     self.assertTrue(exists(filename), "file in directory was deleted!")
Example #4
 def test_changes_even_with_exceptions(self):
     directory = mkdtemp()
     original = os.getcwd()
     with self.assertRaises(ValueError):
         with cd(directory):
             raise ValueError
     self.assertEqual(abspath(os.getcwd()), abspath(original))
     with self.assertRaises(SystemExit):
         with cd(directory):
             raise SystemExit
     self.assertEqual(abspath(os.getcwd()), abspath(original))
Example #5
def process_local_repo(location, output_dir, repo_name):
    """Convert a local repository to a series of JSON objects.

    Args:
        location (str): The path to a local repository.
        output_dir (str): The path to the directory to save the output files
            to.
        repo_name (str): The name to save in the JSON objects as the
            repository name.

    Returns:
        None
    """
    with cd(location):
        is_path_exist(output_dir)

        # Produce a JSON object from the blame of each file
        output_file = output_dir + "/" + repo_name.replace('/', '_') + ".json"
        with open(output_file, 'w') as f:
            for file in get_filelist(location):
                for line in btj.file_to_json(file, repo_name):
                    f.write(line + "\n")

        # Produce a map of files to the users who edited them
        output_file_map = output_dir + "/" + repo_name.replace('/', '_') + "_file_to_user_map.json"
        with open(output_file_map, 'w') as f:
            for line in ufm.repo_to_file_map_json(repo_name):
                f.write(line + "\n")
Example #6
def get_local_repo_name(location):
    """The basename of a local repository.

    If a local repository is located at:

        /path/to/local/repo/

    This function will return "repo".

    This function must be called from within the repository, so using cd() to
    change the directory is advised.

    Args:
        location (str): The path to a local repository.

    Returns:
        str: The basename of the repository.

    """
    with cd(location):
        command = [
            "git",
            "rev-parse",
            "--show-toplevel",
        ]
        repo_name = subprocess.check_output(command).decode().strip()
        base = os.path.basename(repo_name)
        return base
Example #7
def process_local_repo(location, output_dir, repo_name):
    """Convert a local repository to a series of JSON objects.

    Args:
        location (str): The path to a local repository.
        output_dir (str): The path to the directory to save the output files
            to.
        repo_name (str): The name to save to the JSON objects as the repository
            name.

    Returns:
        None
    """
    with cd(location):
        is_path_exist(output_dir)

        output_file_code = output_dir + "/" + repo_name.replace(
            '/', '_') + "_code.txt"
        file_list = get_filelist(location)
        with open(output_file_code, 'w+') as out:
            for i, file in enumerate(file_list):
                with open(file, 'r') as input_file:
                    while True:
                        data = input_file.read(100000)
                        if data == '':
                            break
                        out.write(data)
                if i % 500 == 0 and i != 0:
                    print(f'Copied {i} out of {len(file_list)} files')
Example #8
def process_local_repo(location, output_dir, repo_name):
    """Convert a local repository to a series of JSON objects.

    Args:
        location (str): The path to a local repository.
        output_dir (str): The path to the directory to save the output files
            to.
        repo_name (str): The name to save in the JSON objects as the
            repository name.

    Returns:
        None

    """
    with cd(location):
        is_path_exist(output_dir)

        # Produce a JSON object from the blame of each file
        output_file = output_dir + "/" + repo_name.replace('/', '_') + ".json"
        with open(output_file, 'w') as f:
            for file in get_filelist(location):
                for line in btj.file_to_json(file, location, repo_name):
                    f.write(line + "\n")

        # Produce a map of files to the users who edited them
        output_file_map = output_dir + "/" + repo_name.replace(
            '/', '_') + "_file_to_user_map.json"
        with open(output_file_map, 'w') as f:
            for line in ufm.repo_to_file_map_json(repo_name):
                f.write(line + "\n")
Example #9
def get_scores_2d(model,
                  method,
                  ims,
                  im_torch=None,
                  pred_ims=None,
                  model_type='mnist',
                  device='cuda'):
    scores = []
    if method == 'cd':
        for i in range(ims.shape[0]):  # can use tqdm here, need to use batches
            scores.append(
                cd.cd(np.expand_dims(ims[i], 0),
                      im_torch,
                      model,
                      model_type,
                      device=device)[0].data.cpu().numpy())
        scores = np.squeeze(np.array(scores))
    elif method == 'build_up':
        for i in range(ims.shape[0]):  # can use tqdm here, need to use batches
            scores.append(pred_ims(model, ims[i])[0])
        scores = np.squeeze(np.array(scores))
    elif method == 'occlusion':
        for i in range(ims.shape[0]):  # can use tqdm here, need to use batches
            scores.append(pred_ims(model, ims[i])[0])
        scores = -1 * np.squeeze(np.array(scores))
    if scores.ndim == 1:
        scores = scores.reshape(1, -1)
    return scores
Example #11
def train(args, model, device, train_loader, optimizer, epoch,
          regularizer_rate, until_batch=-1):
    model.train()
    for batch_idx, (data, target) in enumerate(train_loader):
        if until_batch != -1 and batch_idx > until_batch:
            break
        data, target = data.to(device), target.to(device)

        optimizer.zero_grad()
        output = model(data)
        loss = F.nll_loss(output, target)

        if regularizer_rate != 0:
            # `blob` (the mask over penalized pixels) and `num_samples` are
            # defined at module level in the source file.
            add_loss = torch.zeros(1).to(device)
            if args.grad_method == 0:
                # contextual-decomposition (CD) penalty
                rel, irrel = cd.cd(blob, data, model)
                add_loss += torch.nn.functional.softmax(
                    torch.stack((rel.view(-1), irrel.view(-1)), dim=1),
                    dim=1)[:, 0].mean()
                (regularizer_rate * add_loss + loss).backward()
            elif args.grad_method == 1:
                # gradient penalty
                add_loss += gradient_sum(data, target,
                                         torch.FloatTensor(blob).to(device),
                                         model, F.nll_loss)
                (regularizer_rate * add_loss).backward()
                optimizer.step()
                loss = F.nll_loss(output, target)
                loss.backward()
            elif args.grad_method == 2:
                # expected-gradients penalty
                for j in range(len(data)):
                    add_loss += (eg_scores_2d(model, data, j, target,
                                              num_samples) *
                                 torch.FloatTensor(blob).to(device)).sum()
                (regularizer_rate * add_loss).backward()
                optimizer.step()
                loss = F.nll_loss(output, target)
                loss.backward()
        else:
            add_loss = torch.zeros(1)
            loss.backward()

        print(torch.cuda.max_memory_allocated(0) / np.power(10, 9))
        optimizer.step()

        if batch_idx % args.log_interval == 0:
            pred = output.argmax(dim=1, keepdim=True)
            acc = 100. * pred.eq(target.view_as(pred)).sum().item() / len(target)
            s.losses_train.append(loss.item())
            s.accs_train.append(acc)
            s.cd.append(add_loss.item())
Example #12
def have_twohundred_commits(sha, lastVersionHave):
  with cd("projects/" + PROJECT):
    call(["git", "checkout", sha])
    qtdCommits = int(check_output(["ruby", "./../../src/count-commits.rb", "./../../projects/" + PROJECT]))
    call(["git", "reset", "--hard", "master"])
    if (((qtdCommits % 200) == 0 and qtdCommits > lastVersionHave) or (qtdCommits > lastVersionHave + 200)):
      return True
  return False
Example #13
 def test_has_current_and_previous_attributes(self):
     directory = mkdtemp()
     original = os.getcwd()
     with cd(directory) as dirs:
         self.assertEqual(abspath(original), abspath(str(dirs.previous)))
         self.assertEqual(abspath(directory), abspath(str(dirs.current)))
     self.assertEqual(abspath(original), abspath(str(dirs.previous)))
     self.assertEqual(abspath(directory), abspath(str(dirs.current)))
Example #14
 def test_changing_directory_still_works(self):
     directory = mkdtemp()
     directory2 = mkdtemp()
     original = os.getcwd()
     with cd(directory):
         self.assertEqual(abspath(os.getcwd()), abspath(directory))
         os.chdir(directory2)
         self.assertEqual(abspath(os.getcwd()), abspath(directory2))
     self.assertEqual(abspath(os.getcwd()), abspath(original))
Example #15
 def __build_dpdk(self, c_flag):
     with cd(self.dpdk_path):
         cmd = ["make", "uninstall"]
         shell.run_cmd("Uninstalling DPDK", cmd, self.logfd)
         if c_flag == 1:
             cmd = ["make", "config", "T=" + self.tgt]
             shell.run_cmd("Configuring DPDK", cmd, self.logfd)
         cmd = ["make", "install", "T=" + self.tgt]
         shell.run_cmd("Building and installing DPDK", cmd, self.logfd)
Example #16
 def __build_vmxnet3(self):
     with cd(self.vmxnet3_path):
         cmd = ["make", "clean"]
         shell.run_cmd("Cleaning up vmxnet3", cmd, self.logfd)
         cmd = [
             "make", "all", "T=" + self.tgt,
             "RTE_INCLUDE=" + self.dpdk_path + "/" + self.tgt + "/include"
         ]
         shell.run_cmd("Building vmxnet3", cmd, self.logfd)
Example #17
 def test_initialization_before_context_entering(self):
     directory = mkdtemp()
     new_original = mkdtemp()
     old_original = os.getcwd()
     dirs = cd(directory)
     self.assertEqual(abspath(os.getcwd()), abspath(old_original))
     os.chdir(new_original)
     with dirs:
         self.assertEqual(abspath(os.getcwd()), abspath(directory))
     self.assertEqual(abspath(os.getcwd()), abspath(new_original))
Example #18
def createzipfile(files):
    with tempfile.TemporaryDirectory() as td:
        for n, v in files.items():
            with open(td + "/" + n, "w") as f:
                print(v, file=f)
        archivename = "env.zip"
        with cd.cd(td):
            subprocess.run(['zip', '-qjr', archivename, '.'])
            with open(archivename, 'rb') as f:
                return f.read()
Example #19
 def test_enter_and_exit_methods(self):
     directory = mkdtemp()
     new_original = mkdtemp()
     old_original = os.getcwd()
     dirs = cd(directory)
     self.assertEqual(abspath(os.getcwd()), abspath(old_original))
     os.chdir(new_original)
     dirs.enter()
     self.assertEqual(abspath(os.getcwd()), abspath(directory))
     dirs.exit()
     self.assertEqual(abspath(os.getcwd()), abspath(new_original))
Example #20
def change_consts_py(path, input_file):
    with cd(path):
        for line in fileinput.input("consts.py", inplace=True):
            if line.startswith('INPUT_DIR'):
                # Path objects can't be concatenated to str; convert first
                print("INPUT_DIR = '" + str(Path(input_file).parent) + "'")
            elif line.startswith('CONFIG_FILE'):
                print("CONFIG_FILE = '" + input_file + "'")
            else:
                # `line` already ends with a newline; avoid doubling it
                print(line, end='')
Example #21
def run_module(name, dir):
    dir = Path(dir).absolute().resolve()
    with cd(dir):

        set_user_configs(name)

        LOGS_DIR.mkdir(parents=True, exist_ok=True)

        run_notebook_path = dir / ('%s.ipynb' % RUN_SCRIPT_NAME)
        run_script_path = dir / ('%s.sh' % RUN_SCRIPT_NAME)
        run_notebook = run_notebook_path.exists()
        run_shell_script = run_script_path.exists()

        exception = None
        with OUT_PATH.open('w') as out, ERR_PATH.open('w') as err:
            if run_notebook and run_shell_script:
                raise Exception('Found both %s and %s' % (run_notebook_path, run_script_path))
            elif run_notebook:
                from papermill import execute_notebook, PapermillExecutionError
                print('Executing notebook %s in-place' % run_notebook_path)
                try:
                    execute_notebook(
                        str(run_notebook_path),
                        str(run_notebook_path),
                        progress_bar=False,
                        stdout_file=out,
                        stderr_file=err,
                        kernel_name=JUPYTER_KERNEL_NAME,
                    )
                except PapermillExecutionError as e:
                    if e.evalue.startswith(EARLY_EXIT_EXCEPTION_MSG_PREFIX):
                        print('Run notebook %s exited with "OK" msg' % run_notebook_path)
                    else:
                        exception = e
            elif run_shell_script:
                cmd = [ str(run_script_path) ]
                print('Running: %s' % run_script_path)
                try:
                    check_call(cmd, stdout=out, stderr=err)
                except CalledProcessError as e:
                    exception = e
            else:
                raise Exception('No runner script found at %s or %s' % (run_notebook_path, run_script_path))

            if exception:
                with open(FAILURE_PATH, 'w') as f:
                    f.write('1\n')
                err.write(str(exception))
            else:
                Path(SUCCESS_PATH).touch()

    print('Module finished: %s' % name)
Example #22
    def __clone_remote(self):
        with cd(self.__tempdir):
            command = [
                "git",
                "clone",
                "--",
                self.remote_location,
            ]
            subprocess.check_call(command)

        # Set the local directory. The only item in the directory will be the
        # repository.
        items = os.listdir(self.__tempdir)
        self.local_location = self.__tempdir + '/' + items[0]
Example #24
 def getZipValue(self):
     if self.ends != ".pl":
         raise ErrorPL("can't zip non pl files")
     import tempfile
     with tempfile.TemporaryDirectory() as thedir:
         self._createdir(Path(thedir))
         archivename = 'env.zip'
         with cd.cd(thedir):
             subprocess.run(['zip', '-qjr', archivename, '.'])
             with open(archivename, 'rb') as f:
                 return f.read()
Example #25
 def test_no_argument_given(self):
     original = os.getcwd()
     dirs = cd()
     with dirs:
         self.assertNotEqual(abspath(os.getcwd()), abspath(original))
         self.assertEqual(os.listdir(), [])
         with open('hello.txt', mode='wt') as f:
             f.write('hello!')
         full_path = abspath('hello.txt')
         self.assertNotEqual(dirname(full_path), abspath(original))
         with open(full_path, mode='rt') as f:
             self.assertEqual(f.read(), 'hello!')
     self.assertEqual(abspath(os.getcwd()), abspath(original))
     self.assertFalse(exists(full_path), "temporary directory not deleted")
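The tests in Examples #2, #3, #4, #13, #14, #17, #19, and #25 collectively pin down the behavior of the cd() helper they exercise: entering changes the working directory and exiting restores the previous one (even when an exception or SystemExit is raised), the object exposes current and previous attributes plus enter()/exit() methods, constructing it does not change directory until the context is entered, and calling cd() with no argument creates a temporary directory that is deleted on exit. Below is a minimal sketch satisfying exactly that behavior; the actual implementations imported by these examples may differ.

import os
import shutil
import tempfile


class cd:
    """Change the working directory; restore the previous one on exit."""

    def __init__(self, directory=None):
        # No chdir here: the tests expect the cwd to stay put until entry.
        self.directory = directory
        self._temporary = directory is None
        self.current = None
        self.previous = None

    def __enter__(self):
        self.previous = os.getcwd()  # captured at entry, not at construction
        if self._temporary:
            self.directory = tempfile.mkdtemp()
        os.chdir(self.directory)
        self.current = self.directory
        return self

    def __exit__(self, etype, value, traceback):
        os.chdir(self.previous)
        if self._temporary:
            # Only directories this object created are removed; directories
            # passed in by the caller are left alone (see Example #3).
            shutil.rmtree(self.directory, ignore_errors=True)

    # Plain-method aliases exercised by test_enter_and_exit_methods.
    def enter(self):
        return self.__enter__()

    def exit(self):
        self.__exit__(None, None, None)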
Example #26
def test_mnist(device='cuda'):
    # load the dataset
    sys.path.append('../dsets/mnist')
    import dsets.mnist.model
    im_torch = torch.randn(1, 1, 28, 28).to(device)

    # load the model
    model = dsets.mnist.model.Net().to(device)
    model.load_state_dict(
        torch.load('../dsets/mnist/mnist.model', map_location=device))
    model = model.eval()

    # check that full image mask = prediction
    preds = model.logits(im_torch).cpu().detach().numpy()
    cd_score, irrel_scores = cd.cd(np.ones((1, 1, 28, 28)),
                                   im_torch,
                                   model,
                                   model_type='mnist',
                                   device=device)
    cd_score = cd_score.cpu().detach().numpy()
    irrel_scores = irrel_scores.cpu().detach().numpy()
    assert (np.allclose(cd_score, preds, atol=1e-2))
    assert (np.allclose(irrel_scores, irrel_scores * 0, atol=1e-2))

    # check that rel + irrel = prediction for another subset
    # preds = preds - model.hidden_to_label.bias.detach().numpy()
    mask = np.zeros((28, 28))
    mask[:14] = 1
    cd_score, irrel_scores = cd.cd(mask,
                                   im_torch,
                                   model,
                                   model_type='mnist',
                                   device=device)
    cd_score = cd_score.cpu().detach().numpy()
    irrel_scores = irrel_scores.cpu().detach().numpy()
    assert (np.allclose(cd_score + irrel_scores, preds, atol=1e-2))
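Examples #1 and #26 check the same two properties of cd.cd(): with an all-ones mask the relevant score matches the model's prediction (and the irrelevant score is near zero), and for any mask the relevant and irrelevant scores sum to the prediction. Under that contract, a hypothetical helper (patch_importance below is not part of the source) could score how much a masked region contributes to a given class:

def patch_importance(mask, im_torch, model, label, model_type='mnist', device='cuda'):
    # rel + irrel = logits (the decomposition verified above), so rel[0, label]
    # is the part of the class logit attributed to the masked pixels.
    rel, irrel = cd.cd(mask, im_torch, model, model_type=model_type, device=device)
    return rel.detach().cpu().numpy()[0, label]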
Example #27
def getRepoByName(name):
    """
    find the repo name in directory premierlangage/repo/
    >>> getRepoByName(None).endswith("/premierlangage/repo/plbank")
    True
    >>> getRepoByName("plbank").endswith("/premierlangage/repo/plbank")
    True
    """
    if name is None:
        name = "plbank"
    with cd.cd(os.path.dirname(__file__)):
        prems = subprocess.check_output(
            ['git', 'rev-parse', '--show-toplevel']).rstrip().decode("utf-8")
    p = Path(prems+"/repo/"+name)
    if not p.exists():
        raise Exception(str(p)+" doesn't exist")
    return str(p)
Example #28
def main(user, target, lichess_api_key, args):
    print("Downloading %s's games to %s:" % (user, target))
    #Make my target directory
    try:
        os.makedirs(target)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    with cd.cd(target):
        if args.chess:
            archives = 'https://api.chess.com/pub/player/%s/games/archives' % user
            for archive in requests.get(archives).json()['archives']:
                download_archive(archive, target)

        if args.lichess:
            getLichessGames(url, target)  # NOTE: url is not defined in this snippet
Example #29
    def __init__(self, connection):
        """
        :param connection: JMX address in form <hostname>:<port>
        NB, this is the port for JMX not Kafka, for example brokername:9999
        You can get JMX ports from zookeeper using KafkaInfo.jmxports()
        """
        if self._is_windows():
            print("ERROR JmxMetrics will not run on Windows")
            return

        connection_timeout = 2

        with cd(os.path.dirname(inspect.stack()[0][1])):
            self.jmxterm = pexpect.spawn("java -jar jmxterm.jar")
            self.jmxterm.expect_exact("$>")  # got prompt, can continue
            self.jmxterm.sendline("open " + connection)
            self.jmxterm.expect_exact(
                "#Connection to " + connection + " is opened",
                connection_timeout)
Example #30
def create_project(parametersobject):
	dd = parametersobject.deriveddic
	pd = parametersobject.parameterdic
	name = pd['Initial_dimer_pdb']
	steps = pd['Timesteps']
	hours = pd['Simulation_hours']
	minutes = pd['Simulation_minutes']
	number_of_orientations = pd['Number_of_orientations']
	separation_distance = pd['COM_separation']
	Boundary_margin = pd['Boundary_margin']
	Initial_dimer_pdb = pd['Initial_dimer_pdb']
	move_chain_id = dd['smaller_chain']
	fix_chain_id = dd['bigger_chain']
	Jobname = pd['Jobname']
	Path_to_awsem = pd['Path_to_awsem']
	Path_to_lmp_serial = pd['Path_to_lmp_serial']
	Python2_command = pd['Python2_command']
	model_number = 0
	
	f_orientation_details = open("create_project_data.txt", "w+")
	max_radius = 0.0
	
	for orientation in range(1,number_of_orientations+1):
		output_pdb_name = 'r_'+str(orientation).zfill(3)+'.pdb'
		input_file_name = os.path.normpath(os.getcwd()+'/'+Initial_dimer_pdb)
		with cd("md_input"):
			results = create_random_pdb(separation_distance = separation_distance, move_chain_id = move_chain_id, fix_chain_id = fix_chain_id, input_file_name = input_file_name, output_pdb_name = output_pdb_name, model_number = model_number)
		max_radius = max(max_radius, results["Max_distance"])
		f_orientation_details.write('orientation number\t'+str(orientation)+'\n\n')
		for key in results:
			f_orientation_details.write(key+'\t\t'+str(results[key])+'\n')
		f_orientation_details.write('-------------------------\n\n')

	max_radius += Boundary_margin
	f_orientation_details.close()

	group_names = []
	name = pd['Initial_dimer_pdb'][:-4]
	f_in = open(name+"_recentred"+".in", "r")
	for line in f_in:
		if line.strip().split()[:1] == ['group']:
			group_names.append(line)
	f_in.close()
	os.remove(name+"_recentred"+".in")
	
	copy(name+"_recentred"+".seq", 'md_input')

	stride = os.path.normpath(Path_to_awsem+'dimer_interface_protocol/stride/stride')
	os.system(stride + ' '+pd['Initial_dimer_pdb']+' > ssweight.stride')
	location = os.path.normpath(Path_to_awsem+"create_project_tools/stride2ssweight.py")
	os.system(Python2_command+' '+location+' > md_input/ssweight')

	with cd("md_input"):	
		for orientation in range(1,number_of_orientations+1):
			file_name_start = 'r_'+str(orientation).zfill(3)
			location = os.path.normpath(Path_to_awsem+"create_project_tools/PDBToCoordinates.py")
			os.system(Python2_command+" "+location+' '+file_name_start+" "+file_name_start+".coord")
			location = os.path.normpath(Path_to_awsem+"create_project_tools/CoordinatesToWorkLammpsDataFile.py")
			os.system(Python2_command+" "+location+" "+file_name_start+".coord "+file_name_start+".data -b")
			os.remove(file_name_start+".in")
			os.remove(file_name_start+".seq")
			f = open(file_name_start+".pbs", "w+")
			f.write("#!/bin/bash\n")
			f.write("#PBS -S /bin/bash\n")
			f.write("#PBS -l pmem=512mb\n")
			f.write("#PBS -l nodes=1:ppn=1\n")
			f.write("#PBS -l walltime="+str(hours).zfill(2)+':'+str(minutes).zfill(2)+':00\n')
			f.write("#PBS -N "+Jobname+str(orientation).zfill(3)+'\n')
			f.write("cd $PBS_O_WORKDIR\n")
			f.write(Path_to_lmp_serial+" < r_"+str(orientation).zfill(3)+".in\n")
			f.close()


		f_submit_all_pbs = open("submitall.sh", "w+")
		for orientation in range(1,number_of_orientations+1):
			f_submit_all_pbs.write('qsub '+'r_'+str(orientation).zfill(3)+'.pbs >> submited.txt\n')
		f_submit_all_pbs.close()
		
		src = os.path.normpath(Path_to_awsem + '/dimer_interface_protocol/files')
		src_files = os.listdir(src)
		dest = os.getcwd()
		for file_name in src_files:
			full_file_name = os.path.join(src, file_name)
			if (os.path.isfile(full_file_name)):
				copy(full_file_name, dest)

		first_chain_length = dd['first_chain_length']
		second_chain_length = dd['second_chain_length']
		f_fragmem = open("fragsLAMW.mem", "w+")
		f_gro = open("chain1.gro", "r")
		line = next(f_gro)
		line = next(f_gro)
		line = next(f_gro)
		g1 = int(line.strip().split()[0])
		f_gro.close()
		f_gro = open("chain2.gro", "r")
		line = next(f_gro)
		line = next(f_gro)
		line = next(f_gro)
		g2 = int(line.strip().split()[0])
		f_gro.close()
		f_fragmem.write("[Target]\nquery\n\n[Memories]\n")
		f_fragmem.write("chain1.gro %d  %d %d 1\n" %(1, g1, first_chain_length))
		f_fragmem.write("chain2.gro %d %d %d 1" %(1+first_chain_length, g2, second_chain_length))
		f_fragmem.close()

		for orientation in range(1,number_of_orientations+1):
			random_integer = np.random.randint(low = 1000, high = 9999999)
			file_name_start = 'r_'+str(orientation).zfill(3)
			
			
			f = open(file_name_start+".in", "w+")

			f.write('# 3d protein simulation\n')
			f.write('\n')
			f.write('units real\n')
			f.write('\n')
			f.write('timestep 5\n')
			f.write('\n')
			f.write('dimension\t3\n')
			f.write('\n')
			f.write('boundary f f f\n')
			f.write('\n')
			f.write('log\t'+file_name_start+'.log\t  \n')
			f.write('neighbor\t10 bin\n')
			f.write('neigh_modify\tdelay 5\n')
			f.write('\n')
			f.write('atom_modify sort 0 0.0\n')
			f.write('\n')
			f.write('special_bonds fene\n')
			f.write('\n')
			f.write('region\tr1 sphere 0.0 0.0 0.0 {0:.2f} side in \n'.format(max_radius))
			f.write('\n')
			f.write('atom_style\tawsemmd\n')
			f.write('\n')
			f.write('\n')
			f.write('bond_style harmonic\n')
			f.write('\n')
			f.write('pair_style vexcluded 2 3.5 3.5\n')
			f.write('\n')
			f.write('read_data '+file_name_start+'.data\n')
			f.write('\n')
			f.write('pair_coeff * * 0.0\n')
			f.write('pair_coeff 1 1 20.0 3.5 4.5\n')
			f.write('pair_coeff 1 4 20.0 3.5 4.5\n')
			f.write('pair_coeff 4 4 20.0 3.5 4.5\n')
			f.write('pair_coeff 3 3 20.0 3.5 3.5\n')
			f.write('\n')
			f.write('\n')
			f.write('velocity\tall create 300.0 '+str(random_integer)+'\n')
			f.write('\n')
			for line in group_names:
				f.write(line)
				f.write('\n')
			n = dd['first_chain_max_id']
			Dump_time = pd['Dump_time']
			Restart_time = pd['Restart_time']
			f.write('group\t\tchain_1 id <= %d\n' % (n))
			f.write('group\t\tchain_2 id >= %d\n' % (n+1))
			f.write('\n')
			f.write('fix\t\t  1 all nvt temp 300.0 300.0 10.0\n')
			f.write('fix\t\t  2 alpha_carbons backbone beta_atoms oxygens fix_backbone_coeff.data '+name+"_recentred"+'.seq\n')
			f.write('fix\t\t  3 all wall/region r1 harmonic 10.0 1.0 5.0\n')
			if dd['first_chain_is_bigger']:
				f.write('fix\t\t  4 chain_1 recenter 0.0 0.0 0.0 \n')
			else:
				f.write('fix\t\t  4 chain_2 recenter 0.0 0.0 0.0 \n')
			f.write('\n')
			f.write('\n')
			f.write('\n')
			f.write('\n')
			f.write('thermo_style\tcustom step etotal pe ke temp evdwl enthalpy eangle epair emol\n')
			f.write('thermo\t\t5000\n')
			f.write('dump\t\t1 all atom '+str(Dump_time)+' '+file_name_start+'.lammpstrj\n')
			f.write('\n')
			f.write('dump_modify\t1 sort id\n')
			f.write('\n')
			f.write('restart\t\t%d '% (5000)+file_name_start+'.restarttemp1 '+file_name_start+'.restarttemp2\n' )
			f.write('restart\t\t%d '% (Restart_time)+file_name_start+'.restart\n' )
			f.write('\n')
			f.write('variable E_bond  equal emol\n')
			f.write('variable E_chain equal f_2[1]\n')
			f.write('variable E_excl  equal epair\n')
			f.write('variable E_chi   equal f_2[3]\n')
			f.write('variable E_rama  equal f_2[4]\n')
			f.write('variable E_dssp  equal f_2[6]\n')
			f.write('variable E_pap   equal f_2[7]\n')
			f.write('variable E_water equal f_2[8]\n')
			f.write('variable E_helix equal f_2[10]\n')
			f.write('variable E_fmem  equal f_2[12]\n')
			f.write('variable E_P     equal v_E_chain+v_E_chi+v_E_rama+v_E_water+v_E_helix+v_E_fmem+v_E_excl+v_E_bond+v_E_dssp+v_E_pap\n')
			f.write('variable E_K     equal ke\n')
			f.write('variable E_total equal v_E_P+v_E_K\n')
			f.write('variable e_total equal etotal\n')
			f.write('variable Step equal step\n')
			f.write('variable p_e equal pe\n')
			f.write('fix energy all print 5000 "${Step} ${e_total} ${p_e} ${E_K} ${E_chain} ${E_bond} ${E_chi} ${E_rama} ${E_excl} ${E_dssp} ${E_pap} ${E_water} ${E_helix} ${E_fmem} ${E_P} ${E_total} " file '+file_name_start+'_energy.log screen no\n')
			f.write('\n')
			f.write('\n')
			f.write('\n')
			f.write('\n')
			f.write('reset_timestep\t0\n')
			f.write('run\t\t'+str(steps)+'\n')
			f.close()
Example #31
import cd
import oem
import office97

print("Available options:")
print("1. CD Key")
print("2. OEM Key")
print("3. Office 97 Key")
print("4. Quit")
sel = input("Select an option: ")
if sel == "1":
    print("\nCD Key: " + cd.cd())
elif sel == "2":
    print("\nOEM Key: " + oem.oem())
elif sel == "3":
    print("\nOffice 97 Key: " + office97.office97())

Example #32
def set_svn_ignore_files(template_path):
    # TODO
    with cd(template_path):
        # check_call needs an argument list (or shell=True) to run a command
        check_call(["svn", "update", "--set-depth", "exclude", "local.properties"])
        check_call(["svn", "update", "--set-depth", "exclude", "ant.properties"])
        check_call(["svn", "update", "--set-depth", "exclude", "project.properties"])
Example #33
 def __exit__(self, etype, value, traceback):
     if self.repo_dir:
         with cd(self.repo_dir):
             v = vagrant.Vagrant(quiet_stdout=False)
             v.halt()
Example #34
    parser.add_argument("source_path", help="Path to file to comiple")
    parser.add_argument(
        "-i", "--interactive", help="runs pdftex with -interaction=nonstopmode", default=False, action="store_true"
    )
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbose", help="Be verbose about what is going on", default=False, action="store_true")
    group.add_argument(
        "-q",
        "--quiet",
        help="Suppress normal output. Returns >0 on error, 0 otherwise.",
        default=False,
        action="store_true",
    )
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)
    elif args.quiet:
        logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.CRITICAL)
    else:
        logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)

    check_file_exists(args.source_path)

    sourcefolder, filepath = args.source_path.rsplit(os.path.sep, 1)
    with cd(sourcefolder):
        output = compile_latex(filepath, args.interactive)

        if not args.interactive:
            parse_output(output)
Example #35
def cdep(model, data, blobs):
    rel, irrel = cd.cd(blobs, data, model)
    return torch.nn.functional.softmax(torch.stack(
        (rel.view(-1), irrel.view(-1)), dim=1),
                                       dim=1)[:, 0].mean()
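This is the same CD-based penalty that the args.grad_method == 0 branch of Example #11 computes inline before adding it to the task loss.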
Example #36
 def __enter__(self):
     if self.repo_dir:
         with cd(self.repo_dir):
             v = vagrant.Vagrant(quiet_stdout=False)
             v.up()
             print("...virtual cluster is up.")
Example #37
print('test_cmd: ', test_cmd)

# Status 0 means no error.
stds_docker_status = {}
stds_docker_status_headers = ['Line', 'T#', 'Environment Name', 'Status']
for s in stds:
    docker_build_up_cmd = s.getDockerComposeCMD()
    docker_down_cmd = s.getDockerComposeDownCMD()
    print('docker_build_cmd: ', docker_build_up_cmd)
    with cd(s.getCodePath()):
        if args.down:
            print(f'Down Docker compose with all resources of {s.student_directory}')
            os.system(docker_down_cmd)
            
        print(f'Executing Docker compose of {s.student_directory}')
        returned_value = os.system(docker_build_up_cmd)
        print('RETURNED STATUS = ', returned_value)
        stds_docker_status[s.env_name] = [s.line, s.team, s.env_name, returned_value]
print(f'stds_docker_status: {stds_docker_status}')

status_file_path = STATUS_FILE_PATH

## Write csv
with open(status_file_path, 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=',',
Example #38
#!/usr/bin/python

import os
from cd import cd

ck_path = os.getcwd() + '/images/CK+/cohn-kanade-plus-images/'
with cd(ck_path):
    all_dir = os.listdir('.')
    for directory in all_dir: #S-level
        if directory[0] == '.':
            continue
        else:
            subdir_path = ck_path + directory + '/'
            with cd(subdir_path):
                all_subdirs = os.listdir('.') #0-level
                for subdir in all_subdirs:
                    if subdir[0] == '.':
                        continue
                    else:
                        img_level_path = subdir_path + subdir + '/'
                        with cd(img_level_path):
                            all_imgs = os.listdir('.')
                            last_img_path = img_level_path + all_imgs[-1]
                            print(last_img_path)
Example #39
import sys
import os
from cd import cd
from subprocess import call

PROJECT = ''
if len(sys.argv) > 1:
  PROJECT = sys.argv[1]
else:
  print('Give parameter (project name)')
  sys.exit()

PATH = '../minning-util-codes/DBs/' + PROJECT + '/parts'

folders = next(os.walk(PATH))[1]

for i in range(len(folders)):
  i += 1
  file_commits = "%s/%i_part/%i_part-all.txt" % (PATH, i, i)
  with cd("data/sentistrength"):
    call(["java", "-jar", "sentistrength-0.1.jar", "sentidata", "sentistrength_data/", "input", "../../%s" % file_commits, "explain"])