Example #1
    def __init__(self, view):
        self.window_id = view.window().id()
        settings = get_settings(view)

        # infer `python_interpreter` and `python_virtualenv`
        python_interpreter = settings.get('python_interpreter')
        python_virtualenv = settings.get('python_virtualenv')

        if python_interpreter and not python_virtualenv:
            python_virtualenv = up(up(python_interpreter))

        if python_virtualenv and not python_interpreter:
            python_interpreter = join(python_virtualenv, 'bin', 'python')

        if python_virtualenv and python_interpreter:
            self.env = Environment(python_virtualenv, python_interpreter)
        else:
            self.env = jedi.get_default_environment()

        # prepare the extra packages if any
        extra_packages = settings.get('extra_packages')
        if extra_packages:
            self.sys_path = self.env.get_sys_path() + extra_packages
        else:
            self.sys_path = None

        # how to autocomplete arguments
        self.complete_funcargs = settings.get('complete_funcargs')
Example #2
    def __init__(self, repo_url, repo_name, repo_lang):
        self.repo_url = repo_url
        self.repo_name = repo_name
        self.repo_lang = repo_lang
        self.repo_obj = git2repo.git2repo(self.repo_url, self.repo_name)
        self.repo = self.repo_obj.clone_repo()
        self.cwd = Path(os.getcwd())
        if platform.system() == 'Darwin' or platform.system() == 'Linux':
            self.repo_path = os.getcwd() + '/temp_repo/' + self.repo_name
            self.file_path = up(
                self.cwd) + '/results/refactored/' + self.repo_name + '.csv'
        else:
            self.repo_path = os.getcwd() + '\\temp_repo\\' + self.repo_name
            self.file_path = up(
                self.cwd) + '\\results\\refactored\\' + self.repo_name + '.csv'
        print(self.file_path)
        self.refactored_pairs = self.read_commits()
        self.all_commits = self.repo_obj.get_current_commit_objects()
        # Reference current directory, so we can go back after we are done.

        # Generate path to store udb files
        self.udb_path = self.cwd.joinpath(".temp", "udb")

        # Create a folder to hold the udb files
        if not self.udb_path.is_dir():
            os.makedirs(self.udb_path)

        # Generate source path where the source file exist
        self.source_path = self.cwd.joinpath(".temp", "sources",
                                             self.repo_name)
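The platform branch above exists only to pick a path separator; a minimal sketch of the same paths built portably with os.path.join (the repo name is hypothetical):

import os
from os.path import dirname as up, join

repo_name = 'some_repo'  # hypothetical
repo_path = join(os.getcwd(), 'temp_repo', repo_name)
file_path = join(up(os.getcwd()), 'results', 'refactored', repo_name + '.csv')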
Example #3
def getLogDir():
    '''
    Makes all code simpler. If the aliases are changed
    in the imports section, that could be an issue.
    '''
    logdir = up(up(__file__))
    return join(logdir, "logs")
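Every snippet on this page leans on the same alias; a minimal sketch of how repeated up() calls peel off path components (the /opt/app path is hypothetical):

from os.path import dirname as up, join

p = '/opt/app/src/module.py'    # hypothetical file location
print(up(p))                    # /opt/app/src
print(up(up(p)))                # /opt/app
print(join(up(up(p)), 'logs'))  # /opt/app/logs, as in getLogDir() above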
Example #4
    def test_retrieve_hierarchy_creates_tree_of_bones(self):
        collection = get_collection()

        self.loadBlend(
            up(up(up(self.relpath()))) + '/testfiles/unordered_bones.blend')

        hierarchy, _ = retrieve_hierarchy(self, 'lorem ipsum')

        self.assertEqual(11, len(hierarchy.pivots))

        self.assertEqual('ROOTTRANSFORM', hierarchy.pivots[0].name)
        self.assertEqual(-1, hierarchy.pivots[0].parent_id)

        self.assertEqual('Location', hierarchy.pivots[1].name)
        self.assertEqual(0, hierarchy.pivots[1].parent_id)
        self.assertEqual('fx_damage_3', hierarchy.pivots[2].name)
        self.assertEqual(1, hierarchy.pivots[2].parent_id)
        self.assertEqual('tp_2', hierarchy.pivots[3].name)
        self.assertEqual(0, hierarchy.pivots[3].parent_id)
        self.assertEqual('turret_2', hierarchy.pivots[4].name)
        self.assertEqual(3, hierarchy.pivots[4].parent_id)
        self.assertEqual('barrel_2', hierarchy.pivots[5].name)
        self.assertEqual(4, hierarchy.pivots[5].parent_id)
        self.assertEqual('fx_damage_2', hierarchy.pivots[6].name)
        self.assertEqual(4, hierarchy.pivots[6].parent_id)
        self.assertEqual('tp_1', hierarchy.pivots[7].name)
        self.assertEqual(0, hierarchy.pivots[7].parent_id)
        self.assertEqual('turret_1', hierarchy.pivots[8].name)
        self.assertEqual(7, hierarchy.pivots[8].parent_id)
        self.assertEqual('barrel_1', hierarchy.pivots[9].name)
        self.assertEqual(8, hierarchy.pivots[9].parent_id)
        self.assertEqual('fx_damage_1', hierarchy.pivots[10].name)
        self.assertEqual(8, hierarchy.pivots[10].parent_id)
Example #5
    def __init__(self,repo_url,repo_name,repo_lang,code_path):
        self.repo_url = repo_url
        self.repo_name = repo_name
        self.repo_lang = repo_lang
        #self.repo_obj = git2repo.git2repo(self.repo_url,self.repo_name)
        self.root_dir = code_path
        print("root:",self.root_dir)
        if platform.system() == 'Darwin' or platform.system() == 'Linux':
            self.repo_path = self.root_dir + '/commit_guru/ingester/CASRepos/git/' + self.repo_name
            self.file_path = up(self.root_dir) + '/data/commit_guru/' + self.repo_name + '.csv'
            #self.committed_file = up(os.getcwd()) + '/data/committed_files/' + self.repo_name + '_committed_file.pkl'
            self.und_file = up(self.root_dir) + '/data/understand_files/' + self.repo_name + '_understand.csv'
        else:
            self.repo_path = up(os.getcwd()) + '\\temp_repo\\' + self.repo_name
            self.file_path = up(os.getcwd()) + '\\data\\commit_guru\\' + self.repo_name + '.pkl'
            #self.committed_file = up(os.getcwd()) + '\\data\\committed_files\\' + self.repo_name + '_committed_file.pkl'
        self.buggy_clean_pairs = self.read_commits()
        #self.buggy_clean_pairs = self.buggy_clean_pairs[0:5]
        # Reference current directory, so we can go back after we are done.
        self.cwd = Path(self.root_dir)
        #self.repo = self.clone_repo()
        # Generate path to store udb files
        #self.udb_path = self.cwd.joinpath(".temp", "udb")
        self.udb_path = self.cwd.joinpath("temp", "udb/"+self.repo_name)

        # Create a folder to hold the udb files
        if not self.udb_path.is_dir():
            os.makedirs(self.udb_path)
Example #6
    def test_mesh_export_W3D_too_few_uv_layers(self):
        copyfile(
            up(up(up(self.relpath()))) + '/testfiles/texture.dds',
            self.outpath() + 'texture.dds')
        self.file_format = 'W3D'
        mesh = bpy.data.meshes.new('mesh_cube')

        b_mesh = bmesh.new()
        bmesh.ops.create_cube(b_mesh, size=1)
        b_mesh.to_mesh(mesh)

        material, principled = create_material_from_vertex_material(
            'loem ipsum', get_vertex_material())
        tex = find_texture(self, 'texture.dds')
        principled.base_color_texture.image = tex
        mesh.materials.append(material)

        mesh_ob = bpy.data.objects.new('mesh_object', mesh)
        mesh_ob.data.object_type = 'MESH'

        coll = bpy.context.scene.collection
        coll.objects.link(mesh_ob)
        bpy.context.view_layer.objects.active = mesh_ob
        mesh_ob.select_set(True)

        meshes, _ = retrieve_meshes(self, None, None, 'container_name')

        self.assertEqual(0, len(meshes[0].material_passes[0].tx_stages))
Example #7
 def get_smpl_file(self):
     if self.gender == 'neutral':
         THREE_UP = up(up(up(__file__)))
         SMPL_PATH = os.path.join(THREE_UP, 'assets/neutral_smpl.pkl')
         return SMPL_PATH
     else:
         raise NotImplementedError
Example #8
def _find_project(view):
    directory = up(abspath(view.file_name()))
    while '__init__.py' in os.listdir(directory) and directory != '/':
        directory = up(directory)
    if directory == '/':
        return up(abspath(view.file_name()))
    else:
        return directory
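A self-contained rerun of the loop above on a hypothetical temporary layout, showing that the walk stops at the first ancestor that is not a package:

import os
import tempfile
from os.path import dirname as up, join

# hypothetical layout: root/pkg/sub/mod.py, with __init__.py in pkg and sub
root = tempfile.mkdtemp()
os.makedirs(join(root, 'pkg', 'sub'))
for d in (join(root, 'pkg'), join(root, 'pkg', 'sub')):
    open(join(d, '__init__.py'), 'w').close()
mod = join(root, 'pkg', 'sub', 'mod.py')
open(mod, 'w').close()

directory = up(os.path.abspath(mod))
while '__init__.py' in os.listdir(directory) and directory != '/':
    directory = up(directory)
print(directory == root)  # True: the first ancestor without __init__.py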
Example #9
    def root(path: str):
        from os.path import dirname as up
        wd = os.getcwd()
        if "unittest" in wd and "python" in wd:
            # go up 3 dirs
            wd = up(up(up(wd)))

        return os.path.join(wd, path)
Example #10
 def __init__(self,access_token,repo_owner,source_type,git_url,api_base_url,repo_name):
     self.repo_name = repo_name
     if platform.system() == 'Darwin' or platform.system() == 'Linux':
         self.data_path = up(os.getcwd()) + '/data/'
     else:
         self.data_path = up(os.getcwd()) + '\\data\\'
     self.git_client = api_access.git_api_access(access_token,repo_owner,source_type,git_url,api_base_url,repo_name)
     self.git_repo = git2repo.git2repo(git_url,repo_name)
     self.repo = self.git_repo.clone_repo()
Example #11
 def __init__(self, repo_url, repo_name):
     self.repo_url = repo_url
     self.repo_name = repo_name
     self.commit = []
     if platform.system() == 'Darwin' or platform.system() == 'Linux':
         self.repo_path = up(up(os.getcwd())) + '/temp_repo/' + repo_name
     else:
         self.repo_path = up(up(os.getcwd())) + '\\temp_repo\\' + repo_name
     self.clone_repo()
Example #12
 def __init__(self, project_name, repo_url, repo_name):
     self.project_name = project_name
     if platform.system() == 'Darwin' or platform.system() == 'Linux':
         self.data_path = up(os.getcwd()) + '/data/'
     else:
         self.data_path = up(os.getcwd()) + '\\data\\'
     self.commit = self.read_files('commit')
     self.committed_files = self.read_files('committed_file')
     self.initilize_repo(repo_url, repo_name)
     self.cores = cpu_count()
Example #13
 def generate_repo_path(self):
     def randomStringDigits(stringLength=6):
         """Generate a random string of letters and digits """
         lettersAndDigits = string.ascii_letters + string.digits
         return ''.join(random.choice(lettersAndDigits) for i in range(stringLength))
     ukey = randomStringDigits(8)    
     if platform.system() == 'Darwin' or platform.system() == 'Linux':
         repo_path = up(os.getcwd()) + '/temp_repo/' + ukey + '/' + self.repo_name
     else:
         repo_path = up(os.getcwd()) + '\\temp_repo\\' + ukey + '\\' + self.repo_name
     return repo_path
Example #14
    def test_mesh_import_2_textures_1_vertex_material(self):
        mesh = get_mesh_two_textures()

        copyfile(
            up(up(self.relpath())) + '/testfiles/texture.dds',
            self.outpath() + 'texture.dds')
        copyfile(
            up(up(self.relpath())) + '/testfiles/texture.dds',
            self.outpath() + 'texture2.dds')

        create_mesh(self, mesh, bpy.context.collection)
Example #15
 def test_loadParameterSetFromDir1(self):
     """
     Try loading from X, where the FS looks like:
       X/
         2013-03/
           GenomicConsensus/
             QuiverParameters.ini
     """
     quiverParamsIni = m._findParametersFile()
     X = up(up(up(quiverParamsIni)))
     paramSets = m._loadParameterSets(m._findParametersFile(X))
     assert "C2.AllQVsModel" in paramSets
 def test_loadParameterSetFromDir2(self):
     """
     Try loading from specified bundle
       X/
         2013-03/  <------------ here
           GenomicConsensus/
             QuiverParameters.ini
     """
     quiverParamsIni = m._findParametersFile()
     X = up(up(quiverParamsIni))
     paramSets = m._loadParameterSets(m._findParametersFile(X))
     assert "C2.AllQVsModel" in paramSets
Example #17
 def __init__(self, access_token, repo_owner, source_type, git_url,
              api_base_url, repo_name):
     self.repo_name = repo_name
     if platform.system() == 'Darwin' or platform.system() == 'Linux':
         self.data_path = up(os.getcwd()) + '/data/'
     else:
         self.data_path = up(os.getcwd()) + '\\data\\'
     if not os.path.exists(self.data_path):
         os.makedirs(self.data_path)
     self.git_client = api_access.git_api_access(access_token, repo_owner,
                                                 source_type, git_url,
                                                 api_base_url, repo_name)
Example #18
def import_server_variable():

    # python3 compatibility by setting unicode = str
    import sys
    if sys.version_info[0] >= 3:
        unicode = str
        
    # getting logged in user
    global username
    #import getpass
    #user = getpass.getuser()
    # or (see loggedInUser in shell scripts)
    from SystemConfiguration import SCDynamicStoreCopyConsoleUser
    import sys
    username = (SCDynamicStoreCopyConsoleUser(None, None, None) or [None])[0]
    # treat the loginwindow / no-session cases as "no user logged in"
    username = [username, ""][username in [u"loginwindow", None, u""]]
    #print (username)
    
    # defining path to script with server variable
    from os.path import dirname as up
    three_up = up(up(up(__file__)))
    #print (three_up)
    path = (three_up) + "/_scripts_input_keep/finder_favorites.py"
    #path = (three_up) + "/_scripts_input_keep/finder_favorites_" + username + ".py"
    
    # checking if file exists
    import os
    if not os.path.exists(path):
        print("file " + path + " does not exists, exiting...")
        quit()
    
    # reading server variable
    def getVarFromFile(filename):
        import imp
        global data
        data = imp.load_source('data', filename)
    
    getVarFromFile(path)
    print ('')
    print("severs entry...")
    print (data.servers)
    global servers
    servers = (data.servers)
    
    # checking if server variable is defined
    try:
        servers
    except NameError:
        print("servers is not defined, exiting...")
        quit()
    else:
        print('')
Example #19
 def test_loadParameterSetFromDir2(self):
     """
     Try loading from specified bundle
       X/
         2013-03/  <------------ here
           GenomicConsensus/
             QuiverParameters.ini
     """
     quiverParamsIni = m._findParametersFile()
     X = up(up(quiverParamsIni))
     paramSets = m._loadParameterSets(m._findParametersFile(X))
     assert "C2.AllQVsModel" in paramSets
Example #20
 def test_loadParameterSetFromDir1(self):
     """
     Try loading from X, where the FS looks like:
       X/
         2013-03/
           GenomicConsensus/
             QuiverParameters.ini
     """
     quiverParamsIni = m._findParametersFile()
     X = up(up(up(quiverParamsIni)))
     paramSets = m._loadParameterSets(m._findParametersFile(X))
     assert "C2.AllQVsModel" in paramSets
    def test_modifiers_are_applied_on_export(self):
        self.loadBlend(
            up(up(up(self.relpath()))) +
            '/testfiles/cube_with_modifiers.blend')

        self.assertTrue('Cube' in bpy.data.objects)

        meshes, _ = retrieve_meshes(self, None, None, 'container_name')

        self.assertEqual(1, len(meshes))

        mesh = meshes[0]
        self.assertEqual(42, len(mesh.verts))
Example #22
    def test_vertex_material_roundtrip(self):
        mesh = get_mesh()

        copyfile(
            up(up(self.relpath())) + '/testfiles/texture.dds',
            self.outpath() + 'texture.dds')

        for source in mesh.vert_materials:
            (material,
             _) = create_material_from_vertex_material(mesh.name(), source)
            principled = node_shader_utils.PrincipledBSDFWrapper(
                material, is_readonly=True)
            actual = retrieve_vertex_material(material, principled)
            compare_vertex_materials(self, source, actual)
Example #23
def module_api(self):
    query = self.options['query']
    project_root = up(up(up(__file__)))
    filepath = os.path.join(project_root, 'data', 'username_checker.json')
    with open(filepath) as handle:
        data = json.load(handle)
    thread(self, data, query, self.options['thread'])
    output = OUTPUT

    self.save_gather(output,
                     'osint/username_search',
                     query,
                     output=self.options.get('output'))
    return output
Example #24
def load_parkinson(target,
                   random_state=None,
                   preprocess='standardise',
                   label='total',
                   test_size=0.2):
    dir_path = [
        os.path.join(up(up(up(os.path.abspath(__file__)))), 'parkinson')
    ]
    data_list = []
    supp_list = []
    if label == 'both':
        label_list = ['total', 'motor']
        total_group = 84
    else:
        label_list = [label]
        total_group = 42
    for label in label_list:
        for ind in range(0, 42):
            for path in dir_path:
                if os.path.exists(path):
                    file_path = os.path.join(path,
                                             'patient_{}.npy'.format(ind))
                    patient = np.load(file_path)
            patient_x = patient[:, :-2]
            # drop the age and sex columns, which are the same for all patients
            patient_x = patient_x[:, 2:]
            print(patient_x.shape)
            if preprocess == 'standardise':
                patient_x = preprocessing.scale(patient_x)
            if label == 'motor':
                patient_y = np.expand_dims(patient[:, -2], 1)
            elif label == 'total':
                patient_y = np.expand_dims(patient[:, -1], 1)
            patient_y = standardise(patient_y)
            train_x, test_x, train_y, test_y = train_test_split(
                patient_x,
                patient_y,
                test_size=test_size,
                stratify=binarize_labels(patient_y),
                random_state=random_state)
            name = 'pat_{}_{}'.format(label, ind)
            patient_data = data(train_x, test_x, train_y, test_y, name=name)
            data_list.append(patient_data)
            supp_list.append(name)
    assert target < total_group, 'Target index is more than total group.'
    index = list(range(0, total_group))  # range objects have no remove() in Python 3
    index.remove(target)
    return (data_list[target], [data_list[i] for i in index],
            [supp_list[i] for i in [target] + index])
Example #25
 def viewClicked(self, index):
     from os.path import dirname as up
     import os
     tmodel = index.model()
     col = index.column()
     row = tmodel.rowCount()
     self.leaf_view.clicked.disconnect(self.viewClicked)
     parentOfMasterFolder = up(up(os.getcwd()))
     ImportedPath = os.path.join(parentOfMasterFolder, "IMPORTED FOLDER")
     if not os.path.isdir(ImportedPath):
         os.makedirs(ImportedPath)
     with open(os.path.join(ImportedPath, 'IMPORTED_DATA.txt'), 'a') as f:
         for i in range(0, row):
             f.write(str(tmodel.cell(i, col)) + '\n')
Example #26
def start(metadata_filepath, json_settings):
    with open(metadata_filepath) as handle:
        metadatas = json.load(handle)
    metadatas2 = prepare_metadata(metadatas).items
    username = os.path.basename(up(up(metadata_filepath)))
    site_name = os.path.basename(up(up(up(metadata_filepath))))
    for metadata in metadatas2:
        metadata.valid = fix_metadata(metadata.valid, json_settings, username,
                                      site_name)
        metadata.invalid = fix_metadata(metadata.invalid, json_settings,
                                        username, site_name)
    metadatas2 = json.loads(
        json.dumps(metadatas2, default=lambda o: o.__dict__))
    if metadatas != metadatas2:
        main_helper.update_metadata(metadata_filepath, metadatas2)
    return metadatas2
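The two basename/up combinations above assume a site_name/username/metadata layout; a quick sketch with a hypothetical path:

from os.path import basename, dirname as up

p = '/archive/some_site/some_user/metadata/user_data.json'  # hypothetical
print(basename(up(up(p))))      # some_user (username)
print(basename(up(up(up(p)))))  # some_site (site_name)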
Example #27
def compute(projects, code_path, core):
    for i in range(projects.shape[0]):
        try:
            print("I am here")
            understand_source = []
            last_analyzed = None
            access_token = projects.loc[i, 'access_token']
            repo_owner = projects.loc[i, 'repo_owner']
            source_type = projects.loc[i, 'source_type']
            git_url = projects.loc[i, 'git_url']
            api_base_url = projects.loc[i, 'api_base_url']
            repo_name = projects.loc[i, 'repo_name']
            repo_lang = projects.loc[i, 'lang']
            understand_source.append([1, repo_name, git_url, last_analyzed])
            understand_source_df = pd.DataFrame(
                understand_source,
                columns=['id', 'name', 'url', 'last_analyzed'])
            file_path = up(
                code_path) + '/data/commit_guru/' + repo_name + '.csv'
            os.chdir(code_path)
            get_matrix = compute_metrics_final.MetricsGetter(
                git_url, repo_name, repo_lang, code_path)
            matrix = get_matrix.get_defective_pair_metrics()
            projects.loc[i, 'done'] = 1
            projects.to_csv('completed_projects_' + str(core) + '.csv')
            print(projects)
            print('Done')
        except Exception as e:
            print("error", e)
            continue
Example #28
def mine(projects, code_path):
    for i in range(projects.shape[0]):
        try:
            #print("I am here")
            understand_source = []
            last_analyzed = None
            #access_token = projects.loc[i,'access_token']
            #repo_owner = projects.loc[i,'repo_owner']
            #source_type = projects.loc[i,'source_type']
            git_url = projects.loc[i, 'git_url']
            #api_base_url = projects.loc[i,'api_base_url']
            repo_name = projects.loc[i, 'repo_name']
            repo_lang = projects.loc[i, 'lang']
            understand_source.append([1, repo_name, git_url, last_analyzed])
            understand_source_df = pd.DataFrame(
                understand_source,
                columns=['id', 'name', 'url', 'last_analyzed'])
            file_path = up(
                code_path) + '/data/commit_guru/' + repo_name + '.csv'
            #      cas_manager = CAS_Manager(understand_source_df)
            #      if os.path.isfile(file_path):
            #        print('file exist')
            #        cas_manager.run_ingestion()
            ##      else:
            #       cas_manager.run()
            #     os.chdir(code_path)
            #     print(code_path)
            get_matrix = git_understand.MetricsGetter(git_url, repo_name,
                                                      repo_lang, code_path)
            matrix = get_matrix.get_defective_pair_metrics()
            #matrix.to_csv(str(repo_name)+"_final_metrics")
            #print('Done')
        except ValueError as e:
            print("error", e)
            continue
Example #29
    def test_texture_file_extensions_is_tga_if_file_is_tga_but_dds_referenced(
            self):
        with (patch.object(self, 'info')) as report_func:
            for extension in extensions:
                copyfile(
                    up(up(up(self.relpath()))) + '/testfiles/texture.dds',
                    self.outpath() + 'texture.tga')

                find_texture(self, 'texture', 'texture.dds')

                # reset scene
                bpy.ops.wm.read_homefile(use_empty=True)
                os.remove(self.outpath() + 'texture.tga')

                report_func.assert_called_with(
                    f'loaded texture: {self.outpath()}texture.tga')
Example #30
    def test_invalid_texture_file_extension(self):
        extensions = ['.invalid']

        with (patch.object(self, 'warning')) as report_func:
            for extension in extensions:
                copyfile(
                    up(up(up(self.relpath()))) + '/testfiles/texture.dds',
                    self.outpath() + 'texture' + extension)

                find_texture(self, 'texture')

                # reset scene
                bpy.ops.wm.read_homefile(use_empty=True)
                os.remove(self.outpath() + 'texture' + extension)

                report_func.assert_called()
Example #31
def mine(projects,code_path,th_num):
  for i in range(projects.shape[0]):
    try:
      print("I am here")
      understand_source = []
      last_analyzed = None
      access_token = projects.loc[i,'access_token']
      repo_owner = projects.loc[i,'repo_owner']
      source_type = projects.loc[i,'source_type']
      git_url = projects.loc[i,'git_url']
      api_base_url = projects.loc[i,'api_base_url']
      repo_name = projects.loc[i,'repo_name']
      repo_lang = projects.loc[i,'lang']
      understand_source.append([1,repo_name,git_url,last_analyzed])
      understand_source_df = pd.DataFrame(understand_source,columns = ['id','name','url','last_analyzed'])
      file_path = up(code_path) + '/data/commit_guru/' + repo_name + '.csv'
      cas_manager = CAS_Manager(understand_source_df)
      if os.path.isfile(file_path):
        print('file exist')
        cas_manager.run_ingestion()
      else:
        cas_manager.run()
      os.chdir(code_path)
      print(code_path)
      get_matrix = git_understand.MetricsGetter(git_url,repo_name,repo_lang,code_path)
      matrix = get_matrix.get_defective_pair_udb_files()
      projects.loc[i,'done'] = 1
      get_matrix_computed = compute_metrics_final.MetricsGetter(git_url,repo_name,repo_lang,code_path)
      matrix_computed = get_matrix_computed.get_defective_pair_metrics()
      projects.to_csv('Test_projects_' + str(th_num) + '.csv') 
      print('Done')
    except Exception as e:
      print("error",e)
      continue
Example #32
    def test_multiuser_mesh_with_modifiers_export(self):
        self.loadBlend(
            up(up(up(self.relpath()))) +
            '/testfiles/multiuser_mesh_with_modifiers.blend')

        self.assertTrue('Cube' in bpy.data.objects)
        self.assertTrue('Cube2' in bpy.data.objects)

        meshes, _ = retrieve_meshes(self, None, None, 'container_name')

        self.assertEqual(2, len(meshes))

        mesh = meshes[0]
        self.assertEqual(42, len(mesh.verts))

        mesh2 = meshes[1]
        self.assertEqual(34, len(mesh2.verts))
Example #33
def find_boost():
    """
    Look for boost in some standard filesystem places.
    """
    boosts_found = (
        glob("/usr/include/boost/version.hpp")
        + glob("/usr/include/boost*/boost/version.hpp")
        + glob("/usr/local/boost/version.hpp")
        + glob("/usr/local/boost*/boost/version.hpp")
        + glob("/opt/local/include/boost/version.hpp")
        + glob("/home/UNIXHOME/dalexander/Packages/boost_1_47_0/boost/version.hpp")
    )

    best_boost_found = (None, None)

    # pick the boost with the largest version number
    for boost in boosts_found:
        version = parse_boost_version(boost)
        boost_root = up(up(boost))
        print "Boost located: %s : version %s" % (boost_root, version)
        if version > best_boost_found[1]:
            best_boost_found = (boost_root, version)
    return best_boost_found[0]
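parse_boost_version is not shown in this excerpt; a plausible sketch, assuming it reads the BOOST_VERSION define out of version.hpp:

import re

def parse_boost_version(version_hpp_path):
    # boost/version.hpp contains a line like: #define BOOST_VERSION 104700
    with open(version_hpp_path) as f:
        match = re.search(r'#define\s+BOOST_VERSION\s+(\d+)', f.read())
    return int(match.group(1)) if match else 0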
Example #34
def restart_apache():
    run(oj(up(REMOTE_DIR), 'apache2/bin/restart'))
Example #35
"""
Writing 2D data processed with Nmrpipe back into Topspin format

"""
import os
from os.path import dirname as up
import nmrglue as ng
import matplotlib.pyplot as plt
import numpy as np

# set data paths
DATA_DIR = os.path.join(up(up(up(os.path.abspath(__file__)))),
                        'data', 'bruker_2d')

# read in data processed using TOPSPIN
# this is required only to get access to the dictionary
# as a reference starting point to write the data back out
# in a way Topspin can read it
tdic, _ = ng.bruker.read_pdata(os.path.join(DATA_DIR, 'pdata', '1'))

# read in data processed using nmrpipe
# alternately, this data can be processed with nmrglue itself
ndic, ndata = ng.pipe.read(os.path.join(DATA_DIR, 'test.ft2'))

# Update dictionary parameters to match the data.
# Note that none of the other parameters will match what is
# written in the procs files, so the procs files will not fully
# correspond to the actual data.
tdic['procs']['SI'] = 2048
tdic['proc2s']['SI'] = 1024
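The excerpt stops after updating the size parameters; a hedged sketch of the final write-back step, assuming ng.bruker.write_pdata is available in your nmrglue version (the pdata_folder number is an assumption):

# write the NMRPipe-processed data into a new Topspin pdata folder
ng.bruker.write_pdata(DATA_DIR, tdic, ndata, scale_data=True,
                      pdata_folder=2, overwrite=True)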