def heatmap(data, method, metric):
    """Attach hierarchical-clustering linkage data to *data* for heatmap use.

    Rows and columns are clustered independently via the module-level
    ``hierarchy`` helper; the resulting linkage tables are then glued onto
    *data* itself: row linkage as extra '_'-prefixed columns, column linkage
    as extra '_'-prefixed rows.

    :param data: pandas DataFrame to cluster.
    :param method: linkage method, forwarded to ``hierarchy``.
    :param metric: distance metric, forwarded to ``hierarchy``.
    :returns: new DataFrame: *data* plus linkage columns plus linkage rows.
    """
    rowlinks = hierarchy(data, axis='rows', method=method, metric=metric)
    collinks = hierarchy(data, axis='columns', method=method, metric=metric)
    # Record each linkage row's own index as its cluster id.
    rowlinks['cluster'] = rowlinks.index
    # Sentinel row of -1s; assumes the linkage table has exactly 4 columns
    # plus 'cluster' (5 values) -- TODO confirm against hierarchy()'s output.
    rowlinks.loc[0] = [-1, -1, -1, -1, -1]
    # Prefix linkage columns with '_' so they are distinguishable from data.
    rowlinks.columns = ['_' + c for c in rowlinks.columns]
    rowlinks.index = data.index
    # Row linkage becomes extra columns alongside the data.
    data = pd.concat([data, rowlinks], axis=1)
    collinks['cluster'] = collinks.index
    # Pad with 6 sentinel rows; presumably so the transposed table lines up
    # with data's column count after the row-linkage concat above -- verify.
    for ind in range(6):
        collinks.loc[-ind] = [-1, -1, -1, -1, -1]
    collinks = collinks.transpose()
    collinks.columns = data.columns
    if len(data.index.names) > 1:
        # MultiIndex: pad each '_name' label with empty strings for the
        # remaining index levels so concat along rows aligns.
        collinks.index = [
            tuple(['_' + c] + ([''] * (len(data.index.names) - 1)))
            for c in collinks.index
        ]
    else:
        collinks.index = ['_' + c for c in collinks.index]
    # Column linkage becomes extra rows below the data.
    data = pd.concat([data, collinks])
    return data
def checkout(self, revset=None):
    """Check out *revset*: sync the manifest repo to it, then bring every
    sub-repo in line with the freshly read manifest."""
    if revset is None:
        revset = self.revset()
    revset = Revset.cast(self, revset)
    # Local rug changes are deliberately discarded: any uncommitted edits
    # to the manifest.xml file are lost here.
    self.manifest_repo.checkout(revset, force=True)
    # The manifest may have changed, so re-read it before touching repos.
    self.read_manifest()
    if not self.bare:
        nested = hierarchy.hierarchy(self.repos.keys())
        for entry in self.repos.values():
            remote_url = '%s/%s' % (self.remotes[entry['remote']]['fetch'], entry['name'])
            repo = entry['repo']
            if not repo:
                # Working copy does not exist yet: clone it fresh.
                self.create_repo(entry, nested[entry['path']])
                continue
            # Verify remotes.
            if entry['remote'] in repo.remote_list():
                # Easier to overwrite the URL than to compare-then-set.
                repo.remote_set_url(entry['remote'], remote_url)
            else:
                repo.remote_add(entry['remote'], remote_url)
            # Fetch from remote.
            # TODO: decide if we should always do this here.  Sometimes we
            # have to, since this remote may never have been seen before.
            repo.fetch(entry['remote'])
            refs = self.get_branch_names(entry)
            # These are fully qualified ('refs/...') branch names, so
            # update_ref is used instead of create_branch.  Create the rug
            # and bookmark branches if they don't exist yet.
            for key in ('rug', 'bookmark'):
                if not repo.valid_rev(refs[key]):
                    repo.update_ref(refs[key], refs['remote'])
            # Drop any stale index branches.
            for key in ('rug_index', 'bookmark_index'):
                if repo.valid_rev(refs[key]):
                    repo.delete_ref(refs[key])
            # Create and check out the live branch.
            repo.update_ref(refs['live_plumbing'], refs['rug'])
            repo.checkout(refs['live_porcelain'])
    self.output.append('revset %s checked out' % revset.get_short_name())
def __init__(self):
    """Constructor.

    Could later take a couple of parameters such as the database name, etc.
    (translated from Russian)
    """
    self.db = db.connect("nyaadb")
    # "db, self.db..." -- the naming here smells a bit (translated from Russian)
    self.cu = self.db.cursor()
    # System tables, directory hierarchy, and attribute/xattr helpers all
    # share the same cursor/connection pair.
    self.st = systables.systables(self.cu, self.db)
    self.hier = hierarchy.hierarchy("default", self.cu, self.db)
    self.fatr = unix_attr.unix_attr("def_files", self.cu, self.db)
    self.datr = unix_attr.unix_attr("def_dirs", self.cu, self.db)
    self.xattr=xattr.xattr(self.cu, self.db)
    # Seed the root directory (item id 0) with current timestamps.
    tmp = int(time.time())
    at = {}
    at["st_ctime"] = at["st_atime"] = at["st_mtime"] = tmp
    # 0755 is a Python 2 octal literal: drwxr-xr-x.
    at["st_mode"] = stat.S_IFDIR | 0755
    at["st_nlink"] = 2
    try:
        self.datr.add_item(0, at)
    except NyaError, e:
        # NOTE(review): presumably "root already exists" is the expected
        # failure here, but ALL NyaError variants are silently swallowed --
        # confirm this is intentional.
        return
def update(self, recursive=False):
    """Update every checked-out repo from its upstream remote branch.

    For each repo in the manifest: fast-forward when possible, rebase local
    work onto the remote branch when a bookmark records where local work
    started, and otherwise append a message explaining why the repo was
    skipped.  Repos missing from disk are (re)created from the manifest.

    :param recursive: if True, recursively update each repo as well.
    :raises RugError: if the project has uncommitted changes.
    """
    #TODO: implement per repo update (accept a list of repo names)
    repos = self.repos.values()
    if self.dirty():
        raise RugError('Project has uncommitted changes - commit before updating')
    #TODO: update manifest?
    sub_repos = hierarchy.hierarchy(self.repos.keys())
    for r in repos:
        repo = r['repo']
        if repo:
            #Get branch names, revs, etc.
            branches = self.get_branch_names(r)
            head_rev = repo.head()
            if not repo.valid_rev(branches['remote']):
                self.output.append('remote branch does not exist in %s: no update' % r['path'])
            else:
                remote_rev = repo.rev_class(repo, branches['remote'])
                #We don't touch the bookmark branch here - we refer to bookmark index branch if it exists,
                #or bookmark branch if not, and update the bookmark index branch if necessary. Commit updates
                #bookmark branch and removes bookmark index
                if repo.valid_rev(branches['bookmark_index']):
                    bookmark_rev = repo.rev_class(repo, branches['bookmark_index'])
                elif repo.valid_rev(branches['bookmark']):
                    bookmark_rev = repo.rev_class(repo, branches['bookmark'])
                else:
                    bookmark_rev = None
                #Check if there are no changes
                if head_rev.get_sha() == remote_rev.get_sha():
                    self.output.append('%s is up to date with upstream repo: no update' % r['path'])
                elif head_rev.is_descendant(remote_rev):
                    self.output.append('%s is ahead of upstream repo: no update' % r['path'])
                #Fast-Forward if we can
                elif head_rev.can_fastforward(remote_rev):
                    self.output.append('%s is being fast-forward to upstream repo' % r['path'])
                    repo.merge(remote_rev)
                    repo.update_ref(branches['bookmark_index'], remote_rev)
                #otherwise rebase/merge local work
                #BUGFIX: this branch (and the two below) previously referenced
                #'bookmark_branch', a name never defined anywhere -- a
                #guaranteed NameError at runtime.  The rev computed above is
                #'bookmark_rev'.
                elif bookmark_rev and head_rev.is_descendant(bookmark_rev):
                    #TODO: currently dead code - we check for dirtyness at the top of the function
                    if repo.dirty():
                        #TODO: option to stash, rebase, then reapply?
                        self.output.append('%s has local uncommitted changes and cannot be rebased. Skipping this repo.' % r['path'])
                    else:
                        #TODO: option to merge instead of rebase
                        #TODO: handle merge/rebase conflicts
                        #TODO: remember if we're in a conflict state
                        #NOTE(review): other messages use r['path']; this one
                        #uses r['name'] -- possibly intentional, left as-is.
                        self.output.append('%s is being rebased onto upstream repo' % r['name'])
                        [ret, out, err] = repo.rebase(bookmark_rev, onto=branches['remote'])
                        if ret:
                            self.output.append(out)
                        else:
                            repo.update_ref(branches['bookmark_index'], branches['remote'])
                elif not bookmark_rev:
                    self.output.append('%s has an unusual relationship with the remote branch, and no bookmark. Skipping this repo.' % r['path'])
                #Fail
                #TODO: currently dead code - we check for dirtyness at the top of the function
                elif head_rev.get_short_name() != r['revision']:
                    self.output.append('%s has changed branches and cannot be safely updated. Skipping this repo.' % r['path'])
                else:
                    #Weird stuff has happened - right branch, wrong relationship to bookmark
                    self.output.append('You are out of your element. The current branch in %s has been in altered in an unusal way and must be manually updated.' % r['path'])
        else:
            #No working copy on disk: recreate it from the manifest.
            repo = self.create_repo(r, sub_repos[r['path']])
            self.output.append('Deleted repo %s check out' % r['path'])
        if recursive:
            repo.update(recursive)
def checkout(self, revset=None): 'check out a revset' #Checkout manifest manifest if revset is None: revset = self.revset() revset = Revset.cast(self, revset) #Always throw away local rug changes - uncommitted changes to the manifest.xml file are lost self.manifest_repo.checkout(revset, force=True) #reread manifest self.read_manifest() if not self.bare: sub_repos = hierarchy.hierarchy(self.repos.keys()) for r in self.repos.values(): url = self.remotes[r['remote']]['fetch'] + '/' + r['name'] #if the repo doesn't exist, clone it repo = r['repo'] if not repo: self.create_repo(r, sub_repos[r['path']]) else: #Verify remotes if r['remote'] not in repo.remote_list(): repo.remote_add(r['remote'], url) else: candidate_urls = map(lambda c: c % url, RUG_CANDIDATE_TEMPLATES) if repo.config('remote.%s.url' % r['remote']) not in candidate_urls: clone_url = None for cu in candidate_urls: if git.Repo.valid_repo(cu, config=repo_config): clone_url = cu break if clone_url: repo.remote_set_url(r['remote'], clone_url) else: raise RugError('%s does not seem to be a rug project' % url) #Fetch from remote #TODO:decide if we should always do this here. Sometimes have to, since we may not have #seen this remote before repo.fetch(r['remote']) branches = self.get_branch_names(r) #create rug and bookmark branches if they don't exist #branches are fully qualified ('refs/...') branch names, so use update_ref #instead of create_branch for b in ['rug', 'bookmark']: if not repo.valid_rev(branches[b]): repo.update_ref(branches[b], branches['remote']) for b in ['rug_index', 'bookmark_index']: if repo.valid_rev(branches[b]): repo.delete_ref(branches[b]) #create and checkout the live branch repo.update_ref(branches['live_plumbing'], branches['rug']) repo.checkout(branches['live_porcelain']) self.output.append('revset %s checked out' % revset.get_short_name())
# Evaluation script fragment: loads the test split, builds a FuzzyTree
# classifier over a class hierarchy, and collects predicted vs. expected
# class vectors.  Assumes data_train, dataset_filepath and node_filepath
# are defined earlier in the file -- TODO confirm (not visible here).
data_test = pd.read_csv(dataset_filepath + '/test_final.csv', low_memory=False)
# Output name is the script path plus the stringified path components --
# NOTE(review): str() of a list yields e.g. "['a', 'b']"; looks accidental.
output_filename = sys.argv[0] + str(dataset_filepath.split('/'))
file_results = open(output_filename, 'wb')
#print len(data_train)
#data_train = data_train.append(data_valid)
#print len(data_train)
# Split features from the 'classification' target column.
y_train = data_train['classification']
x_train = data_train.drop('classification', axis=1)
y_test = data_test['classification'].values
x_test = data_test.drop('classification', axis=1)
# Load the class hierarchy; drop the artificial root node '0'.
h = hie.hierarchy(node_filepath)
nodes = h.G.nodes()
nodes.remove('0')
print 'Comecei a construir a arvore'
fz = ft.FuzzyTree(x_train, y_train, x_test, h)
print 'Terminei de construir a arvore'
# obtido = predicted class vectors; esperado = expected ones (Portuguese).
obtido = []
esperado = []
for label in y_test:
    esperado.append(fz.getClassVectorMulti(label, np.unique(nodes)))
for i, row in x_test.iterrows():
    obtido.append(fz.classify(row))
#print obtido
#print esperado