def sys_repo_admin(request):
    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '25'))
    except ValueError:
        current_page = 1
        per_page = 25

    repos_all = seafile_api.get_repo_list(per_page * (current_page - 1),
                                          per_page + 1)
    repos = repos_all[:per_page]

    if len(repos_all) == per_page + 1:
        page_next = True
    else:
        page_next = False

    for repo in repos:
        try:
            repo.owner = seafile_api.get_repo_owner(repo.id)
        except:
            repo.owner = "failed to get"

    return render_to_response(
        'sysadmin/sys_repo_admin.html', {
            'repos': repos,
            'current_page': current_page,
            'prev_page': current_page - 1,
            'next_page': current_page + 1,
            'per_page': per_page,
            'page_next': page_next,
        },
        context_instance=RequestContext(request))
def get_repo_list_order_by(t_start, t_limit, order_by):
    t_repo_list = api.get_repo_list(t_start, t_limit, order_by)
    assert t_repo_list and len(t_repo_list)
    if order_by == "size":
        assert t_repo_list[0].size >= t_repo_list[1].size
    if order_by == "file_count":
        assert t_repo_list[0].file_count >= t_repo_list[1].file_count
def handle(self, *args, **kwargs):
    self.stdout.write('Start to get all existing repos')
    self.all_repo = [repo.repo_id for repo in seafile_api.get_repo_list(-1, -1)]
    trash_repo = [repo.repo_id for repo in seafile_api.get_trash_repo_list(-1, -1)]
    self.all_repo.extend(trash_repo)
    self.stdout.write('Successfully got all existing repos')

    # on_delete is CASCADE, so FileTag/FileComment will be deleted as well
    self.tables = {
        'FileUUIDMap': FileUUIDMap,
        'RevisionTags': RevisionTags,
        'UserStarredFiles': UserStarredFiles,
        'ExtraGroupsSharePermission': ExtraGroupsSharePermission,
        'ExtraSharePermission': ExtraSharePermission,
        'UploadLinkShare': UploadLinkShare
    }
    for table_name, model in self.tables.items():
        self.clear_table(table_name, model)
    self.stdout.write('All invalid repo data are deleted')
def sys_repo_admin(request):
    # Make sure page request is an int. If not, deliver first page.
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '25'))
    except ValueError:
        current_page = 1
        per_page = 25

    repos_all = seafile_api.get_repo_list(per_page * (current_page - 1), per_page + 1)
    repos = repos_all[:per_page]

    if len(repos_all) == per_page + 1:
        page_next = True
    else:
        page_next = False

    for repo in repos:
        try:
            repo.owner = seafile_api.get_repo_owner(repo.id)
        except:
            repo.owner = "failed to get"

    return render_to_response('sysadmin/sys_repo_admin.html', {
        'repos': repos,
        'current_page': current_page,
        'prev_page': current_page - 1,
        'next_page': current_page + 1,
        'per_page': per_page,
        'page_next': page_next,
    }, context_instance=RequestContext(request))
def list_repos_by_name(repo_name):
    repos = []
    repos_all = seafile_api.get_repo_list(-1, -1)
    for repo in repos_all:
        if repo_name in repo.name:
            try:
                repo.owner = seafile_api.get_repo_owner(repo.id)
            except SearpcError:
                repo.owner = "failed to get"
            repos.append(repo)
    return repos
def handle(self, *args, **kwargs):
    all_repo = [repo.repo_id for repo in seafile_api.get_repo_list(-1, -1)]
    trash_repo = [repo.repo_id for repo in seafile_api.get_trash_repo_list(-1, -1)]
    all_repo.extend(trash_repo)

    # on_delete is CASCADE, so FileTag will be deleted as well
    fup_repo_ids = FileUUIDMap.objects.all().values_list('repo_id', flat=True)
    FileUUIDMap.objects.filter(repo_id__in=list(set(fup_repo_ids) - set(all_repo))).delete()

    rt_repo_ids = RevisionTags.objects.all().values_list('repo_id', flat=True)
    RevisionTags.objects.filter(repo_id__in=list(set(rt_repo_ids) - set(all_repo))).delete()

    self.stdout.write('Invalid repo data deleted')
def generate_catalog():
    """
    Generate entire catalog and put it into DB cache
    """
    repos_all = [r for r in seafile_api.get_repo_list(0, MAX_INT)
                 if get_repo_owner(r.id) != 'system']
    return [generate_catalog_entry(repo)
            for repo in sorted(repos_all, key=lambda x: x.last_modify, reverse=False)]
def clean_up_catalog():
    """
    Remove catalog entries for repos that no longer exist
    """
    reconnect_db()
    repo_ids = [r.id for r in seafile_api.get_repo_list(0, MAX_INT)
                if get_repo_owner(r.id) != 'system']
    i = 0
    for ce in Catalog.objects.get_all():
        if ce.repo_id not in repo_ids:
            ce.delete()
            i += 1
    return i
def get_stats_repo_list():
    '''
    Get stats for the complete list of keeper repos,
    sorted by number of commits, desc
    '''
    try:
        repo_list = seafile_api.get_repo_list(0, -1)
    except Exception as e:
        print('Error: {}'.format(e))
        return

    # print("KEEPER repos #: {} ".format(len(repo_list)))

    repos = []
    for r in repo_list:
        repos.append({
            "repo": r,
            "owner": seafile_api.get_repo_owner(r.id),
            "commits_num": len(seafile_api.get_commit_list(r.id, 0, -1)),
        })

    repos_sorted_commits_num = sorted(repos, key=lambda i: i['commits_num'], reverse=True)
    # print(repos_sorted_commits_num)

    print(HEADER)
    for r in repos_sorted_commits_num:
        rr = r['repo']
        # print(vars(rr))
        print(FORMATTER.format(
            rr.id,
            rr.name.encode('utf-8'),
            r['owner'],
            rr.encrypted,
            rr.file_count,
            r['commits_num'],
            rr.size,
        ))
def get(self, request, format=None):
    """List 'all' libraries (by name/owner/page).

    Permission checking:
    1. only admin can perform this action.
    """
    # search libraries (by name/owner)
    repo_name = request.GET.get('name', '')
    owner = request.GET.get('owner', '')

    repos = []
    if repo_name and owner:
        # search by name and owner
        owned_repos = seafile_api.get_owned_repo_list(owner)
        for repo in owned_repos:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": owner, "repos": repos})

    elif repo_name:
        # search by name (keyword in name)
        repos_all = seafile_api.get_repo_list(-1, -1)
        for repo in repos_all:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": '', "repos": repos})

    elif owner:
        # search by owner
        owned_repos = seafile_api.get_owned_repo_list(owner)
        for repo in owned_repos:
            if repo.is_virtual:
                continue
            repo_info = get_repo_info(repo)
            repos.append(repo_info)
        return Response({"name": '', "owner": owner, "repos": repos})

    # get libraries by page
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = (current_page - 1) * per_page
    limit = per_page + 1
    repos_all = seafile_api.get_repo_list(start, limit)

    if len(repos_all) > per_page:
        repos_all = repos_all[:per_page]
        has_next_page = True
    else:
        has_next_page = False

    default_repo_id = get_system_default_repo_id()
    repos_all = filter(lambda r: not r.is_virtual, repos_all)
    repos_all = filter(lambda r: r.repo_id != default_repo_id, repos_all)

    return_results = []
    for repo in repos_all:
        repo_info = get_repo_info(repo)
        return_results.append(repo_info)

    page_info = {
        'has_next_page': has_next_page,
        'current_page': current_page
    }

    return Response({"page_info": page_info, "repos": return_results})
#!/usr/bin/env python
from seaserv import seafile_api
from keeper.cdc.cdc_manager import generate_certificate_by_repo

MAX_INT = 2147483647

err_list = []

###### START
repos_all = seafile_api.get_repo_list(0, MAX_INT)

# total amount of generated CDCs by the run
cdc_gen_amount = 0

for repo in repos_all:
    try:
        if generate_certificate_by_repo(repo):
            cdc_gen_amount += 1
    except Exception as err:
        print(err)
        err_list.append({'name': repo.name, 'id': repo.id, 'err': str(err)})

print("Amount of generated certificates:", cdc_gen_amount)
print("Errors:", err_list)

# print('\n'.join(sys.path))
def test_repo_manipulation():

    # test get_system_default_repo_id
    t_default_repo_id = api.get_system_default_repo_id()
    assert t_default_repo_id

    # test create_repo
    t_repo_id = api.create_repo('test_repo_manipulation', '', USER, passwd=None)
    assert t_repo_id

    # test count_repos
    t_repo_count = 0
    t_repo_count = api.count_repos()
    assert t_repo_count != 0

    # test get_repo, edit_repo
    t_new_name = 'n_name'
    t_new_desc = 'n_desc'
    t_repo_version = 1
    t_repo = api.get_repo(t_repo_id)
    assert t_repo

    api.edit_repo(t_repo_id, t_new_name, t_new_desc, USER)
    t_repo = api.get_repo(t_repo_id)
    assert t_repo.name == t_new_name and t_repo.desc == t_new_desc

    # test revert_repo and get_commit
    t_commit_id_before_changing = t_repo.head_cmmt_id
    api.post_dir(t_repo_id, '/', 'dir1', USER)
    t_repo = api.get_repo(t_repo_id)

    api.revert_repo(t_repo_id, t_commit_id_before_changing, USER)

    t_repo = api.get_repo(t_repo_id)
    t_commit_id_after_revert = t_repo.head_cmmt_id
    t_commit_before_changing = api.get_commit(t_repo_id, t_repo_version, t_commit_id_before_changing)
    t_commit_after_revert = api.get_commit(t_repo_id, t_repo_version, t_commit_id_after_revert)

    assert t_commit_before_changing.root_id == t_commit_after_revert.root_id

    # test is_repo_owner
    assert api.is_repo_owner(USER, t_repo_id)
    assert api.is_repo_owner(USER2, t_repo_id) == 0

    # test get_repo_owner
    owner_get = api.get_repo_owner(t_repo_id)
    assert owner_get == USER

    # test set_repo_owner
    api.set_repo_owner(t_repo_id, USER2)
    assert api.is_repo_owner(USER2, t_repo_id)

    # test create_enc_repo
    t_enc_repo_id = '826d1b7b-f110-46f2-8d5e-7b5ac3e11f4d'
    t_enc_version = 2
    t_passwd = '123'
    magic_and_random_key = api.generate_magic_and_random_key(t_enc_version, t_enc_repo_id, t_passwd)
    t_magic = magic_and_random_key.magic
    t_random_key = magic_and_random_key.random_key
    t_enc_repo_id = api.create_enc_repo(t_enc_repo_id, 'test_encrypted_repo', '', USER,
                                        t_magic, t_random_key, t_enc_version)
    assert t_enc_repo_id == '826d1b7b-f110-46f2-8d5e-7b5ac3e11f4d'

    # test get_repo_list
    t_start = -1
    t_limit = -1
    t_repo_list = api.get_repo_list(t_start, t_limit)
    assert t_repo_list and len(t_repo_list)

    t_start = 1
    t_limit = 1
    t_repo_list = api.get_repo_list(t_start, t_limit)
    assert t_repo_list and len(t_repo_list) == 1

    # test get_owned_repo_list
    t_repo_list = api.get_owned_repo_list(USER2)
    assert t_repo_list and len(t_repo_list)

    # test get_commit_list
    t_offset = 0
    t_limit = 0
    t_commit_list = api.get_commit_list(t_repo_id, t_offset, t_limit)
    assert t_commit_list and len(t_commit_list) == 4

    t_offset = 1
    t_limit = 1
    t_commit_list = api.get_commit_list(t_repo_id, t_offset, t_limit)
    assert t_commit_list and len(t_commit_list) == 1

    # test remove_repo
    api.remove_repo(t_repo_id)
    t_repo = api.get_repo(t_repo_id)
    assert t_repo is None
def get(self, request, format=None):
    """List 'all' libraries (by name/owner/page).

    Permission checking:
    1. only admin can perform this action.
    """
    # search libraries (by name/owner)
    repo_name = request.GET.get('name', '')
    owner = request.GET.get('owner', '')

    repos = []
    if repo_name and owner:
        # search by name and owner
        owned_repos = seafile_api.get_owned_repo_list(owner)
        for repo in owned_repos:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": owner, "repos": repos})

    elif repo_name:
        # search by name (keyword in name)
        repos_all = seafile_api.get_repo_list(-1, -1)
        for repo in repos_all:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": '', "repos": repos})

    elif owner:
        # search by owner
        owned_repos = seafile_api.get_owned_repo_list(owner)
        for repo in owned_repos:
            if repo.is_virtual:
                continue
            repo_info = get_repo_info(repo)
            repos.append(repo_info)
        return Response({"name": '', "owner": owner, "repos": repos})

    # get libraries by page
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = (current_page - 1) * per_page
    limit = per_page + 1
    repos_all = seafile_api.get_repo_list(start, limit)

    if len(repos_all) > per_page:
        repos_all = repos_all[:per_page]
        has_next_page = True
    else:
        has_next_page = False

    default_repo_id = get_system_default_repo_id()
    repos_all = [r for r in repos_all if not r.is_virtual]
    repos_all = [r for r in repos_all if r.repo_id != default_repo_id]

    return_results = []
    for repo in repos_all:
        repo_info = get_repo_info(repo)
        return_results.append(repo_info)

    page_info = {
        'has_next_page': has_next_page,
        'current_page': current_page
    }

    return Response({"page_info": page_info, "repos": return_results})
def wait_for_server():
    seafile_api.get_repo_list(0, 1)
def get(self, request, format=None):
    """List 'all' libraries (by name/owner/page).

    Permission checking:
    1. only admin can perform this action.
    """
    if not request.user.admin_permissions.can_manage_library():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    order_by = request.GET.get('order_by', '').lower().strip()
    if order_by and order_by not in ('size', 'file_count'):
        error_msg = 'order_by invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # search libraries (by name/owner)
    repo_name = request.GET.get('name', '')
    owner = request.GET.get('owner', '')

    repos = []
    if repo_name and owner:
        # search by name and owner
        orgs = ccnet_api.get_orgs_by_user(owner)
        if orgs:
            org_id = orgs[0].org_id
            owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
        else:
            owned_repos = seafile_api.get_owned_repo_list(owner)

        for repo in owned_repos:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": owner, "repos": repos})

    elif repo_name:
        # search by name (keyword in name)
        repos_all = seafile_api.get_repo_list(-1, -1)
        for repo in repos_all:
            if not repo.name or repo.is_virtual:
                continue
            if repo_name in repo.name:
                repo_info = get_repo_info(repo)
                repos.append(repo_info)
        return Response({"name": repo_name, "owner": '', "repos": repos})

    elif owner:
        # search by owner
        orgs = ccnet_api.get_orgs_by_user(owner)
        if orgs:
            org_id = orgs[0].org_id
            owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
        else:
            owned_repos = seafile_api.get_owned_repo_list(owner)

        for repo in owned_repos:
            if repo.is_virtual:
                continue
            repo_info = get_repo_info(repo)
            repos.append(repo_info)
        return Response({"name": '', "owner": owner, "repos": repos})

    # get libraries by page
    try:
        current_page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '100'))
    except ValueError:
        current_page = 1
        per_page = 100

    start = (current_page - 1) * per_page
    limit = per_page + 1

    if order_by:
        repos_all = seafile_api.get_repo_list(start, limit, order_by)
    else:
        repos_all = seafile_api.get_repo_list(start, limit)

    if len(repos_all) > per_page:
        repos_all = repos_all[:per_page]
        has_next_page = True
    else:
        has_next_page = False

    default_repo_id = get_system_default_repo_id()
    repos_all = [r for r in repos_all if not r.is_virtual]
    repos_all = [r for r in repos_all if r.repo_id != default_repo_id]

    return_results = []
    for repo in repos_all:
        repo_info = get_repo_info(repo)
        return_results.append(repo_info)

    page_info = {
        'has_next_page': has_next_page,
        'current_page': current_page
    }

    return Response({"page_info": page_info, "repos": return_results})
#!/usr/bin/env python
from seaserv import seafile_api
from keeper.cdc.cdc_manager import generate_certificate_by_repo

MAX_INT = 2147483647

err_list = []

###### START
repos_all = seafile_api.get_repo_list(0, MAX_INT)

# total amount of generated CDCs by the run
cdc_gen_amount = 0

for repo in repos_all:
    try:
        if generate_certificate_by_repo(repo):
            cdc_gen_amount += 1
    except Exception as err:
        print(err)
        err_list.append({"name": repo.name, "id": repo.id, "err": str(err)})

print("Amount of generated certificates:", cdc_gen_amount)
print("Errors:", err_list)

# print('\n'.join(sys.path))
def do_work(self):
    self._current_commit_position = 0
    repo = seafile_api.get_repo_list(self._current_repo_position, 1)
    if not repo:
        return -1
    repo = repo[0]
    logging.info('Start processing repo: %s', repo.repo_id)

    org_id = get_org_id_by_repo_id(repo.repo_id)
    repo_id = repo.repo_id
    if org_id > 0:
        users_obj = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
        owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        users_obj = seafile_api.get_shared_users_by_repo(repo_id)
        owner = seafile_api.get_repo_owner(repo_id)
    users = [e.user for e in users_obj] + [owner]

    self._last_commit_id = None
    if repo_id in self._history_repo.keys():
        commit_ids = self.get_repo_last_commits(repo_id)
        count = 0
        k = 0
        bk = False
        while True:
            temp = [e.id for e in seafile_api.get_commit_list(repo_id, k * 100, 100)]
            if not temp:
                break
            # avoid two commits created at the same time
            for commit_id in commit_ids:
                if commit_id[0] in temp:
                    count += 1
                    if count == len(commit_ids):
                        self._current_commit_position = k * 100 + temp.index(commit_id[0]) + 1
                        self._last_commit_id = commit_id[0]
                        bk = True
                        break
            if bk:
                break
            k += 1
    else:
        # keeping _current_commit_position zero will restore all activity records of the repo
        commit_objs = seafile_api.get_commit_list(repo_id, self._current_commit_position, 1)
        current_commit_id = [e.id for e in commit_objs][0]
        self._last_commit_id = current_commit_id
        self.diff_and_update(repo_id, current_commit_id, org_id, users)

    start_commit_position = self._current_commit_position
    count_offest = 0
    while True:
        # get the last commit together with the following commits;
        # avoid current_commit_position being invalidated by newly generated records
        commit_objs = seafile_api.get_commit_list(repo_id, self._current_commit_position - 1, 5)
        commit_ids = [e.id for e in commit_objs]
        if not commit_objs or len(commit_objs) == 1:
            break
        if self._last_commit_id not in commit_ids or commit_objs[-1].id == self._last_commit_id:
            self._current_commit_position += 4
            count_offest = 4
        else:
            offset = commit_ids.index(self._last_commit_id)
            self._current_commit_position += offset
            current_commit_id = commit_ids[offset + 1]
            self._last_commit_id = commit_ids[offset + 1]
            self.diff_and_update(repo_id, current_commit_id, org_id, users)
            count_offest = 1

    count = self._current_commit_position - start_commit_position - count_offest
    logging.info("%s recovered %s activity records" % (repo_id, count))
def get_catalog():
    catalog = []

    repos_all = seafile_api.get_repo_list(0, MAX_INT)
    # repos_all = [seafile_api.get_repo('a6d4ae75-b063-40bf-a3d9-dde74623bb2c')]

    for repo in repos_all:
        try:
            proj = {}
            proj["id"] = repo.id
            proj["name"] = repo.name
            email = get_repo_owner(repo.id)
            proj["owner"] = email
            user_name = get_user_name(email)
            if user_name != email:
                proj["owner_name"] = user_name
            proj["in_progress"] = True

            commits = get_commits(repo.id, 0, 1)
            commit = get_commit(repo.id, repo.version, commits[0].id)
            dir = fs_mgr.load_seafdir(repo.id, repo.version, commit.root_id)
            file = dir.lookup(ARCHIVE_METADATA_TARGET)
            if file:
                md = parse_markdown(file.get_content())
                if md:
                    # Author
                    a = md.get("Author")
                    if a:
                        a_list = strip_uni(a.strip()).split('\n')
                        authors = []
                        for _ in a_list:
                            author = {}
                            aa = _.split(';')
                            author['name'] = aa[0]
                            if len(aa) > 1 and aa[1].strip():
                                author['affs'] = [x.strip() for x in aa[1].split('|')]
                                author['affs'] = [x for x in author['affs'] if x]
                            authors.append(author)
                        if a:
                            proj["authors"] = authors

                    # Description
                    d = strip_uni(md.get("Description"))
                    if d:
                        proj["description"] = d

                    # Comments
                    c = strip_uni(md.get("Comments"))
                    if c:
                        proj["comments"] = c

                    # Title
                    t = strip_uni(md.get("Title"))
                    if t:
                        proj["title"] = t

                    del proj["in_progress"]
                    proj["is_certified"] = is_certified_by_repo_id(repo.id)
            else:
                if DEBUG:
                    print("No %s for repo %s found" % (ARCHIVE_METADATA_TARGET, repo.name))

            catalog.append(proj)

        except Exception as err:
            msg = "repo_name: %s, id: %s, err: %s" % (repo.name, repo.id, str(err))
            logging.error(msg)
            if DEBUG:
                print(msg)

    return catalog