def render_recycle_root(request, repo_id, referer):
    """Render the recycle (trash) page for the root of a library.

    Fetches one batch of deleted entries via seafile_api and renders them,
    newest deletion first. ``referer`` is passed through to the template so
    the page can link back to where the user came from.
    """
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    scan_stat = request.GET.get('scan_stat', None)
    try:
        deleted_entries = seafile_api.get_deleted(repo_id, 0, '/', scan_stat)
    except SearpcError as e:
        logger.error(e)
        # On RPC failure, bounce the user back to where they came from
        # (or the site root when no HTTP referer is available).
        referer = request.META.get('HTTP_REFERER', None)
        next_page = settings.SITE_ROOT if referer is None else referer
        return HttpResponseRedirect(next_page)

    # A non-empty result always ends with a pseudo-entry that only carries
    # the 'scan_stat' token used to fetch the next batch; it is not a
    # real deleted entry.
    if not deleted_entries:
        new_scan_stat = None
    else:
        new_scan_stat = deleted_entries[-1].scan_stat

    # A non-None scan_stat means there are more entries to fetch.
    trash_more = new_scan_stat is not None

    # Drop the trailing scan_stat pseudo-entry.
    deleted_entries = deleted_entries[0:-1]
    for dirent in deleted_entries:
        dirent.is_dir = stat.S_ISDIR(dirent.mode)

    # Entries sort by deletion time in descending order.
    # key-based sort instead of a cmp lambda: cmp() and the cmp argument
    # were removed in Python 3; this form behaves identically on both.
    deleted_entries.sort(key=lambda x: x.delete_time, reverse=True)

    username = request.user.username
    if is_org_context(request):
        repo_owner = seafile_api.get_org_repo_owner(repo.id)
    else:
        repo_owner = seafile_api.get_repo_owner(repo.id)

    is_repo_owner = repo_owner == username
    # Only the repo owner may clean (permanently empty) the trash.
    enable_clean = is_repo_owner

    return render_to_response('repo_dir_recycle_view.html', {
            'show_recycle_root': True,
            'repo': repo,
            'repo_dir_name': repo.name,
            'dir_entries': deleted_entries,
            'scan_stat': new_scan_stat,
            'trash_more': trash_more,
            'enable_clean': enable_clean,
            'referer': referer,
            }, context_instance=RequestContext(request))
def render_recycle_root(request, repo_id):
    """Render the recycle (trash) page for the root of a library.

    Fetches one batch of deleted entries via seafile_api and renders them,
    newest deletion first.
    """
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    scan_stat = request.GET.get('scan_stat', None)
    try:
        deleted_entries = seafile_api.get_deleted(repo_id, 0, '/', scan_stat)
    except SearpcError as e:
        logger.error(e)
        # On RPC failure, bounce the user back to where they came from
        # (or the site root when no HTTP referer is available).
        referer = request.META.get('HTTP_REFERER', None)
        next_page = settings.SITE_ROOT if referer is None else referer
        return HttpResponseRedirect(next_page)

    # A non-empty result always ends with a pseudo-entry that only carries
    # the 'scan_stat' token used to fetch the next batch; it is not a
    # real deleted entry.
    if not deleted_entries:
        new_scan_stat = None
    else:
        new_scan_stat = deleted_entries[-1].scan_stat

    # A non-None scan_stat means there are more entries to fetch.
    trash_more = new_scan_stat is not None

    # Drop the trailing scan_stat pseudo-entry.
    deleted_entries = deleted_entries[0:-1]
    for dirent in deleted_entries:
        dirent.is_dir = stat.S_ISDIR(dirent.mode)

    # Entries sort by deletion time in descending order.
    # key-based sort instead of a cmp lambda: cmp() and the cmp argument
    # were removed in Python 3; this form behaves identically on both.
    deleted_entries.sort(key=lambda x: x.delete_time, reverse=True)

    username = request.user.username
    if is_org_context(request):
        repo_owner = seafile_api.get_org_repo_owner(repo.id)
    else:
        repo_owner = seafile_api.get_repo_owner(repo.id)

    is_repo_owner = repo_owner == username
    # Only the repo owner may clean (permanently empty) the trash.
    enable_clean = is_repo_owner

    return render_to_response('repo_dir_recycle_view.html', {
            'show_recycle_root': True,
            'repo': repo,
            'repo_dir_name': repo.name,
            'dir_entries': deleted_entries,
            'scan_stat': new_scan_stat,
            'trash_more': trash_more,
            'enable_clean': enable_clean,
            }, context_instance=RequestContext(request))
def get_trash_file_commit_id(self):
    """Return the commit id recorded on the first deleted entry of the repo."""
    entries = seafile_api.get_deleted(self.repo_id, 0, '/', None)
    first_entry = entries[0]
    return first_entry.commit_id
def ajax_repo_dir_recycle_more(request, repo_id):
    """ List first/'more' batch of repo/dir trash.

    Returns JSON: rendered table rows ('html'), whether more entries exist
    ('trash_more'), and the pagination token ('new_scan_stat').
    """
    result = {}
    content_type = 'application/json; charset=utf-8'

    repo = seafile_api.get_repo(repo_id)
    if not repo:
        err_msg = 'Library %s not found.' % repo_id
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=404, content_type=content_type)

    path = request.GET.get('path', '/')
    path = '/' if path == '' else path
    # Trash listing requires read-write permission on the folder.
    if check_folder_permission(request, repo_id, path) != 'rw':
        err_msg = 'Permission denied.'
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=403, content_type=content_type)

    scan_stat = request.GET.get('scan_stat', None)
    try:
        # a list will be returned, with at least 1 item in it
        # the last item is not a deleted entry, and it contains an
        # attribute named 'scan_stat'
        deleted_entries = seafile_api.get_deleted(repo_id, 0, path, scan_stat)
    except SearpcError as e:
        logger.error(e)
        result['error'] = 'Internal server error'
        return HttpResponse(json.dumps(result), status=500,
                            content_type=content_type)

    new_scan_stat = deleted_entries[-1].scan_stat
    # A non-None scan_stat means there are more entries to fetch.
    trash_more = new_scan_stat is not None

    more_entries_html = ''
    if len(deleted_entries) > 1:
        # Drop the trailing scan_stat pseudo-entry.
        deleted_entries = deleted_entries[0:-1]
        for dirent in deleted_entries:
            dirent.is_dir = stat.S_ISDIR(dirent.mode)

        # Entries sort by deletion time in descending order.
        # key-based sort instead of a cmp lambda: cmp() and the cmp
        # argument were removed in Python 3; behavior is identical.
        deleted_entries.sort(key=lambda x: x.delete_time, reverse=True)

        ctx = {
            'show_recycle_root': True,
            'repo': repo,
            'dir_entries': deleted_entries,
            'dir_path': path,
            'MEDIA_URL': MEDIA_URL,
            'referer': request.GET.get('referer', '')
        }
        more_entries_html = render_to_string("snippets/repo_dir_trash_tr.html", ctx)

    result = {
        'html': more_entries_html,
        'trash_more': trash_more,
        'new_scan_stat': new_scan_stat,
    }
    return HttpResponse(json.dumps(result), content_type=content_type)
def get(self, request, repo_id, format=None):
    """ Return deleted files/dirs of a repo/folder

    Permission checking:
    1. all authenticated user can perform this action.
    """
    # argument check
    path = request.GET.get('path', '/')

    # resource check
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    try:
        dir_id = seafile_api.get_dir_id_by_path(repo_id, path)
    except SearpcError as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    if not dir_id:
        error_msg = 'Folder %s not found.' % path
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    if check_folder_permission(request, repo_id, path) is None:
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # Invalid/non-numeric show_days silently falls back to 0 (all days);
    # only an explicit negative value is rejected.
    try:
        show_days = int(request.GET.get('show_days', '0'))
    except ValueError:
        show_days = 0

    if show_days < 0:
        error_msg = 'show_days invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    scan_stat = request.GET.get('scan_stat', None)
    try:
        # a list will be returned, with at least 1 item in it
        # the last item is not a deleted entry, and it contains an
        # attribute named 'scan_stat'
        deleted_entries = seafile_api.get_deleted(repo_id, show_days,
                                                  path, scan_stat)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    scan_stat = deleted_entries[-1].scan_stat
    # A non-None scan_stat means there are more entries to fetch.
    more = scan_stat is not None

    items = []
    if len(deleted_entries) > 1:
        # Drop the trailing scan_stat pseudo-entry.
        entries_without_scan_stat = deleted_entries[0:-1]

        # sort entry by delete time, newest first.
        # key-based sort instead of a cmp lambda: cmp() and the cmp
        # argument were removed in Python 3; behavior is identical.
        entries_without_scan_stat.sort(
            key=lambda x: x.delete_time, reverse=True)

        for item in entries_without_scan_stat:
            item_info = self.get_item_info(item)
            items.append(item_info)

    result = {
        'data': items,
        'more': more,
        'scan_stat': scan_stat,
    }
    return Response(result)
def test_file_operation():
    """End-to-end test of file operations against the seafile API.

    Exercises post_file, post_dir, copy_file (sync + async, including the
    quota-full failure path), move_file (sync + async), post_empty_file,
    rename_file, put_file, get_file_revisions, del_file and get_deleted.
    Both repos created here are removed at the end.
    """
    t_repo_version = 1
    t_repo_id1 = api.create_repo('test_file_operation1', '', USER, passwd=None)
    create_the_file()

    # test post_file
    assert api.post_file(t_repo_id1, file_path, '/', file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + file_name)
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version, t_file_id)

    # test post_dir
    assert api.post_dir(t_repo_id1, '/', dir_name, USER) == 0

    # test copy_file (synchronize)
    t_copy_file_result1 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id1, '/', new_file_name,
                                        USER, 0, 1)
    assert t_copy_file_result1
    assert t_copy_file_result1.task_id is None
    assert not t_copy_file_result1.background
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version, t_file_id)

    # test copy_file (asynchronous)
    t_repo_id2 = api.create_repo('test_file_operation2', '', USER, passwd=None)
    # Shrink the quota to just above current usage so the async copy fails
    # with 'Quota is full'.
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', file_name,
                                        USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        # NOTE(review): these asserts run on every poll, so they presume the
        # task has already failed on the first poll — confirm this is not racy.
        assert t_copy_task.failed
        assert t_copy_task.failed_reason == 'Quota is full'
        if t_copy_task.failed:
            break
    # Restore unlimited quota and retry; this time the task must succeed.
    api.set_user_quota(USER, -1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', file_name,
                                        USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.task_id
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        if t_copy_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version, t_file_id)

    # test move_file (synchronize)
    t_move_file_info1 = api.get_dirent_by_path(t_repo_id1, '/' + new_file_name)
    t_move_file_result1 = api.move_file(t_repo_id1, '/', new_file_name,
                                        t_repo_id1, '/' + dir_name,
                                        new_file_name, 1, USER, 0, 1)
    assert t_move_file_result1
    t_move_file_info2 = api.get_dirent_by_path(
        t_repo_id1, '/' + dir_name + '/' + new_file_name)
    # A move must preserve the file's mtime.
    assert t_move_file_info1.mtime == t_move_file_info2.mtime
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_id is None

    # test move_file (synchronize)
    t_move_file_result1 = api.move_file(t_repo_id1, '/' + dir_name,
                                        new_file_name, t_repo_id1, '/',
                                        new_file_name_2, 1, USER, 0, 1)
    assert t_move_file_result1
    t_file_id = api.get_file_id_by_path(t_repo_id1,
                                        '/' + dir_name + '/' + new_file_name)
    assert t_file_id is None

    # test move_file (asynchronous)
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', new_file_name,
                                        1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        # NOTE(review): same first-poll assumption as the copy loop above.
        assert t_move_task.failed
        assert t_move_task.failed_reason == 'Quota is full'
        if t_move_task.failed:
            break
    api.set_user_quota(USER, -1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', new_file_name,
                                        1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        if t_move_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version, t_file_id)

    # test post_empty_file
    assert api.post_empty_file(t_repo_id1, '/' + dir_name,
                               empty_file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(t_repo_id1,
                                        '/' + dir_name + '/' + empty_file_name)
    assert api.get_file_size(t_repo_id1, t_repo_version, t_file_id) == 0

    # test rename_file
    assert api.rename_file(t_repo_id1, '/' + dir_name, empty_file_name,
                           new_empty_file_name, USER) == 0

    # test put_file
    t_new_file_id = api.put_file(t_repo_id1, file_path, '/' + dir_name,
                                 new_empty_file_name, USER, None)
    assert t_new_file_id

    # test get_file_revisions
    t_commit_list = api.get_file_revisions(t_repo_id2, None, '/' + file_name, 2)
    assert t_commit_list
    assert len(t_commit_list) == 2
    assert t_commit_list[0].creator_name == USER

    # test del_file
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    # test get_deleted
    t_deleted_file_list = api.get_deleted(t_repo_id2, 1)
    assert t_deleted_file_list
    assert len(t_deleted_file_list) == 2
    assert t_deleted_file_list[0].obj_name == file_name
    assert t_deleted_file_list[0].basedir == '/'

    # test del a non-exist file. should return 0.
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    assert api.del_file(t_repo_id1, '/' + dir_name, new_empty_file_name, USER) == 0
    assert api.del_file(t_repo_id1, '/' + dir_name, new_file_name, USER) == 0
    assert api.del_file(t_repo_id2, '/', new_file_name, USER) == 0
    assert api.del_file(t_repo_id1, '/', new_file_name_2, USER) == 0
    time.sleep(1)
    api.remove_repo(t_repo_id1)
    # Fix: the second repo was previously leaked; remove it too so the
    # test leaves no state behind.
    api.remove_repo(t_repo_id2)