def render_file_revisions(request, repo_id):
    """List all history versions of a file."""
    days_str = request.GET.get('days', '')
    try:
        days = int(days_str)
    except ValueError:
        days = 7

    path = request.GET.get('p', '/')
    if not path:
        return render_error(request)
    if path[-1] == '/':
        path = path[:-1]
    u_filename = os.path.basename(path)

    repo = get_repo(repo_id)
    if not repo:
        error_msg = _(u"Library does not exist")
        return render_error(request, error_msg)

    filetype = get_file_type_and_ext(u_filename)[0].lower()
    if filetype == 'text' or filetype == 'markdown':
        can_compare = True
    else:
        can_compare = False

    try:
        commits = seafile_api.get_file_revisions(repo_id, path, -1, -1, days)
    except SearpcError as e:
        logger.error(e.msg)
        return render_error(request, e.msg)
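# Minimal usage sketch (an assumption, not part of the project): the view above
# reads the file path from the 'p' query parameter and an optional 'days'
# window (defaulting to 7 when missing or non-numeric). Django's RequestFactory
# can build such a request; '/fake-url/' and the sample path are placeholders.
from django.test import RequestFactory

rf = RequestFactory()
request = rf.get('/fake-url/', {'p': '/docs/readme.md', 'days': '30'})
# render_file_revisions(request, repo_id) would then list the revisions of
# /docs/readme.md made within the last 30 days for an existing library.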
def _calc_file_last_modified(self, repo_id, file_path, file_path_hash, file_id):
    try:
        # get only the latest file revision
        commits = seafile_api.get_file_revisions(repo_id, file_path, 1, -1)
    except SearpcError as e:
        return '', 0
def get_all_file_revisions(repo_id, path, commit_id=None):
    """Only used for testing file revert.

    py.test tests/api/endpoints/test_file_view.py::FileViewTest::test_can_revert_file
    """
    all_file_revisions = []

    if not commit_id:
        repo = seafile_api.get_repo(repo_id)
        commit_id = repo.head_cmmt_id

    file_revisions = seafile_api.get_file_revisions(repo_id, commit_id, path, -1)
    all_file_revisions += file_revisions

    # If a commit's rev_renamed_old_path value is not None, seafile stops
    # searching, so always follow the `rev_renamed_old_path` info.
    next_start_commit = file_revisions[-1].next_start_commit
    if next_start_commit:
        path = file_revisions[-2].rev_renamed_old_path if \
                len(file_revisions) > 1 else None
        file_revisions = get_all_file_revisions(repo_id, path, next_start_commit)
        all_file_revisions += file_revisions

    # from seafile_api:
    # @next_start_commit: commit_id for next page.
    # An extra commit which only contains @next_start_commit will be
    # appended to the list.
    return all_file_revisions[0:-1]
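# Hypothetical usage sketch for the helper above: walk every recorded revision
# of a file across renames. `repo_id` and the sample path are placeholders; the
# `id` and `creator_name` attributes appear on revision commits elsewhere in
# these snippets.
repo_id = 'an-existing-repo-id'  # placeholder
revisions = get_all_file_revisions(repo_id, '/docs/readme.md')
for rev in revisions:
    print(rev.id, rev.creator_name)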
def get_file_revisions_after_renamed(repo_id, path):
    all_file_revisions = []
    repo = seafile_api.get_repo(repo_id)
    commit_id = repo.head_cmmt_id

    start_time = time.time()
    keep_on_search = True
    while keep_on_search:
        file_revisions = seafile_api.get_file_revisions(repo_id, commit_id, path, 50)
        all_file_revisions += file_revisions[0:-1]
        end_time = time.time()

        next_start_commit = file_revisions[-1].next_start_commit
        rev_renamed_old_path = file_revisions[-2].rev_renamed_old_path if \
                len(file_revisions) > 1 else None

        if not next_start_commit or \
                rev_renamed_old_path or \
                end_time - start_time > GET_FILE_HISTORY_TIMEOUT:
            # stop: all commits searched, or
            # a rename/move commit was found, or
            # the timeout was reached
            keep_on_search = False
        else:
            # keep searching, using next_start_commit
            # as the commit_id to start from
            commit_id = next_start_commit

    return all_file_revisions
def get_file_revisions_within_limit(repo_id, path, commit_id=None, limit=50):
    if not commit_id:
        repo = seafile_api.get_repo(repo_id)
        commit_id = repo.head_cmmt_id

    file_revisions = seafile_api.get_file_revisions(repo_id, commit_id, path, limit)
    next_start_commit = file_revisions[-1].next_start_commit

    return file_revisions[0:-1], next_start_commit
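# Hypothetical paging sketch built on get_file_revisions_within_limit: request
# pages of up to 50 revisions and follow next_start_commit (the commit id for
# the next page) until it is empty, mirroring the loop in
# get_file_revisions_after_renamed above. Identifiers below are placeholders.
repo_id = 'an-existing-repo-id'
path = '/docs/readme.md'
all_revisions = []
next_commit = None
while True:
    page, next_commit = get_file_revisions_within_limit(
        repo_id, path, commit_id=next_commit, limit=50)
    all_revisions += page
    if not next_commit:
        break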
def _get_file_contributors_from_revisions(self, repo_id, file_path):
    """Inspect the file history and get a list of users who have modified the file.
    """
    commits = []
    try:
        commits = seafile_api.get_file_revisions(repo_id, file_path, -1, -1)
    except SearpcError as e:
        return [], 0, ''
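# The snippet above is truncated. As a rough, hypothetical illustration of the
# idea its docstring names (not the project's actual implementation), a
# contributor list can be derived from the creator_name of each commit returned
# by seafile_api.get_file_revisions, de-duplicated while preserving order.
def contributors_from_commits(commits):
    contributors = []
    for commit in commits:
        if commit.creator_name not in contributors:
            contributors.append(commit.creator_name)
    return contributors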
def test_revert_file_with_invalid_user_permission(self):
    # first rename file
    new_name = randstring(6)
    seafile_api.rename_file(self.repo_id, '/', self.file_name, new_name, self.user_name)
    new_file_path = '/' + new_name

    # get file revisions
    commits = seafile_api.get_file_revisions(
        self.repo_id, new_file_path, -1, -1, 100)

    # then revert file
    data = {'operation': 'revert', 'commit_id': commits[0].id}
    resp = self.client.post(self.url + '?p=' + new_file_path, data)
    self.assertEqual(403, resp.status_code)
def test_file_operation():
    t_repo_version = 1
    t_repo_id1 = api.create_repo('test_file_operation1', '', USER, passwd=None)
    create_the_file()

    # test post_file
    assert api.post_file(t_repo_id1, file_path, '/', file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + file_name)
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version, t_file_id)

    # test post_dir
    assert api.post_dir(t_repo_id1, '/', dir_name, USER) == 0

    # test copy_file (synchronous)
    t_copy_file_result1 = api.copy_file(t_repo_id1, '/', file_name, t_repo_id1,
                                        '/', new_file_name, USER, 0, 1)
    assert t_copy_file_result1
    assert t_copy_file_result1.task_id is None
    assert not t_copy_file_result1.background
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version, t_file_id)

    # test copy_file (asynchronous)
    t_repo_id2 = api.create_repo('test_file_operation2', '', USER, passwd=None)
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name, t_repo_id2,
                                        '/', file_name, USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        assert t_copy_task.failed
        assert t_copy_task.failed_reason == 'Quota is full'
        if t_copy_task.failed:
            break
    api.set_user_quota(USER, -1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name, t_repo_id2,
                                        '/', file_name, USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.task_id
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        if t_copy_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version, t_file_id)

    # test move_file (synchronous)
    t_move_file_info1 = api.get_dirent_by_path(t_repo_id1, '/' + new_file_name)
    t_move_file_result1 = api.move_file(t_repo_id1, '/', new_file_name, t_repo_id1,
                                        '/' + dir_name, new_file_name, 1, USER, 0, 1)
    assert t_move_file_result1
    t_move_file_info2 = api.get_dirent_by_path(
        t_repo_id1, '/' + dir_name + '/' + new_file_name)
    assert t_move_file_info1.mtime == t_move_file_info2.mtime
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_id is None

    # test move_file (synchronous)
    t_move_file_result1 = api.move_file(t_repo_id1, '/' + dir_name, new_file_name,
                                        t_repo_id1, '/', new_file_name_2, 1, USER, 0, 1)
    assert t_move_file_result1
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + dir_name + '/' + new_file_name)
    assert t_file_id is None

    # test move_file (asynchronous)
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name, t_repo_id2,
                                        '/', new_file_name, 1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        assert t_move_task.failed
        assert t_move_task.failed_reason == 'Quota is full'
        if t_move_task.failed:
            break
    api.set_user_quota(USER, -1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name, t_repo_id2,
                                        '/', new_file_name, 1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        if t_move_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version, t_file_id)

    # test post_empty_file
    assert api.post_empty_file(t_repo_id1, '/' + dir_name, empty_file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + dir_name + '/' + empty_file_name)
    assert api.get_file_size(t_repo_id1, t_repo_version, t_file_id) == 0

    # test rename_file
    assert api.rename_file(t_repo_id1, '/' + dir_name, empty_file_name,
                           new_empty_file_name, USER) == 0

    # test put_file
    t_new_file_id = api.put_file(t_repo_id1, file_path, '/' + dir_name,
                                 new_empty_file_name, USER, None)
    assert t_new_file_id

    # test get_file_revisions
    t_commit_list = api.get_file_revisions(t_repo_id2, None, '/' + file_name, 2)
    assert t_commit_list
    assert len(t_commit_list) == 2
    assert t_commit_list[0].creator_name == USER

    # test del_file
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    # test get_deleted
    t_deleted_file_list = api.get_deleted(t_repo_id2, 1)
    assert t_deleted_file_list
    assert len(t_deleted_file_list) == 2
    assert t_deleted_file_list[0].obj_name == file_name
    assert t_deleted_file_list[0].basedir == '/'

    # test deleting a non-existent file; should return 0.
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    assert api.del_file(t_repo_id1, '/' + dir_name, new_empty_file_name, USER) == 0
    assert api.del_file(t_repo_id1, '/' + dir_name, new_file_name, USER) == 0
    assert api.del_file(t_repo_id2, '/', new_file_name, USER) == 0
    assert api.del_file(t_repo_id1, '/', new_file_name_2, USER) == 0

    time.sleep(1)
    api.remove_repo(t_repo_id1)