def test_publish(self):
    assert len(Draft.objects.all()) == 0

    d = Draft.objects.add(self.user.username, self.repo, self.file)
    assert d is not None
    assert len(Draft.objects.all()) == 1
    assert seafile_api.get_file_id_by_path(d.origin_repo_id,
                                           d.draft_file_path) is not None
    assert len(seafile_api.list_dir_by_path(self.repo.id, '/Drafts')) == 1

    d.publish(self.user.username)

    # file is updated in origin repo
    assert len(seafile_api.list_dir_by_path(self.repo.id, '/')) == 2

def get_dirents(request, repo_id):
    """
    Get dirents in a dir for file tree
    """
    if not request.is_ajax():
        raise Http404

    content_type = 'application/json; charset=utf-8'
    username = request.user.username

    # permission checking
    user_perm = check_repo_access_permission(repo_id, username)
    if user_perm is None:
        err_msg = _(u"You don't have permission to access the library.")
        return HttpResponse(json.dumps({"err_msg": err_msg}), status=403,
                            content_type=content_type)

    path = request.GET.get('path', '')
    dir_only = request.GET.get('dir_only', False)
    if not path:
        err_msg = _(u"No path.")
        return HttpResponse(json.dumps({"err_msg": err_msg}), status=400,
                            content_type=content_type)

    try:
        dirents = seafile_api.list_dir_by_path(repo_id, path.encode('utf-8'))
    except SearpcError as e:
        return HttpResponse(json.dumps({"err_msg": e.msg}), status=500,
                            content_type=content_type)

def get_keeper_default_library():
    try:
        from_repos = seafile_api.get_owned_repo_list(SERVER_EMAIL)
        from_repos = [r for r in from_repos if r.name == KEEPER_DEFAULT_LIBRARY]
        if from_repos:
            from_repo_id = from_repos[0].id
        else:
            logging.info("Cannot find KEEPER_DEFAULT_LIBRARY repo in admin "
                         "libraries, trying to create...")
            from_repo_id = create_keeper_default_library()
            if from_repo_id is None:
                raise Exception("Cannot create KEEPER_DEFAULT_LIBRARY repo in "
                                "admin libraries, please check!")
            else:
                logging.info("KEEPER_DEFAULT_LIBRARY has been successfully created!")
        return {
            'repo_id': from_repo_id,
            'dirents': seafile_api.list_dir_by_path(from_repo_id, '/')
        }
    except Exception as err:
        logging.error("Cannot find KEEPER_DEFAULT_LIBRARY dirents, err: " + str(err))
        return None

def count_files_recursive(repo_id, path='/'):
    num_files = 0
    for e in seafile_api.list_dir_by_path(repo_id, path):
        if stat.S_ISDIR(e.mode):
            num_files += count_files_recursive(repo_id,
                                               os.path.join(path, e.obj_name))
        else:
            num_files += 1
    return num_files

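# Usage sketch, not from the original source: 'repo-id-placeholder' and the
# '/docs' folder are hypothetical, and seafile_api must already be configured.
total_files = count_files_recursive('repo-id-placeholder')          # whole library
docs_files = count_files_recursive('repo-id-placeholder', '/docs')  # one subtree
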
def get_keeper_default_library():
    try:
        from_repos = seafile_api.get_owned_repo_list(SERVER_EMAIL)
        from_repos = [r for r in from_repos if r.name == KEEPER_DEFAULT_LIBRARY]
        if from_repos:
            from_repo_id = from_repos[0].id
            return {
                'repo_id': from_repo_id,
                'dirents': seafile_api.list_dir_by_path(from_repo_id, '/')
            }
        else:
            raise Exception("Cannot find KEEPER_DEFAULT_LIBRARY repo in admin "
                            "libraries, please install!")
    except Exception as err:
        logging.error("Cannot find KEEPER_DEFAULT_LIBRARY dirents, err: " + str(err))
        return None

def get_dir_file_recursively(repo_id, path, all_dirs):
    is_pro = is_pro_version()
    path_id = seafile_api.get_dir_id_by_path(repo_id, path)
    dirs = seafile_api.list_dir_by_path(repo_id, path, -1, -1)

    for dirent in dirs:
        entry = {}
        if stat.S_ISDIR(dirent.mode):
            entry["type"] = 'dir'
        else:
            entry["type"] = 'file'
            entry['modifier_email'] = dirent.modifier
            entry["size"] = dirent.size

            if is_pro:
                entry["is_locked"] = dirent.is_locked
                entry["lock_owner"] = dirent.lock_owner
                if dirent.lock_owner:
                    entry["lock_owner_name"] = email2nickname(dirent.lock_owner)
                entry["lock_time"] = dirent.lock_time

        entry["parent_dir"] = path
        entry["id"] = dirent.obj_id
        entry["name"] = dirent.obj_name
        entry["mtime"] = dirent.mtime

        all_dirs.append(entry)

        # Use dict to reduce memcache fetch cost in large for-loop.
        file_list = [item for item in all_dirs if item['type'] == 'file']
        contact_email_dict = {}
        nickname_dict = {}
        modifiers_set = {x['modifier_email'] for x in file_list}
        for e in modifiers_set:
            if e not in contact_email_dict:
                contact_email_dict[e] = email2contact_email(e)
            if e not in nickname_dict:
                nickname_dict[e] = email2nickname(e)

        for e in file_list:
            e['modifier_contact_email'] = contact_email_dict.get(
                e['modifier_email'], '')
            e['modifier_name'] = nickname_dict.get(e['modifier_email'], '')

        if stat.S_ISDIR(dirent.mode):
            sub_path = posixpath.join(path, dirent.obj_name)
            get_dir_file_recursively(repo_id, sub_path, all_dirs)

    return all_dirs

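# Usage sketch, not from the original source: walk a hypothetical library
# ('repo-id-placeholder') from its root, then split the flattened result.
dir_file_list = get_dir_file_recursively('repo-id-placeholder', '/', [])
files = [e for e in dir_file_list if e['type'] == 'file']
folders = [e for e in dir_file_list if e['type'] == 'dir']
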
def get_dirents(request, repo_id):
    """
    Get dirents in a dir for file tree
    """
    if not request.is_ajax():
        raise Http404

    content_type = 'application/json; charset=utf-8'
    username = request.user.username

    # permission checking
    user_perm = check_repo_access_permission(repo_id, username)
    if user_perm is None:
        err_msg = _(u"You don't have permission to access the library.")
        return HttpResponse(json.dumps({"err_msg": err_msg}), status=403,
                            content_type=content_type)

    path = request.GET.get('path', '')
    dir_only = request.GET.get('dir_only', False)
    all_dir = request.GET.get('all_dir', False)
    if not path:
        err_msg = _(u"No path.")
        return HttpResponse(json.dumps({"err_msg": err_msg}), status=400,
                            content_type=content_type)

    # get dirents for every path element
    if all_dir:
        all_dirents = []
        path_eles = path.split('/')[:-1]
        for i, x in enumerate(path_eles):
            ele_path = '/'.join(path_eles[:i + 1]) + '/'
            try:
                ele_path_dirents = seafile_api.list_dir_by_path(
                    repo_id, ele_path.encode('utf-8'))
            except SearpcError as e:
                ele_path_dirents = []
            ds = []
            for d in ele_path_dirents:
                if stat.S_ISDIR(d.mode):
                    ds.append(d.obj_name)
            all_dirents.append(ds)
        return HttpResponse(json.dumps(all_dirents), content_type=content_type)

def create_default_repo(self, username):
    default_repo_id = seafile_api.create_repo(name=_("My Library"),
                                              desc=_("My Library"),
                                              username=username,
                                              passwd=None)
    sys_repo_id = get_system_default_repo_id()
    if not sys_repo_id or not seafile_api.get_repo(sys_repo_id):
        return None

    dirents = seafile_api.list_dir_by_path(sys_repo_id, '/')
    for dirent in dirents:
        obj_name = dirent.obj_name
        seafile_api.copy_file(sys_repo_id, '/', obj_name,
                              default_repo_id, '/', obj_name, username, 0)

    UserOptions.objects.set_default_repo(username, default_repo_id)
    return default_repo_id

def iterate_and_del_files_recursively(repo_id, path, days):
    dirents = seafile_api.list_dir_by_path(repo_id, path)
    for dirent in dirents:
        if stat.S_ISDIR(dirent.mode):
            iterate_and_del_files_recursively(
                repo_id, os.path.join(path, dirent.obj_name), days)
            continue
        mtime = dirent.mtime
        cur_time = int(time.time())
        time_delta = days * 24 * 60 * 60
        if cur_time - time_delta > mtime:
            file_full_path = os.path.join(path, dirent.obj_name)
            seafile_api.del_file(repo_id, path, dirent.obj_name, 'seafevents')
            logger.info('{} of {} deleted at {}.'.format(
                file_full_path, repo_id, cur_time))

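# Usage sketch, not from the original source: purge files older than 30 days
# under a hypothetical '/scratch' folder. Sub-folders are traversed but never
# deleted themselves, matching the function above.
iterate_and_del_files_recursively('repo-id-placeholder', '/scratch', 30)
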
def get_wiki_dirs_by_path(repo_id, path, all_dirs):
    dirs = seafile_api.list_dir_by_path(repo_id, path)

    for dirent in dirs:
        entry = {}
        if stat.S_ISDIR(dirent.mode):
            entry["type"] = 'dir'
        else:
            entry["type"] = 'file'

        entry["parent_dir"] = path
        entry["id"] = dirent.obj_id
        entry["name"] = dirent.obj_name
        entry["size"] = dirent.size
        entry["mtime"] = dirent.mtime

        all_dirs.append(entry)

    return all_dirs

def create_default_library(request):
    """Create a default library for user.

    Arguments:
    - `username`:
    """
    username = request.user.username

    # Disable user guide no matter user permission error or creation error,
    # so that the guide popup only shows once.
    UserOptions.objects.disable_user_guide(username)

    if not request.user.permissions.can_add_repo():
        return

    if is_org_context(request):
        org_id = request.user.org.org_id
        default_repo = seafile_api.create_org_repo(name=_("My Library"),
                                                   desc=_("My Library"),
                                                   username=username,
                                                   passwd=None,
                                                   org_id=org_id)
    else:
        default_repo = seafile_api.create_repo(name=_("My Library"),
                                               desc=_("My Library"),
                                               username=username,
                                               passwd=None)
    sys_repo_id = get_system_default_repo_id()
    if sys_repo_id is None:
        return

    try:
        dirents = seafile_api.list_dir_by_path(sys_repo_id, '/')
        for e in dirents:
            obj_name = e.obj_name
            seafile_api.copy_file(sys_repo_id, '/', obj_name,
                                  default_repo, '/', obj_name, username, 0)
    except SearpcError as e:
        logger.error(e)
        return

    UserOptions.objects.set_default_repo(username, default_repo)
    return default_repo

def create_default_repo(self, username):
    default_repo_id = seafile_api.create_repo(name=_("My Library"),
                                              desc=_("My Library"),
                                              username=username)
    sys_repo_id = get_system_default_repo_id()
    if not sys_repo_id or not seafile_api.get_repo(sys_repo_id):
        return None

    dirents = seafile_api.list_dir_by_path(sys_repo_id, '/')
    for dirent in dirents:
        obj_name = dirent.obj_name
        seafile_api.copy_file(sys_repo_id, '/', obj_name,
                              default_repo_id, '/', obj_name, username, 0)

    UserOptions.objects.set_default_repo(username, default_repo_id)
    return default_repo_id

def get_wiki_dirs_by_path(repo_id, path, all_dirs):
    dirs = seafile_api.list_dir_by_path(repo_id, path)

    for dirent in dirs:
        entry = {}
        if stat.S_ISDIR(dirent.mode):
            entry["type"] = 'dir'
        else:
            entry["type"] = 'file'

        entry["parent_dir"] = path
        entry["name"] = dirent.obj_name

        all_dirs.append(entry)

        if stat.S_ISDIR(dirent.mode):
            sub_path = posixpath.join(path, dirent.obj_name)
            get_wiki_dirs_by_path(repo_id, sub_path, all_dirs)

    return all_dirs

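# Usage sketch, not from the original source: flatten a hypothetical wiki
# library and keep only its markdown pages.
wiki_entries = get_wiki_dirs_by_path('repo-id-placeholder', '/', [])
md_pages = [e for e in wiki_entries
            if e['type'] == 'file' and e['name'].endswith('.md')]
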
def slug(request, slug, file_path="home.md"):
    """Show wiki page.
    """
    # get wiki object or 404
    wiki = get_object_or_404(Wiki, slug=slug)
    file_path = "/" + file_path

    is_dir = None
    file_id = seafile_api.get_file_id_by_path(wiki.repo_id, file_path)
    if file_id:
        is_dir = False

    dir_id = seafile_api.get_dir_id_by_path(wiki.repo_id, file_path)
    if dir_id:
        is_dir = True

    # compatible with old wiki url
    if is_dir is None:
        if len(file_path.split('.')) == 1:
            new_path = file_path[1:] + '.md'
            return HttpResponseRedirect(reverse('wiki:slug', args=[slug, new_path]))

    # perm check
    req_user = request.user.username
    if not req_user and not wiki.has_read_perm(request):
        return redirect('auth_login')
    else:
        if not wiki.has_read_perm(request):
            return render_permission_error(request, _('Unable to view Wiki'))

    file_type, ext = get_file_type_and_ext(posixpath.basename(file_path))
    if file_type == IMAGE:
        file_url = reverse('view_lib_file', args=[wiki.repo_id, file_path])
        return HttpResponseRedirect(file_url + "?raw=1")

    if not req_user:
        user_can_write = False
    elif req_user == wiki.username or check_folder_permission(
            request, wiki.repo_id, '/') == 'rw':
        user_can_write = True
    else:
        user_can_write = False

    is_public_wiki = False
    if wiki.permission == 'public':
        is_public_wiki = True

    has_index = False
    dirs = seafile_api.list_dir_by_path(wiki.repo_id, '/')
    for dir_obj in dirs:
        if dir_obj.obj_name == 'index.md':
            has_index = True
            break

    try:
        fs = FileShare.objects.get(repo_id=wiki.repo_id, path='/')
    except FileShare.DoesNotExist:
        fs = FileShare.objects.create_dir_link(wiki.username, wiki.repo_id, '/',
                                               permission='view_download')
        wiki.permission = 'public'
        wiki.save()
        is_public_wiki = True

    repo = seafile_api.get_repo(wiki.repo_id)

    return render(request, "wiki/wiki.html", {
        "wiki": wiki,
        "repo_name": repo.name if repo else '',
        "page_name": file_path,
        "shared_token": fs.token,
        "shared_type": fs.s_type,
        "user_can_write": user_can_write,
        "file_path": file_path,
        "filename": os.path.splitext(os.path.basename(file_path))[0],
        "repo_id": wiki.repo_id,
        "search_repo_id": wiki.repo_id,
        "search_wiki": True,
        "is_public_wiki": is_public_wiki,
        "is_dir": is_dir,
        "has_index": has_index,
    })

def _test_under_path(path):
    repo = TEST_REPO
    path = path.rstrip('/')
    #sdir = repo.get_dir('/')
    parent_dir = '/'
    if path:
        dirs = [p for p in path.split('/') if p]
        for d in dirs:
            api.post_dir(repo.get('id'), parent_dir, d, USER)
            parent_dir = parent_dir + d + '/'

    entries = davclient.repo_listdir(repo, path)
    self.assertEmpty(entries)

    # create a folder from webapi and list it in webdav
    dirname = 'folder-%s' % randstring()
    api.post_dir(repo.get('id'), parent_dir, dirname, USER)
    entries = davclient.repo_listdir(repo, parent_dir)
    self.assertHasLen(entries, 1)
    sfolder = entries[0]
    self.assertEqual(dav_basename(sfolder), dirname)

    # create a file from webapi and list it in webdav
    testfpath = os.path.join(os.path.dirname(__file__), 'data', 'test.txt')
    with open(testfpath, 'rb') as fp:
        testfcontent = fp.read()
    fname = 'uploaded-file-%s.txt' % randstring()
    api.post_file(repo.get('id'), testfpath, parent_dir, fname, USER)
    entries = davclient.repo_listdir(repo, parent_dir)
    self.assertHasLen(entries, 2)

    downloaded_file = davclient.repo_getfile(
        repo, posixpath.join(parent_dir, fname))
    assert downloaded_file == testfcontent

    # create a folder through webdav, and check it in webapi
    dirname = 'another-level1-folder-%s' % randstring(10)
    davclient.repo_mkdir(repo, parent_dir, dirname)
    entries = api.list_dir_by_path(repo.get('id'), parent_dir)
    self.assertHasLen(entries, 3)
    davdir = [e for e in entries if e.obj_name == dirname][0]
    self.assertEqual(davdir.obj_name, dirname)

    # upload a file through webdav, and check it in webapi
    fname = 'uploaded-file-%s' % randstring()
    repo_fpath = posixpath.join(parent_dir, fname)
    davclient.repo_uploadfile(repo, testfpath, repo_fpath)
    entries = api.list_dir_by_path(repo.get('id'), parent_dir)
    self.assertHasLen(entries, 4)

    # remove a dir through webdav
    self.assertIn(dirname, [dirent.obj_name for dirent in
                            api.list_dir_by_path(repo.get('id'), parent_dir)])
    davclient.repo_removedir(repo, os.path.join(parent_dir, dirname))
    entries = api.list_dir_by_path(repo.get('id'), parent_dir)
    self.assertHasLen(entries, 3)
    self.assertNotIn(dirname, [dirent.obj_name for dirent in entries])

    # remove a file through webdav
    self.assertIn(fname, [dirent.obj_name for dirent in
                          api.list_dir_by_path(repo.get('id'), parent_dir)])
    davclient.repo_removefile(repo, os.path.join(parent_dir, fname))
    entries = api.list_dir_by_path(repo.get('id'), parent_dir)
    self.assertHasLen(entries, 2)
    self.assertNotIn(fname, [dirent.obj_name for dirent in entries])

def search_files_in_repo(repo, search_path, keyword, obj_desc):

    def is_matched(d):
        """ Filter function

        :param d: dirent object
        :return: True if object meets the conditions, False otherwise
        """
        obj_type = obj_desc.get('obj_type')
        suffixes = obj_desc['suffixes']
        time_range = obj_desc['time_range']
        time_from = time_range[0]
        time_to = time_range[1]
        size_range = obj_desc['size_range']
        size_from = size_range[0]
        size_to = size_range[1]

        # check keyword
        if re.search(keyword, d.obj_name, re.I) is None:
            return False

        # check obj_type
        if obj_type is not None:
            is_dir = stat.S_ISDIR(d.mode)
            if (is_dir and obj_type == 'file') or (not is_dir and obj_type == 'dir'):
                return False

        # check suffixes
        if suffixes is not None:
            suffix = d.obj_name.split('.')[-1]
            if suffix not in suffixes:
                return False

        # check time_from
        if time_from is not None:
            if d.mtime < int(time_from):
                return False

        # check time_to
        if time_to is not None:
            if d.mtime > int(time_to):
                return False

        # check size_from
        if size_from is not None:
            if d.size < int(size_from):
                return False

        # check size_to
        if size_to is not None:
            if d.size > int(size_to):
                return False

        return True

    if search_path[-1] != '/':
        search_path = '{0}/'.format(search_path)

    file_list = []
    dirs = seafile_api.list_dir_by_path(repo.repo_id, search_path)
    for dirent in dirs:
        is_dir = stat.S_ISDIR(dirent.mode)
        if is_matched(dirent):
            f = {
                'oid': dirent.obj_id,
                'repo_id': repo.repo_id,
                'name': dirent.obj_name,
                'permission': dirent.permission,
                'is_dir': is_dir,
                'fullpath': search_path + dirent.obj_name,
                'parent_dir': search_path.rstrip('/'),
                'last_modified_by': dirent.modifier,
                'last_modified': dirent.mtime,
                'size': dirent.size,
                'repo': repo,
                'repo_name': repo.name,
                'repo_owner_email': repo.owner,
                'repo_owner_name': repo.owner_nickname,
                'repo_owner_contact_email': repo.owner_contact_email
            }
            if repo.origin_path:
                if f['fullpath'].startswith(repo.origin_path):
                    f['repo_id'] = repo.repo_id
                    f['fullpath'] = f['fullpath'].split(repo.origin_path)[-1]
            file_list.append(f)

        if is_dir:  # directory
            # Recursive call
            nested_list = search_files_in_repo(
                repo, search_path + dirent.obj_name + '/', keyword, obj_desc)
            file_list.extend(nested_list)

    return file_list

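# Usage sketch, not from the original source. The obj_desc keys are inferred
# from the is_matched() filter above; the repo object and keyword are
# hypothetical, so the call itself is left commented out.
obj_desc = {
    'obj_type': 'file',          # 'file', 'dir', or None to match both
    'suffixes': ['md', 'txt'],   # or None to skip the suffix check
    'time_range': (None, None),  # (time_from, time_to), unix timestamps
    'size_range': (None, None),  # (size_from, size_to), bytes
}
# results = search_files_in_repo(repo, '/', r'report', obj_desc)
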
def _get_dir_list_by_path(repo_id, parent_dir):
    dirs = seafile_api.list_dir_by_path(repo_id, parent_dir)
    return [
        get_dirent_info(dir) for dir in dirs
        if '.dtable' not in dir.obj_name
    ]

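# Usage sketch, not from the original source ('repo-id-placeholder' and
# '/workspaces' are hypothetical): list folder entries, skipping .dtable objects.
dir_list = _get_dir_list_by_path('repo-id-placeholder', '/workspaces')
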
def test_file_property_and_dir_listing():
    t_repo_version = 1
    t_repo_id = api.create_repo('test_file_property_and_dir_listing', '',
                                USER, passwd=None)

    create_the_file()

    api.post_file(t_repo_id, file_path, '/', file_name, USER)
    api.post_dir(t_repo_id, '/', dir_name, USER)
    api.post_file(t_repo_id, file_path, '/' + dir_name, file_name, USER)

    #test is_valid_filename
    t_valid_file_name = 'valid_filename'
    t_invalid_file_name = '/invalid_filename'
    assert api.is_valid_filename(t_repo_id, t_valid_file_name)
    assert api.is_valid_filename(t_repo_id, t_invalid_file_name) == 0

    #test get_file_id_by_path
    t_file_id = api.get_file_id_by_path(t_repo_id, '/test.txt')
    assert t_file_id

    #test get_dir_id_by_path
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir')
    assert t_dir_id

    #test get_file_size
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id, t_repo_version, t_file_id)

    #test get_dir_size
    t_dir_size = len(file_content)
    assert t_dir_size == api.get_dir_size(t_repo_id, t_repo_version, t_dir_id)

    #test get_file_count_info_by_path
    t_file_count_info = api.get_file_count_info_by_path(t_repo_id, '/')
    assert t_file_count_info.file_count == 2
    assert t_file_count_info.dir_count == 1
    assert t_file_count_info.size == t_file_size + t_dir_size

    #test get_file_id_by_commit_and_path
    t_file_id_tmp = t_file_id
    t_repo = api.get_repo(t_repo_id)
    assert t_repo
    t_commit_id = t_repo.head_cmmt_id
    t_file_id = api.get_file_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                   '/test.txt')
    assert t_file_id == t_file_id_tmp

    #test get_dirent_by_path
    std_file_mode = 0o100000 | 0o644
    t_dirent_obj = api.get_dirent_by_path(t_repo_id, '/test.txt')
    assert t_dirent_obj
    assert t_dirent_obj.obj_id == t_file_id
    assert t_dirent_obj.obj_name == 'test.txt'
    assert t_dirent_obj.mode == std_file_mode
    assert t_dirent_obj.version == t_repo_version
    assert t_dirent_obj.size == t_file_size
    assert t_dirent_obj.modifier == USER

    #test list_file_by_file_id
    t_block_list = api.list_file_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_blocks_by_file_id
    t_block_list = api.list_blocks_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_dir_by_dir_id
    t_dir_list = api.list_dir_by_dir_id(t_repo_id, t_dir_id)
    assert len(t_dir_list) == 1

    #test list_dir_by_path
    t_dir_list = api.list_dir_by_path(t_repo_id, '/test_dir')
    assert len(t_dir_list) == 1

    #test get_dir_id_by_commit_and_path
    t_dir_id = api.get_dir_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert t_dir_id

    #test list_dir_by_commit_and_path
    t_dir_list = api.list_dir_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert len(t_dir_list) == 1

    #test list_dir_with_perm
    t_dir_list = api.list_dir_with_perm(t_repo_id, '/test_dir', t_dir_id, USER)
    assert len(t_dir_list) == 1

    #test mkdir_with_parents
    api.mkdir_with_parents(t_repo_id, '/test_dir', 'test_subdir', USER)
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir/test_subdir')
    assert t_dir_id

    #test get_total_storage
    t_total_size = api.get_total_storage()
    t_repo_size = api.get_repo_size(t_repo_id)
    assert t_total_size == t_repo_size

    #get_total_file_number
    time.sleep(1)
    assert api.get_total_file_number() == 2

    api.remove_repo(t_repo_id)

try:
    dirents = seafile_api.list_dir_by_path(repo_id, path.encode('utf-8'))
except SearpcError as e:
    return HttpResponse(json.dumps({"err_msg": e.msg}), status=500,
                        content_type=content_type)

dirent_list = []
for dirent in dirents:
    if stat.S_ISDIR(dirent.mode):
        dirent.has_subdir = False

        if dir_only:
            dirent_path = os.path.join(path, dirent.obj_name)
            try:
                dirent_dirents = seafile_api.list_dir_by_path(
                    repo_id, dirent_path.encode('utf-8'))
            except SearpcError as e:
                dirent_dirents = []
            for dirent_dirent in dirent_dirents:
                if stat.S_ISDIR(dirent_dirent.props.mode):
                    dirent.has_subdir = True
                    break

        subdir = {
            'name': dirent.obj_name,
            'id': dirent.obj_id,
            'type': 'dir',
            # has_subdir decides node 'state' ('closed' or not) in jstree
            'has_subdir': dirent.has_subdir,
            'repo_id': repo_id,
        }
