def download_file(request, repo_id, obj_id):
    """Check permission for a file object and redirect to its download URL.

    Arguments:
    - `request`: may carry `t` (share-link token) and `p` (file path)
      query parameters.
    - `repo_id`: id of the library containing the file.
    - `obj_id`: id of the file object to download.
    """
    username = request.user.username
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    # An encrypted library must have its password set in the session first.
    if repo.encrypted and not seafile_api.is_password_set(repo_id, username):
        return HttpResponseRedirect(reverse('repo', args=[repo_id]))

    # If visitor's file shared token in url params matches the token in db,
    # then we know the visitor is from file shared link.
    share_token = request.GET.get('t', '')
    # Fix: a stale/invalid token used to raise FileShare.DoesNotExist and
    # produce a 500; treat it the same as "no token" instead.
    fileshare = None
    if share_token:
        try:
            fileshare = FileShare.objects.get(token=share_token)
        except FileShare.DoesNotExist:
            fileshare = None
    shared_by = None
    if fileshare:
        from_shared_link = True
        shared_by = fileshare.username
    else:
        from_shared_link = False

    if from_shared_link:
        # check whether owner's traffic over the limit
        if user_traffic_over_limit(fileshare.username):
            messages.error(request, _(u'Unable to access file: share link traffic is used up.'))
            next_page = request.META.get('HTTP_REFERER', settings.SITE_ROOT)
            return HttpResponseRedirect(next_page)

    # Permission check and generate download link
    path = request.GET.get('p', '')
    if check_repo_access_permission(repo_id, request.user) or \
            get_file_access_permission(repo_id, path, username) or from_shared_link:
        # Get a token to access file
        token = seafserv_rpc.web_get_access_token(repo_id, obj_id,
                                                  'download', username)
    else:
        messages.error(request, _(u'Unable to download file'))
        next_page = request.META.get('HTTP_REFERER', settings.SITE_ROOT)
        return HttpResponseRedirect(next_page)

    # send stats message; best-effort, never break the download on failure
    if from_shared_link:
        try:
            file_size = seafile_api.get_file_size(repo.store_id, repo.version,
                                                  obj_id)
            send_message('seahub.stats', 'file-download\t%s\t%s\t%s\t%s' %
                         (repo.id, shared_by, obj_id, file_size))
        except Exception as e:
            logger.error('Error when sending file-download message: %s' % str(e))
def get_user_share_link_info(fileshare):
    """Serialize a FileShare row into a plain dict for API responses.

    Arguments:
    - `fileshare`: a FileShare model instance (file or dir link).

    Returns a dict with repo name, token, public link, path, object name,
    dir flag, view count and (for file links) file size.
    """
    data = {}
    repo_id = fileshare.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = fileshare.path
    if path:
        # '/' means the link points at the library root itself
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    data['repo_name'] = repo.repo_name if repo else ''
    data['token'] = fileshare.token
    data['link'] = gen_shared_link(fileshare.token, fileshare.s_type)
    data['path'] = path
    data['obj_name'] = obj_name
    data['is_dir'] = fileshare.s_type == 'd'
    data['view_cnt'] = fileshare.view_cnt

    # Fix: only dereference repo.store_id when the repo lookup succeeded;
    # previously a deleted/broken library crashed with AttributeError here.
    if fileshare.s_type == 'f' and repo:
        obj_id = seafile_api.get_file_id_by_path(repo_id, path)
        data['size'] = seafile_api.get_file_size(repo.store_id, repo.version,
                                                 obj_id)
    else:
        data['size'] = ''

    return data
def view_shared_file(request, token):
    """Preview file via shared link.

    Arguments:
    - `request`:
    - `token`: the share-link token from the URL.
    """
    assert token is not None    # Checked by URLconf

    try:
        fileshare = FileShare.objects.get(token=token)
    except FileShare.DoesNotExist:
        raise Http404

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = fileshare.path.rstrip('/')  # Normalize file path
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))

    # Fix: get_file_size() takes (store_id, version, obj_id); the old
    # single-argument call did not match the API used elsewhere in this file.
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Anonymous ('' username) read-only access token for the storage server.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    # get file content
    ret_dict = {'err': '', 'file_content': '', 'encoding': '', 'file_enc': '',
                'file_encoding_list': [], 'html_exists': False,
                'filetype': filetype}
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        # Fix: store the error where the caller/template reads it; the old
        # `err = err_msg` assigned a local that was never used.
        ret_dict['err'] = err_msg
    else:
        # Choose different approach when dealing with different type of file.
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)

    # Increase file shared link view_cnt, this operation should be atomic
    fileshare.view_cnt = F('view_cnt') + 1
    fileshare.save()

    # send statistic messages; reuse file_size instead of a second RPC call
    if ret_dict['filetype'] != 'Unknown':
        try:
            send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' %
                         (repo.id, shared_by, obj_id, file_size))
        except SearpcError as e:
            logger.error('Error when sending file-view message: %s' % str(e))
def view_file_via_shared_dir(request, token):
    """Preview a file reached by browsing inside a shared-directory link.

    Arguments:
    - `request`: must carry `p`, the file path inside the shared dir.
    - `token`: the share-link token from the URL.
    """
    assert token is not None    # Checked by URLconf

    try:
        fileshare = FileShare.objects.get(token=token)
    except FileShare.DoesNotExist:
        raise Http404

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = request.GET.get('p', '').rstrip('/')
    if not path:
        raise Http404

    if not path.startswith(fileshare.path):
        # Can not view upper dir of shared dir
        raise Http404

    zipped = gen_path_link(path, '')
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))

    # Fix: get_file_size() takes (store_id, version, obj_id); the old
    # single-argument call did not match the API used elsewhere in this file.
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Anonymous ('' username) read-only access token for the storage server.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    # get file content
    ret_dict = {'err': '', 'file_content': '', 'encoding': '', 'file_enc': '',
                'file_encoding_list': [], 'html_exists': False,
                'filetype': filetype}
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        # Fix: store the error where the caller/template reads it; the old
        # `err = err_msg` assigned a local that was never used.
        ret_dict['err'] = err_msg
    else:
        # Choose different approach when dealing with different type of file.
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)

    # send statistic messages; reuse file_size instead of a second RPC call
    try:
        send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' %
                     (repo.id, shared_by, obj_id, file_size))
    except SearpcError as e:
        logger.error('Error when sending file-view message: %s' % str(e))
def download_file(request, repo_id, obj_id):
    """Check permission for a file object and redirect to its download URL.

    Arguments:
    - `request`: may carry `t` (share-link token) and `p` (file path)
      query parameters.
    - `repo_id`: id of the library containing the file.
    - `obj_id`: id of the file object to download.
    """
    username = request.user.username
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    # An encrypted library must have its password set in the session first.
    if repo.encrypted and not seafile_api.is_password_set(repo_id, username):
        return HttpResponseRedirect(reverse("repo", args=[repo_id]))

    # If visitor's file shared token in url params matches the token in db,
    # then we know the visitor is from file shared link.
    share_token = request.GET.get("t", "")
    # Fix: a stale/invalid token used to raise FileShare.DoesNotExist and
    # produce a 500; treat it the same as "no token" instead.
    fileshare = None
    if share_token:
        try:
            fileshare = FileShare.objects.get(token=share_token)
        except FileShare.DoesNotExist:
            fileshare = None
    shared_by = None
    if fileshare:
        from_shared_link = True
        shared_by = fileshare.username
    else:
        from_shared_link = False

    if from_shared_link:
        # check whether owner's traffic over the limit
        if user_traffic_over_limit(fileshare.username):
            messages.error(request, _(u"Unable to access file: share link traffic is used up."))
            next_page = request.META.get("HTTP_REFERER", settings.SITE_ROOT)
            return HttpResponseRedirect(next_page)

    # Permission check and generate download link
    path = request.GET.get("p", "")
    if (
        check_repo_access_permission(repo_id, request.user)
        or get_file_access_permission(repo_id, path, username)
        or from_shared_link
    ):
        # Get a token to access file
        token = seafserv_rpc.web_get_access_token(repo_id, obj_id, "download", username)
    else:
        messages.error(request, _(u"Unable to download file"))
        next_page = request.META.get("HTTP_REFERER", settings.SITE_ROOT)
        return HttpResponseRedirect(next_page)

    # send stats message; best-effort, never break the download on failure
    if from_shared_link:
        try:
            file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
            send_message("seahub.stats", "file-download\t%s\t%s\t%s\t%s" % (repo.id, shared_by, obj_id, file_size))
        except Exception as e:
            logger.error("Error when sending file-download message: %s" % str(e))
def get_share_link_info(fileshare):
    """Serialize a FileShare row into the full share-link info dict.

    Arguments:
    - `fileshare`: a FileShare model instance (file or dir link).

    Returns a dict with creator info, repo/path info, link, timestamps,
    expiry/permission state and (for file links) file size.
    """
    data = {}
    token = fileshare.token

    repo_id = fileshare.repo_id
    try:
        repo = seafile_api.get_repo(repo_id)
    except Exception as e:
        logger.error(e)
        repo = None

    path = fileshare.path
    if path:
        # '/' means the link points at the library root itself
        obj_name = '/' if path == '/' else os.path.basename(path.rstrip('/'))
    else:
        obj_name = ''

    if fileshare.expire_date:
        expire_date = datetime_to_isoformat_timestr(fileshare.expire_date)
    else:
        expire_date = ''

    if fileshare.ctime:
        ctime = datetime_to_isoformat_timestr(fileshare.ctime)
    else:
        ctime = ''

    ccnet_email = fileshare.username
    data['creator_email'] = ccnet_email
    data['creator_name'] = email2nickname(ccnet_email)
    data['creator_contact_email'] = email2contact_email(ccnet_email)

    data['repo_id'] = repo_id
    data['repo_name'] = repo.repo_name if repo else ''

    data['path'] = path
    data['obj_name'] = obj_name
    data['is_dir'] = fileshare.s_type == 'd'

    data['token'] = token
    data['link'] = gen_shared_link(token, fileshare.s_type)
    data['view_cnt'] = fileshare.view_cnt
    data['ctime'] = ctime
    data['expire_date'] = expire_date
    data['is_expired'] = fileshare.is_expired()
    data['permissions'] = fileshare.get_permissions()

    if fileshare.s_type == 'f':
        # Fix: only dereference repo.store_id when the repo lookup succeeded;
        # previously a deleted/broken library crashed with AttributeError.
        if repo:
            obj_id = seafile_api.get_file_id_by_path(repo_id, path)
            data['size'] = seafile_api.get_file_size(repo.store_id,
                                                     repo.version, obj_id)
        else:
            data['size'] = ''

    return data
def get_file_revision_id_size(repo_id, commit_id, path):
    """Given a commit and a file path in that commit, return the seafile id
    and size of the file blob; (None, None) if the entry is not found.
    """
    repo = seafile_api.get_repo(repo_id)
    parent_dir, leaf_name = os.path.split(path)

    entries = seafile_api.list_dir_by_commit_and_path(repo_id, commit_id,
                                                      parent_dir)
    # Scan the directory listing for the entry matching the leaf name.
    for entry in entries:
        if entry.obj_name != leaf_name:
            continue
        size = seafile_api.get_file_size(repo.store_id, repo.version,
                                         entry.obj_id)
        return entry.obj_id, size

    return None, None
def get(self, request, format=None):
    """WOPI CheckFileInfo endpoint.

    Resolves the `access_token` query parameter via the cache, then returns
    a JSON document with username, repo id, file name/size/path.
    """
    token = request.GET.get('access_token', None)
    if not token:
        # Fix: missing token used to crash on string concatenation below;
        # answer unauthorized instead.
        return HttpResponse(json.dumps({}), status=401,
                            content_type=json_content_type)

    info_dict = cache.get('thirdparty_editor_access_token_' + token)
    if not info_dict:
        # Fix: unknown/expired token used to crash on the subscriptions
        # below; answer unauthorized instead.
        return HttpResponse(json.dumps({}), status=401,
                            content_type=json_content_type)

    request_user = info_dict['request_user']
    repo_id = info_dict['repo_id']
    file_path = info_dict['file_path']

    repo = seafile_api.get_repo(repo_id)
    obj_id = seafile_api.get_file_id_by_path(repo_id, file_path)
    if not repo or not obj_id:
        # Fix: a deleted library/file used to raise AttributeError on
        # repo.store_id outside the SearpcError handler.
        logger.error('File %s not found.' % file_path)
        return HttpResponse(json.dumps({}), status=404,
                            content_type=json_content_type)

    try:
        file_size = seafile_api.get_file_size(repo.store_id, repo.version,
                                              obj_id)
    except SearpcError as e:
        logger.error(e)
        return HttpResponse(json.dumps({}), status=500,
                            content_type=json_content_type)

    if file_size == -1:
        logger.error('File %s not found.' % file_path)
        return HttpResponse(json.dumps({}), status=401,
                            content_type=json_content_type)

    result = {}
    result['username'] = request_user
    result['repo_id'] = repo_id
    result['file_name'] = os.path.basename(file_path)
    result['file_size'] = file_size
    result['file_path'] = file_path

    return HttpResponse(json.dumps(result, ensure_ascii=False), status=200,
                        content_type=json_content_type)
def view_file_via_shared_dir(request, token):
    """Preview a file reached by browsing inside a shared-directory link.

    Arguments:
    - `request`: must carry `p`, the file path inside the shared dir.
    - `token`: the share-link token from the URL.
    """
    assert token is not None    # Checked by URLconf

    try:
        fileshare = FileShare.objects.get(token=token)
    except FileShare.DoesNotExist:
        raise Http404

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = request.GET.get('p', '').rstrip('/')
    if not path:
        raise Http404

    if not path.startswith(fileshare.path):
        # Can not view upper dir of shared dir
        raise Http404

    zipped = gen_path_link(path, '')
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))

    # Fix: get_file_size() takes (store_id, version, obj_id); the old
    # single-argument call did not match the API used elsewhere in this file.
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Anonymous ('' username) read-only access token for the storage server.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    # get file content
    ret_dict = {
        'err': '',
        'file_content': '',
        'encoding': '',
        'file_enc': '',
        'file_encoding_list': [],
        'html_exists': False,
        'filetype': filetype
    }
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        # Fix: store the error where the caller/template reads it; the old
        # `err = err_msg` assigned a local that was never used.
        ret_dict['err'] = err_msg
    else:
        # Choose different approach when dealing with different type of file.
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)

    # send statistic messages; reuse file_size instead of a second RPC call
    try:
        send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' %
                     (repo.id, shared_by, obj_id, file_size))
    except SearpcError as e:
        logger.error('Error when sending file-view message: %s' % str(e))
def _arguments_wrapper(request, repo_id, *args, **kwargs):
    """Validate the common arguments of the move/copy dirents AJAX views,
    perform destination permission and quota checks, then dispatch to the
    wrapped view.

    NOTE(review): `view_method` below is a closure variable from the
    enclosing decorator (not visible in this chunk).
    """
    if request.method != 'POST':
        raise Http404

    result = {}
    content_type = 'application/json; charset=utf-8'

    repo = get_repo(repo_id)
    if not repo:
        result['error'] = _(u'Library does not exist.')
        return HttpResponse(json.dumps(result), status=400,
                            content_type=content_type)

    # arguments validation
    parent_dir = request.GET.get('parent_dir')
    obj_file_names = request.POST.getlist('file_names')
    obj_dir_names = request.POST.getlist('dir_names')
    dst_repo_id = request.POST.get('dst_repo')
    dst_path = request.POST.get('dst_path')

    # NOTE(review): this rejects only when BOTH the location arguments and
    # the name lists are missing; a request with names but no dst_path slips
    # through — confirm whether `and` should be `or`.
    if not (parent_dir and dst_repo_id and dst_path) and \
            not (obj_file_names or obj_dir_names):
        result['error'] = _('Argument missing')
        return HttpResponse(json.dumps(result), status=400,
                            content_type=content_type)

    # check file path: destination path + name must fit the path limit
    for obj_name in obj_file_names + obj_dir_names:
        if len(dst_path + obj_name) > settings.MAX_PATH:
            result['error'] = _('Destination path is too long for %s.') % escape(obj_name)
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)

    # when dst is the same as src
    if repo_id == dst_repo_id and parent_dir == dst_path:
        result['error'] = _('Invalid destination path')
        return HttpResponse(json.dumps(result), status=400,
                            content_type=content_type)

    # check whether user has write permission to dest repo
    if check_folder_permission(request, dst_repo_id, dst_path) != 'rw':
        result['error'] = _('Permission denied')
        return HttpResponse(json.dumps(result), status=403,
                            content_type=content_type)

    # Leave src folder/file permission checking to corresponding
    # views, only need to check folder permission when perform 'move'
    # operation, 1), if move file, check parent dir perm, 2), if move
    # folder, check that folder perm.

    # Sum the sizes of everything being transferred, for the quota check.
    file_obj_size = 0
    for obj_name in obj_file_names:
        full_obj_path = posixpath.join(parent_dir, obj_name)
        file_obj_id = seafile_api.get_file_id_by_path(repo_id, full_obj_path)
        file_obj_size += seafile_api.get_file_size(
            repo.store_id, repo.version, file_obj_id)

    dir_obj_size = 0
    for obj_name in obj_dir_names:
        full_obj_path = posixpath.join(parent_dir, obj_name)
        dir_obj_id = seafile_api.get_dir_id_by_path(repo_id, full_obj_path)
        dir_obj_size += seafile_api.get_dir_size(
            repo.store_id, repo.version, dir_obj_id)

    # check quota
    src_repo_owner = seafile_api.get_repo_owner(repo_id)
    dst_repo_owner = seafile_api.get_repo_owner(dst_repo_id)

    try:
        # always check quota when copy file
        if view_method.__name__ == 'cp_dirents':
            out_of_quota = seafile_api.check_quota(
                dst_repo_id, delta=file_obj_size + dir_obj_size)
        else:
            # when move file
            if src_repo_owner != dst_repo_owner:
                # only check quota when src_repo_owner != dst_repo_owner
                out_of_quota = seafile_api.check_quota(
                    dst_repo_id, delta=file_obj_size + dir_obj_size)
            else:
                # not check quota when src and dst repo are both mine
                out_of_quota = False
    except Exception as e:
        logger.error(e)
        result['error'] = _(u'Internal server error')
        return HttpResponse(json.dumps(result), status=500,
                            content_type=content_type)

    if out_of_quota:
        result['error'] = _('Out of quota.')
        return HttpResponse(json.dumps(result), status=403,
                            content_type=content_type)

    return view_method(request, repo_id, parent_dir, dst_repo_id,
                       dst_path, obj_file_names, obj_dir_names)
def list_lib_dir(request, repo_id):
    '''
    Ajax API: list one directory of a library as JSON.

    Reads `p` (dir path, default '/') and `thumbnail_size` from the query
    string; returns dirents plus repo-level flags (owner, shared-out,
    quota, encryption).
    '''
    content_type = 'application/json; charset=utf-8'
    result = {}

    repo = get_repo(repo_id)
    if not repo:
        err_msg = _(u'Library does not exist.')
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=400, content_type=content_type)

    username = request.user.username

    path = request.GET.get('p', '/')
    # normalize: directory paths always end with '/'
    if path[-1] != '/':
        path = path + '/'

    # perm for current dir
    user_perm = check_folder_permission(request, repo.id, path)
    if user_perm is None:
        err_msg = _(u'Permission denied.')
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=403, content_type=content_type)

    # encrypted library needs its password set in session before listing
    if repo.encrypted \
            and not seafile_api.is_password_set(repo.id, username):
        err_msg = _(u'Library is encrypted.')
        return HttpResponse(json.dumps({'error': err_msg,
                                        'lib_need_decrypt': True}),
                            status=403, content_type=content_type)

    head_commit = get_commit(repo.id, repo.version, repo.head_cmmt_id)
    if not head_commit:
        err_msg = _(u'Error: no head commit id')
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=500, content_type=content_type)

    dir_list = []
    file_list = []

    try:
        dir_id = seafile_api.get_dir_id_by_path(repo.id, path)
    except SearpcError as e:
        logger.error(e)
        err_msg = 'Internal Server Error'
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=500, content_type=content_type)

    if not dir_id:
        err_msg = 'Folder not found.'
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=404, content_type=content_type)

    # -1, -1: no pagination, fetch the whole listing with permissions
    dirs = seafserv_threaded_rpc.list_dir_with_perm(repo_id, path, dir_id,
                                                    username, -1, -1)
    starred_files = get_dir_starred_files(username, repo_id, path)

    for dirent in dirs:
        dirent.last_modified = dirent.mtime
        if stat.S_ISDIR(dirent.mode):
            # NOTE(review): dpath is computed but never used below —
            # candidate for removal.
            dpath = posixpath.join(path, dirent.obj_name)
            if dpath[-1] != '/':
                dpath += '/'
            dir_list.append(dirent)
        else:
            # v0 repos don't carry the size on the dirent; fetch it via RPC
            if repo.version == 0:
                file_size = seafile_api.get_file_size(repo.store_id,
                                                      repo.version,
                                                      dirent.obj_id)
            else:
                file_size = dirent.size
            dirent.file_size = file_size if file_size else 0

            dirent.starred = False
            fpath = posixpath.join(path, dirent.obj_name)
            if fpath in starred_files:
                dirent.starred = True

            file_list.append(dirent)

    if is_org_context(request):
        repo_owner = seafile_api.get_org_repo_owner(repo.id)
    else:
        repo_owner = seafile_api.get_repo_owner(repo.id)

    result["is_repo_owner"] = False
    result["has_been_shared_out"] = False
    if repo_owner == username:
        result["is_repo_owner"] = True

        # best-effort: failure to compute shared-out state must not break
        # the listing, so the whole check is wrapped in try/except
        try:
            if is_org_context(request):
                org_id = request.user.org.org_id
                is_inner_org_pub_repo = False
                # check if current repo is pub-repo
                org_pub_repos = seafile_api.list_org_inner_pub_repos_by_owner(
                    org_id, username)
                for org_pub_repo in org_pub_repos:
                    if repo_id == org_pub_repo.id:
                        is_inner_org_pub_repo = True
                        break

                if seafile_api.list_org_repo_shared_group(org_id, username, repo_id) or \
                        seafile_api.list_org_repo_shared_to(org_id, username, repo_id) or \
                        is_inner_org_pub_repo:
                    result["has_been_shared_out"] = True
            else:
                if seafile_api.list_repo_shared_to(username, repo_id) or \
                        seafile_api.list_repo_shared_group_by_user(username, repo_id) or \
                        (not request.cloud_mode and seafile_api.is_inner_pub_repo(repo_id)):
                    result["has_been_shared_out"] = True
        except Exception as e:
            logger.error(e)

    result["is_virtual"] = repo.is_virtual
    result["repo_name"] = repo.name
    result["user_perm"] = user_perm
    # check quota for fileupload
    result["no_quota"] = True if seaserv.check_quota(repo.id) < 0 else False
    result["encrypted"] = repo.encrypted

    dirent_list = []
    for d in dir_list:
        d_ = {}
        d_['is_dir'] = True
        d_['obj_name'] = d.obj_name
        d_['last_modified'] = d.last_modified
        d_['last_update'] = translate_seahub_time(d.last_modified)
        d_['p_dpath'] = posixpath.join(path, d.obj_name)
        d_['perm'] = d.permission  # perm for sub dir in current dir
        dirent_list.append(d_)

    size = int(request.GET.get('thumbnail_size', THUMBNAIL_DEFAULT_SIZE))

    for f in file_list:
        f_ = {}
        f_['is_file'] = True
        f_['file_icon'] = file_icon_filter(f.obj_name)
        f_['obj_name'] = f.obj_name
        f_['last_modified'] = f.last_modified
        f_['last_update'] = translate_seahub_time(f.last_modified)
        f_['starred'] = f.starred
        f_['file_size'] = filesizeformat(f.file_size)
        f_['obj_id'] = f.obj_id
        f_['perm'] = f.permission  # perm for file in current dir

        file_type, file_ext = get_file_type_and_ext(f.obj_name)
        if file_type == IMAGE:
            f_['is_img'] = True

            # only link a thumbnail that has already been generated on disk
            if not repo.encrypted and ENABLE_THUMBNAIL and \
                    os.path.exists(os.path.join(THUMBNAIL_ROOT, str(size), f.obj_id)):
                file_path = posixpath.join(path, f.obj_name)
                src = get_thumbnail_src(repo_id, size, file_path)
                f_['encoded_thumbnail_src'] = urlquote(src)

        if is_pro_version():
            f_['is_locked'] = True if f.is_locked else False
            f_['lock_owner'] = f.lock_owner
            f_['lock_owner_name'] = email2nickname(f.lock_owner)
            if username == f.lock_owner:
                f_['locked_by_me'] = True
            else:
                f_['locked_by_me'] = False

        dirent_list.append(f_)

    result["dirent_list"] = dirent_list

    return HttpResponse(json.dumps(result), content_type=content_type)
def get(self, request, name=None):
    """List tags visible to the current user (name is None), or list the
    dirents carrying the tag `name`.
    """
    username = request.user.username
    org_id = request.user.org.org_id if is_org_context(request) else None

    # every repo the user can reach, in all four share categories
    owned_repos, shared_repos, groups_repos, public_repos = get_user_repos(
        username, org_id=org_id)
    repo_id_list = \
        [repo.id for repo in owned_repos] + \
        [repo.id for repo in shared_repos] + \
        [repo.id for repo in groups_repos] + \
        [repo.id for repo in public_repos]
    if name is None:
        def with_quotes(s):
            return "'" + s + "'"

        # NOTE(review): the IN-list is built by string formatting rather
        # than placeholders. The ids come from seafile itself, not user
        # input, but parameterized SQL would still be safer — verify.
        repo_id_text = ', '.join(map(with_quotes, repo_id_list))
        sql = """
        SELECT DISTINCT t.* FROM tags_tags t
        LEFT JOIN tags_filetag f ON t.id = f.tag_id
        LEFT JOIN tags_fileuuidmap m ON f.uuid_id = m.uuid
        WHERE m.repo_id IN ({})
        """.format(repo_id_text)
        tag_list = Tags.objects.raw(sql)
        tag_list = [tag.to_dict() for tag in tag_list]
        return Response(tag_list, status=status.HTTP_200_OK)
    else:
        tag = get_object_or_404(Tags, name=name)
        fileuuidmap_list = tag.fileuuidmap_set.all()
        repo = None
        dir_list = []
        file_list = []
        for fileuuidmap in fileuuidmap_list:
            # cache the repo object across consecutive rows of the same repo
            if repo is None or repo.id != fileuuidmap.repo_id:
                repo = seafile_api.get_repo(fileuuidmap.repo_id)
            fullpath = posixpath.join(fileuuidmap.parent_path,
                                      fileuuidmap.filename)
            dirent = seafile_api.get_dirent_by_path(
                fileuuidmap.repo_id, fullpath)
            dirent.repo_id = repo.id
            dirent.parent_path = fileuuidmap.parent_path
            dirent.fullpath = fullpath
            dirent.last_modified = dirent.mtime
            if stat.S_ISDIR(dirent.mode):
                dir_list.append(dirent)
            else:
                # v0 repos don't carry the size on the dirent; fetch via RPC
                if repo.version == 0:
                    file_size = seafile_api.get_file_size(
                        repo.store_id, repo.version, dirent.obj_id)
                else:
                    file_size = dirent.size
                dirent.file_size = file_size if file_size else 0
                can_preview, err_msg = can_preview_file(
                    dirent.obj_name, file_size, repo)
                dirent.can_preview = can_preview
                file_list.append(dirent)

        dirent_list = []
        for d in dir_list:
            d_ = {
                'is_dir': True,
                'obj_name': d.obj_name,
                'last_modified': d.last_modified,
                'last_update': translate_seahub_time(d.last_modified),
                'p_dpath': d.fullpath,
                'perm': d.permission,
                'repo_id': d.repo_id,
                'parent_path': d.parent_path,
            }
            dirent_list.append(d_)

        size = int(
            request.GET.get('thumbnail_size', THUMBNAIL_DEFAULT_SIZE))

        for f in file_list:
            f_ = {
                'is_file': True,
                'obj_name': f.obj_name,
                'last_modified': f.last_modified,
                'last_update': translate_seahub_time(f.last_modified),
                'file_size': filesizeformat(f.file_size),
                'obj_id': f.obj_id,
                'perm': f.permission,
                'can_preview': f.can_preview,
                'repo_id': f.repo_id,
                'parent_path': f.parent_path,
            }
            # NOTE(review): `repo` here is whichever repo the loop above saw
            # last, not necessarily f's repo — confirm intended.
            if not repo.encrypted and ENABLE_THUMBNAIL:
                file_ext = os.path.splitext(f.obj_name)[1][1:].lower()
                file_type = FILEEXT_TYPE_MAP.get(file_ext)
                if file_type == IMAGE:
                    f_['is_img'] = True
                if file_type == VIDEO and ENABLE_VIDEO_THUMBNAIL:
                    f_['is_video'] = True
                # `and` binds tighter than `or`: images always qualify,
                # videos only when video thumbnails are enabled
                if file_type == IMAGE or file_type == VIDEO and ENABLE_VIDEO_THUMBNAIL:
                    thumbnail_file_path = os.path.join(
                        THUMBNAIL_ROOT, str(size), f.obj_id)
                    thumbnail_exist = os.path.exists(thumbnail_file_path)
                    if thumbnail_exist:
                        src = get_thumbnail_src(f.repo_id, size, f.fullpath)
                        f_['encoded_thumbnail_src'] = urlquote(src)
            dirent_list.append(f_)

        return Response(dirent_list, status=status.HTTP_200_OK)
def view_file_via_shared_dir(request, token):
    """Preview a file reached by browsing inside a shared-directory link;
    for images, also compute the previous/next image in the same folder.
    """
    assert token is not None    # Checked by URLconf

    fileshare = FileShare.objects.get_valid_file_link_by_token(token)
    if fileshare is None:
        raise Http404

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = request.GET.get('p', '').rstrip('/')
    if not path:
        raise Http404

    if not path.startswith(fileshare.path):
        # Can not view upper dir of shared dir
        raise Http404

    zipped = gen_path_link(path, '')
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))

    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # anonymous ('' username) read-only access token for the storage server
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    img_prev = None
    img_next = None

    # get file content
    ret_dict = {'err': '', 'file_content': '', 'encoding': '', 'file_enc': '',
                'file_encoding_list': [], 'html_exists': False,
                'filetype': filetype}
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        ret_dict['err'] = err_msg
    else:
        """Choose different approach when dealing with different type of file."""
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == SPREADSHEET:
            handle_spreadsheet(inner_path, obj_id, fileext, ret_dict)
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)
        elif filetype == IMAGE:
            # collect sibling images so the template can page between them
            current_commit = get_commits(repo_id, 0, 1)[0]
            parent_dir = os.path.dirname(path)
            dirs = seafile_api.list_dir_by_commit_and_path(current_commit.repo_id,
                                                           current_commit.id,
                                                           parent_dir)
            if not dirs:
                raise Http404

            img_list = []
            for dirent in dirs:
                if not stat.S_ISDIR(dirent.props.mode):
                    fltype, flext = get_file_type_and_ext(dirent.obj_name)
                    if fltype == 'Image':
                        img_list.append(dirent.obj_name)

            if len(img_list) > 1:
                # Python-2-only comparator sort: case-insensitive name order
                img_list.sort(lambda x, y: cmp(x.lower(), y.lower()))
                cur_img_index = img_list.index(filename)
                if cur_img_index != 0:
                    img_prev = posixpath.join(parent_dir,
                                              img_list[cur_img_index - 1])
                if cur_img_index != len(img_list) - 1:
                    img_next = posixpath.join(parent_dir,
                                              img_list[cur_img_index + 1])

    # send statistic messages; best-effort, never break the preview
    if ret_dict['filetype'] != 'Unknown':
        try:
            send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' % \
                         (repo.id, shared_by, obj_id, file_size))
        except SearpcError, e:
            logger.error('Error when sending file-view message: %s' % str(e))
def post(self, request):
    """ Copy/move file/dir, and return task id.

    Permission checking:
    1. move: user with 'rw' permission for current file, 'rw' permission
       for dst parent dir;
    2. copy: user with 'r' permission for current file, 'rw' permission
       for dst parent dir;
    """
    src_repo_id = request.data.get('src_repo_id', None)
    src_parent_dir = request.data.get('src_parent_dir', None)
    src_dirent_name = request.data.get('src_dirent_name', None)
    dst_repo_id = request.data.get('dst_repo_id', None)
    dst_parent_dir = request.data.get('dst_parent_dir', None)
    operation = request.data.get('operation', None)
    dirent_type = request.data.get('dirent_type', None)

    # argument check
    if not src_repo_id:
        error_msg = 'src_repo_id invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not src_parent_dir:
        error_msg = 'src_parent_dir invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not src_dirent_name:
        error_msg = 'src_dirent_name invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not dst_repo_id:
        error_msg = 'dst_repo_id invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not dst_parent_dir:
        error_msg = 'dst_parent_dir invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not operation:
        error_msg = 'operation invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if not dirent_type:
        error_msg = 'dirent_type invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if len(dst_parent_dir + src_dirent_name) > MAX_PATH:
        error_msg = _('Destination path is too long.')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    operation = operation.lower()
    if operation not in ('move', 'copy'):
        error_msg = "operation can only be 'move' or 'copy'."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if operation == 'move':
        # moving onto itself is a no-op and rejected
        if src_repo_id == dst_repo_id and src_parent_dir == dst_parent_dir:
            error_msg = _('Invalid destination path')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    dirent_type = dirent_type.lower()
    if dirent_type not in ('file', 'dir'):
        error_msg = "operation can only be 'file' or 'dir'."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # src resource check
    src_repo = seafile_api.get_repo(src_repo_id)
    if not src_repo:
        error_msg = 'Library %s not found.' % src_repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    src_dirent_path = posixpath.join(src_parent_dir, src_dirent_name)

    file_id = None
    if dirent_type == 'file':
        file_id = seafile_api.get_file_id_by_path(src_repo_id,
                                                  src_dirent_path)
        if not file_id:
            error_msg = 'File %s not found.' % src_dirent_path
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    dir_id = None
    if dirent_type == 'dir':
        dir_id = seafile_api.get_dir_id_by_path(src_repo_id,
                                                src_dirent_path)
        if not dir_id:
            error_msg = 'Folder %s not found.' % src_dirent_path
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # dst resource check
    dst_repo = seafile_api.get_repo(dst_repo_id)
    if not dst_repo:
        error_msg = 'Library %s not found.' % dst_repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    if not seafile_api.get_dir_id_by_path(dst_repo_id, dst_parent_dir):
        error_msg = 'Folder %s not found.' % dst_parent_dir
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check for dst parent dir
    if check_folder_permission(request, dst_repo_id, dst_parent_dir) != 'rw':
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # `and` binds tighter than `or`: quota is checked for every copy, and
    # for moves only when the two repos have different owners.
    if operation == 'copy' or \
            operation == 'move' and \
            get_repo_owner(request, src_repo_id) != get_repo_owner(request, dst_repo_id):

        current_size = 0
        if file_id:
            current_size = seafile_api.get_file_size(src_repo.store_id,
                                                     src_repo.version,
                                                     file_id)
        if dir_id:
            current_size = seafile_api.get_dir_size(src_repo.store_id,
                                                    src_repo.version,
                                                    dir_id)

        # check if above quota for dst repo
        if seafile_api.check_quota(dst_repo_id, current_size) < 0:
            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

    # auto-rename on name collision at the destination
    new_dirent_name = check_filename_with_rename(dst_repo_id, dst_parent_dir,
                                                 src_dirent_name)

    username = request.user.username
    if operation == 'move':
        # permission check for src parent dir
        if check_folder_permission(request, src_repo_id, src_parent_dir) != 'rw':
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # a directory cannot be moved into its own subtree
        if dirent_type == 'dir' and src_repo_id == dst_repo_id and \
                dst_parent_dir.startswith(src_dirent_path + '/'):
            error_msg = _('Can not move directory %(src)s to its subdirectory %(des)s') \
                % {'src': escape(src_dirent_path), 'des': escape(dst_parent_dir)}
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if dirent_type == 'file':
            # check file lock
            try:
                is_locked, locked_by_me = check_file_lock(src_repo_id,
                                                          src_dirent_path,
                                                          username)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                                 error_msg)

            if is_locked and not locked_by_me:
                error_msg = _("File is locked")
                return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            # need_progress=1: server returns a task handle for async moves
            res = seafile_api.move_file(src_repo_id, src_parent_dir,
                                        src_dirent_name, dst_repo_id,
                                        dst_parent_dir, new_dirent_name,
                                        replace=False, username=username,
                                        need_progress=1)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                             error_msg)

    if operation == 'copy':
        # permission check for src parent dir
        if parse_repo_perm(check_folder_permission(
                request, src_repo_id, src_parent_dir)).can_copy is False:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            res = seafile_api.copy_file(src_repo_id, src_parent_dir,
                                        src_dirent_name, dst_repo_id,
                                        dst_parent_dir, new_dirent_name,
                                        username=username, need_progress=1)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                             error_msg)

    if not res:
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    result = {}
    # background operations expose a task id the client can poll
    if res.background:
        result['task_id'] = res.task_id

    return Response(result)
def user_info(request, email):
    """Render the sysadmin per-user detail page for `email`.

    Gathers: owned repos, quota/usage (org-aware), shared-in repos,
    profile data, and the user's share/upload links, then renders
    'sysadmin/userinfo.html'.

    Side effect: FileShare / UploadLinkShare rows whose repo or target
    path no longer exists are deleted while building the link lists.
    """
    owned_repos = seafile_api.get_owned_repo_list(email)

    # Org membership decides which quota RPCs apply.
    org = ccnet_threaded_rpc.get_orgs_by_user(email)
    org_name = None
    if not org:
        space_usage = seafile_api.get_user_self_usage(email)
        space_quota = seafile_api.get_user_quota(email)
        if CALC_SHARE_USAGE:
            share_usage = seafile_api.get_user_share_usage(email)
            share_quota = seafile_api.get_user_share_quota(email)
        else:
            share_quota = share_usage = 0
    else:
        # Only the first org is used — presumably a user belongs to at
        # most one org here; TODO confirm.
        org_id = org[0].org_id
        org_name = org[0].org_name
        space_usage = seafserv_threaded_rpc.get_org_user_quota_usage(
            org_id, email)
        space_quota = seafserv_threaded_rpc.get_org_user_quota(org_id, email)
        share_usage = share_quota = 0

    # Repos that are share to user
    in_repos = seafile_api.get_share_in_repo_list(email, -1, -1)

    # get user profile
    profile = Profile.objects.get_profile_by_user(email)
    d_profile = DetailedProfile.objects.get_detailed_profile_by_user(email)

    user_shared_links = []

    # download links
    p_fileshares = []
    fileshares = list(FileShare.objects.filter(username=email))
    for fs in fileshares:
        r = seafile_api.get_repo(fs.repo_id)
        if not r:
            # Stale link: repo is gone — drop the DB row.
            fs.delete()
            continue

        if fs.is_file_share_link():
            if seafile_api.get_file_id_by_path(r.id, fs.path) is None:
                # Stale link: file is gone — drop the DB row.
                fs.delete()
                continue
            fs.filename = os.path.basename(fs.path)
            path = fs.path.rstrip('/')  # Normalize file path
            obj_id = seafile_api.get_file_id_by_path(r.id, path)
            fs.file_size = seafile_api.get_file_size(r.store_id, r.version,
                                                     obj_id)
        else:
            if seafile_api.get_dir_id_by_path(r.id, fs.path) is None:
                # Stale link: dir is gone — drop the DB row.
                fs.delete()
                continue
            fs.filename = os.path.basename(fs.path.rstrip('/'))
            path = fs.path
            if path[-1] != '/':  # Normalize dir path
                path += '/'
            # get dir size
            dir_id = seafserv_threaded_rpc.get_dirid_by_path(
                r.id, r.head_cmmt_id, path)
            fs.dir_size = seafserv_threaded_rpc.get_dir_size(
                r.store_id, r.version, dir_id)

        fs.is_download = True
        p_fileshares.append(fs)
    # Most-viewed links first.
    p_fileshares.sort(key=lambda x: x.view_cnt, reverse=True)
    user_shared_links += p_fileshares

    # upload links
    uploadlinks = list(UploadLinkShare.objects.filter(username=email))
    p_uploadlinks = []
    for link in uploadlinks:
        r = seafile_api.get_repo(link.repo_id)
        if not r:
            link.delete()
            continue
        if seafile_api.get_dir_id_by_path(r.id, link.path) is None:
            link.delete()
            continue
        link.dir_name = os.path.basename(link.path.rstrip('/'))
        link.is_upload = True
        p_uploadlinks.append(link)
    p_uploadlinks.sort(key=lambda x: x.view_cnt, reverse=True)
    user_shared_links += p_uploadlinks

    return render_to_response('sysadmin/userinfo.html', {
        'owned_repos': owned_repos,
        'space_quota': space_quota,
        'space_usage': space_usage,
        'share_quota': share_quota,
        'share_usage': share_usage,
        'CALC_SHARE_USAGE': CALC_SHARE_USAGE,
        'in_repos': in_repos,
        'email': email,
        'profile': profile,
        'd_profile': d_profile,
        'org_name': org_name,
        'user_shared_links': user_shared_links,
    }, context_instance=RequestContext(request))
p_fileshares = [] fileshares = list(FileShare.objects.filter(username=email)) for fs in fileshares: r = seafile_api.get_repo(fs.repo_id) if not r: fs.delete() continue if fs.is_file_share_link(): if seafile_api.get_file_id_by_path(r.id, fs.path) is None: fs.delete() continue fs.filename = os.path.basename(fs.path) path = fs.path.rstrip('/') # Normalize file path obj_id = seafile_api.get_file_id_by_path(r.id, path) fs.file_size = seafile_api.get_file_size(r.store_id, r.version, obj_id) else: if seafile_api.get_dir_id_by_path(r.id, fs.path) is None: fs.delete() continue fs.filename = os.path.basename(fs.path.rstrip('/')) path = fs.path if path[-1] != '/': # Normalize dir path path += '/' # get dir size dir_id = seafserv_threaded_rpc.get_dirid_by_path(r.id, r.head_cmmt_id, path) fs.dir_size = seafserv_threaded_rpc.get_dir_size(r.store_id, r.version, dir_id)
def get(self, request, file_id, format=None): """ WOPI endpoint for check file info """ token = request.GET.get('access_token', None) request_user, repo_id, file_path = get_file_info_by_token(token) repo = seafile_api.get_repo(repo_id) obj_id = seafile_api.get_file_id_by_path(repo_id, file_path) try: file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id) except SearpcError as e: logger.error(e) return HttpResponse(json.dumps({}), status=500, content_type=json_content_type) if file_size == -1: logger.error('File %s not found.') % file_path return HttpResponse(json.dumps({}), status=401, content_type=json_content_type) result = {} # necessary result['BaseFileName'] = os.path.basename(file_path) result['OwnerId'] = seafile_api.get_repo_owner(repo_id) result['Size'] = file_size result['UserId'] = request_user result['Version'] = obj_id # optional result['UserFriendlyName'] = email2nickname(request_user) absolute_uri = request.build_absolute_uri('/') result['PostMessageOrigin'] = urlparse.urljoin(absolute_uri, SITE_ROOT).strip('/') result['HidePrintOption'] = False result['HideSaveOption'] = False result['HideExportOption'] = False result['EnableOwnerTermination'] = True result['SupportsLocks'] = True result['SupportsGetLock'] = True result['SupportsUpdate'] = True filename = os.path.basename(file_path) filetype, fileext = get_file_type_and_ext(filename) is_locked, locked_by_me = check_file_lock(repo_id, file_path, request_user) perm = seafile_api.check_permission_by_path(repo_id, file_path, request_user) if ENABLE_OFFICE_WEB_APP_EDIT and not repo.encrypted and \ perm == 'rw' and ((not is_locked) or (is_locked and locked_by_me)) and \ fileext in OFFICE_WEB_APP_EDIT_FILE_EXTENSION: result['UserCanWrite'] = True return HttpResponse(json.dumps(result), status=200, content_type=json_content_type)
def view_file_via_shared_dir(request, token):
    """Preview a file reached through a shared-directory link.

    `token` identifies the shared dir; `?p=` carries the file path,
    which must live under the shared dir's root. Builds `ret_dict`
    with preview content for the file type and, for images, computes
    prev/next siblings for gallery navigation. Emits a 'file-view'
    stats message for known file types.

    Python 2 code (`except ..., e`, `cmp`-based sort).
    """
    assert token is not None    # Checked by URLconf

    fileshare = FileShare.objects.get_valid_file_link_by_token(token)
    if fileshare is None:
        raise Http404

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = request.GET.get('p', '').rstrip('/')
    if not path:
        raise Http404

    if not path.startswith(fileshare.path):
        # Can not view upper dir of shared dir
        raise Http404

    zipped = gen_path_link(path, '')
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)

    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Empty username: access is granted by the share token, not a login.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    img_prev = None
    img_next = None

    # get file content
    ret_dict = {'err': '', 'file_content': '', 'encoding': '',
                'file_enc': '', 'file_encoding_list': [],
                'html_exists': False, 'filetype': filetype}
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        ret_dict['err'] = err_msg
    else:
        """Choose different approach when dealing with different type of file."""
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == SPREADSHEET:
            handle_spreadsheet(inner_path, obj_id, fileext, ret_dict)
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)
        elif filetype == IMAGE:
            # Collect sibling images in the same dir (case-insensitive
            # order) to provide prev/next navigation.
            current_commit = get_commits(repo_id, 0, 1)[0]
            parent_dir = os.path.dirname(path)
            dirs = seafile_api.list_dir_by_commit_and_path(
                current_commit.repo_id, current_commit.id, parent_dir)
            if not dirs:
                raise Http404

            img_list = []
            for dirent in dirs:
                if not stat.S_ISDIR(dirent.props.mode):
                    fltype, flext = get_file_type_and_ext(dirent.obj_name)
                    if fltype == 'Image':
                        img_list.append(dirent.obj_name)

            if len(img_list) > 1:
                img_list.sort(lambda x, y: cmp(x.lower(), y.lower()))
                cur_img_index = img_list.index(filename)
                if cur_img_index != 0:
                    img_prev = posixpath.join(parent_dir,
                                              img_list[cur_img_index - 1])
                if cur_img_index != len(img_list) - 1:
                    img_next = posixpath.join(parent_dir,
                                              img_list[cur_img_index + 1])

    # send statistic messages
    if ret_dict['filetype'] != 'Unknown':
        try:
            send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' % \
                         (repo.id, shared_by, obj_id, file_size))
        except SearpcError, e:
            # Best effort: stats failures must not break the preview.
            logger.error('Error when sending file-view message: %s' % str(e))
def list_lib_dir(request, repo_id):
    '''
        New ajax API for list library directory

        Returns a JSON object with repo metadata (owner, perms, quota,
        encryption) and `dirent_list`: dirs first, then files, each as a
        dict with name/mtime/perm plus per-file size, star, thumbnail
        and lock info. Error responses carry an `error` message with
        status 400/403/404/500.
    '''
    content_type = 'application/json; charset=utf-8'
    result = {}

    repo = get_repo(repo_id)
    if not repo:
        err_msg = _('Library does not exist.')
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=400, content_type=content_type)

    username = request.user.username
    path = request.GET.get('p', '/')
    path = normalize_dir_path(path)
    dir_id = seafile_api.get_dir_id_by_path(repo.id, path)
    if not dir_id:
        err_msg = 'Folder not found.'
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=404, content_type=content_type)

    # perm for current dir
    user_perm = check_folder_permission(request, repo_id, path)
    if not user_perm:
        return convert_repo_path_when_can_not_view_folder(request, repo_id,
                                                          path)

    if repo.encrypted \
            and not seafile_api.is_password_set(repo.id, username):
        # Client must prompt for the library password first.
        err_msg = _('Library is encrypted.')
        return HttpResponse(json.dumps({'error': err_msg,
                                        'lib_need_decrypt': True}),
                            status=403, content_type=content_type)

    head_commit = get_commit(repo.id, repo.version, repo.head_cmmt_id)
    if not head_commit:
        err_msg = _('Error: no head commit id')
        return HttpResponse(json.dumps({'error': err_msg}),
                            status=500, content_type=content_type)

    dir_list = []
    file_list = []

    dirs = seafserv_threaded_rpc.list_dir_with_perm(repo_id, path, dir_id,
                                                    username, -1, -1)
    starred_files = get_dir_starred_files(username, repo_id, path)
    for dirent in dirs:
        dirent.last_modified = dirent.mtime
        if stat.S_ISDIR(dirent.mode):
            dpath = posixpath.join(path, dirent.obj_name)
            if dpath[-1] != '/':
                dpath += '/'
            dir_list.append(dirent)
        else:
            # v0 repos don't carry size on the dirent; ask the RPC.
            if repo.version == 0:
                file_size = seafile_api.get_file_size(repo.store_id,
                                                      repo.version,
                                                      dirent.obj_id)
            else:
                file_size = dirent.size
            dirent.file_size = file_size if file_size else 0

            dirent.starred = False
            fpath = posixpath.join(path, dirent.obj_name)
            if fpath in starred_files:
                dirent.starred = True

            file_list.append(dirent)

    if is_org_context(request):
        repo_owner = seafile_api.get_org_repo_owner(repo.id)
    else:
        repo_owner = seafile_api.get_repo_owner(repo.id)

    result["repo_owner"] = repo_owner
    result["is_repo_owner"] = False
    result["has_been_shared_out"] = False
    result["is_admin"] = is_repo_admin(username, repo_id)
    if repo_owner == username:
        result["is_repo_owner"] = True
        try:
            result["has_been_shared_out"] = repo_has_been_shared_out(
                request, repo_id)
        except Exception as e:
            # Best effort: fall back to the default False on failure.
            logger.error(e)

    if result["is_admin"]:
        result["has_been_shared_out"] = True

    result["is_virtual"] = repo.is_virtual
    result["repo_name"] = repo.name
    result["user_perm"] = user_perm
    # check quota for fileupload
    result["no_quota"] = True if seaserv.check_quota(repo.id) < 0 else False
    result["encrypted"] = repo.encrypted

    dirent_list = []
    for d in dir_list:
        d_ = {}
        d_['is_dir'] = True
        d_['obj_name'] = d.obj_name
        d_['last_modified'] = d.last_modified
        d_['last_update'] = translate_seahub_time(d.last_modified)
        d_['p_dpath'] = posixpath.join(path, d.obj_name)
        d_['perm'] = d.permission   # perm for sub dir in current dir
        dirent_list.append(d_)

    size = int(request.GET.get('thumbnail_size', THUMBNAIL_DEFAULT_SIZE))

    for f in file_list:
        f_ = {}
        f_['is_file'] = True
        f_['obj_name'] = f.obj_name
        f_['last_modified'] = f.last_modified
        f_['last_update'] = translate_seahub_time(f.last_modified)
        f_['starred'] = f.starred
        f_['file_size'] = filesizeformat(f.file_size)
        f_['obj_id'] = f.obj_id
        f_['perm'] = f.permission   # perm for file in current dir

        if not repo.encrypted and ENABLE_THUMBNAIL:
            # used for providing a way to determine
            # if send a request to create thumbnail.
            fileExt = os.path.splitext(f.obj_name)[1][1:].lower()
            file_type = FILEEXT_TYPE_MAP.get(fileExt)
            if file_type == IMAGE:
                f_['is_img'] = True
            if file_type == VIDEO and ENABLE_VIDEO_THUMBNAIL:
                f_['is_video'] = True
            if file_type == XMIND:
                f_['is_xmind'] = True

            if file_type in (IMAGE, XMIND) or \
                    file_type == VIDEO and ENABLE_VIDEO_THUMBNAIL:
                # if thumbnail has already been created, return its src.
                # Then web browser will use this src to get thumbnail instead of
                # recreating it.
                thumbnail_file_path = os.path.join(THUMBNAIL_ROOT, str(size),
                                                   f.obj_id)
                thumbnail_exist = os.path.exists(thumbnail_file_path)
                if thumbnail_exist:
                    file_path = posixpath.join(path, f.obj_name)
                    src = get_thumbnail_src(repo_id, size, file_path)
                    f_['encoded_thumbnail_src'] = urlquote(src)

        if is_pro_version():
            f_['is_locked'] = True if f.is_locked else False
            f_['lock_owner'] = f.lock_owner
            f_['lock_owner_name'] = email2nickname(f.lock_owner)
            f_['locked_by_me'] = False
            if f.lock_owner == username:
                f_['locked_by_me'] = True
            # Online-office lock counts as "mine" for rw users so the UI
            # still offers editing.
            if f.lock_owner == ONLINE_OFFICE_LOCK_OWNER and \
                    user_perm == PERMISSION_READ_WRITE:
                f_['locked_by_me'] = True

        dirent_list.append(f_)

    result["dirent_list"] = dirent_list

    return HttpResponse(json.dumps(result), content_type=content_type)
def get(self, request, file_id, format=None):
    """ WOPI endpoint for check file info

    Newer CheckFileInfo variant: resolves identity/permissions from the
    cached token info dict (`get_file_info_by_token`) and reports
    capability flags derived from `can_edit` / `can_download`.

    Returns HTTP 500 on RPC/metadata errors, 401 when the file is not
    found (size == -1), 200 with the WOPI info JSON otherwise.
    """
    token = request.GET.get('access_token', None)

    info_dict = get_file_info_by_token(token)
    request_user = info_dict['request_user']
    repo_id = info_dict['repo_id']
    file_path = info_dict['file_path']
    obj_id = info_dict['obj_id']
    can_edit = info_dict['can_edit']
    can_download = info_dict['can_download']

    repo = seafile_api.get_repo(repo_id)
    if not obj_id:
        # if not cache file obj_id, then get it from seafile_api
        obj_id = seafile_api.get_file_id_by_path(repo_id, file_path)

    try:
        file_size = seafile_api.get_file_size(repo.store_id, repo.version,
                                              obj_id)
    except SearpcError as e:
        logger.error(e)
        return HttpResponse(json.dumps({}), status=500,
                            content_type=json_content_type)

    if file_size == -1:
        logger.error('File %s not found.' % file_path)
        return HttpResponse(json.dumps({}), status=401,
                            content_type=json_content_type)

    result = {}

    # necessary (required WOPI properties)
    result['BaseFileName'] = os.path.basename(file_path)
    result['Size'] = file_size
    result['UserId'] = request_user
    result['Version'] = obj_id
    result['LastModifiedTime'] = ''

    try:
        if is_pro_version():
            # Personal owner first, org owner as fallback.
            result['OwnerId'] = seafile_api.get_repo_owner(repo_id) or \
                seafile_api.get_org_repo_owner(repo_id)
        else:
            result['OwnerId'] = seafile_api.get_repo_owner(repo_id)

        dirent = seafile_api.get_dirent_by_path(repo_id, file_path)
        if dirent:
            last_modified = datetime.datetime.utcfromtimestamp(dirent.mtime)
            result['LastModifiedTime'] = last_modified.isoformat()
    except Exception as e:
        logger.error(e)
        return HttpResponse(json.dumps({}), status=500,
                            content_type=json_content_type)

    # optional
    if request_user != ANONYMOUS_EMAIL:
        result['UserFriendlyName'] = email2nickname(request_user)
        result['IsAnonymousUser'] = False
    else:
        result['IsAnonymousUser'] = True

    absolute_uri = request.build_absolute_uri('/')
    result['PostMessageOrigin'] = urllib.parse.urljoin(
        absolute_uri, SITE_ROOT).strip('/')
    result['HideSaveOption'] = True
    result['HideExportOption'] = True
    result['EnableOwnerTermination'] = True
    result['SupportsLocks'] = True
    result['SupportsGetLock'] = True
    result['DisablePrint'] = True if not can_download else False
    result['HidePrintOption'] = True if not can_download else False
    result['SupportsUpdate'] = True if can_edit else False
    result['UserCanWrite'] = True if can_edit else False
    result['ReadOnly'] = True if not can_edit else False

    # new file creation feature is not implemented on wopi host(seahub)
    # hide save as button on view/edit file page
    result['UserCanNotWriteRelative'] = True

    return HttpResponse(json.dumps(result), status=200,
                        content_type=json_content_type)
def view_shared_file(request, token):
    """ Preview file via shared link.

    Validates the share token (and, for encrypted links, the password
    form), resolves the target file, builds `ret_dict` with preview
    content per file type, bumps the link's view counter atomically,
    and emits a 'file-view' stats message.

    Python 2 code (`except ..., e`).
    """
    assert token is not None    # Checked by URLconf

    fileshare = FileShare.objects.get_valid_file_link_by_token(token)
    if fileshare is None:
        raise Http404

    if fileshare.is_encrypted():
        if not check_share_link_access(request.user.username, token):
            d = {'token': token, 'view_name': 'view_shared_file', }
            if request.method == 'POST':
                post_values = request.POST.copy()
                post_values['enc_password'] = fileshare.password
                form = SharedLinkPasswordForm(post_values)
                d['form'] = form
                if form.is_valid():
                    # set cache for non-anonymous user
                    if request.user.is_authenticated():
                        set_share_link_access(request.user.username, token)
                else:
                    return render_to_response('share_access_validation.html', d,
                                              context_instance=RequestContext(request))
            else:
                return render_to_response('share_access_validation.html', d,
                                          context_instance=RequestContext(request))

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = fileshare.path.rstrip('/')   # Normalize file path
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)

    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Empty username: access is granted by the share token, not a login.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    # get file content
    ret_dict = {'err': '', 'file_content': '', 'encoding': '', 'file_enc': '',
                'file_encoding_list': [], 'html_exists': False,
                'filetype': filetype}
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(file_size,
                                                             filetype)
    if exceeds_limit:
        ret_dict['err'] = err_msg
    else:
        """Choose different approach when dealing with different type of file."""
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == SPREADSHEET:
            handle_spreadsheet(inner_path, obj_id, fileext, ret_dict)
        elif filetype == OPENDOCUMENT:
            if file_size == 0:
                ret_dict['err'] = _(u'Invalid file format.')
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)

    # Increase file shared link view_cnt, this operation should be atomic
    fileshare.view_cnt = F('view_cnt') + 1
    fileshare.save()

    # send statistic messages
    if ret_dict['filetype'] != 'Unknown':
        try:
            send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' % \
                         (repo.id, shared_by, obj_id, file_size))
        except SearpcError, e:
            # Best effort: stats failures must not break the preview.
            logger.error('Error when sending file-view message: %s' % str(e))
def view_shared_file(request, token):
    """ Preview file via shared link.

    Duplicate variant of the same view: validates the share token (and,
    for encrypted links, the password form), resolves the target file,
    builds `ret_dict` with preview content per file type, bumps the
    link's view counter atomically, and emits a 'file-view' stats
    message. Python 2 code (`except ..., e`).
    """
    assert token is not None    # Checked by URLconf

    fileshare = FileShare.objects.get_valid_file_link_by_token(token)
    if fileshare is None:
        raise Http404

    if fileshare.is_encrypted():
        if not check_share_link_access(request.user.username, token):
            d = {
                'token': token,
                'view_name': 'view_shared_file',
            }
            if request.method == 'POST':
                post_values = request.POST.copy()
                post_values['enc_password'] = fileshare.password
                form = SharedLinkPasswordForm(post_values)
                d['form'] = form
                if form.is_valid():
                    # set cache for non-anonymous user
                    if request.user.is_authenticated():
                        set_share_link_access(request.user.username, token)
                else:
                    return render_to_response(
                        'share_access_validation.html', d,
                        context_instance=RequestContext(request))
            else:
                return render_to_response(
                    'share_access_validation.html', d,
                    context_instance=RequestContext(request))

    shared_by = fileshare.username
    repo_id = fileshare.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    path = fileshare.path.rstrip('/')   # Normalize file path
    obj_id = seafile_api.get_file_id_by_path(repo_id, path)
    if not obj_id:
        return render_error(request, _(u'File does not exist'))
    file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)

    filename = os.path.basename(path)
    filetype, fileext = get_file_type_and_ext(filename)

    # Empty username: access is granted by the share token, not a login.
    access_token = seafserv_rpc.web_get_access_token(repo.id, obj_id,
                                                     'view', '')
    raw_path = gen_file_get_url(access_token, filename)
    inner_path = gen_inner_file_get_url(access_token, filename)

    # get file content
    ret_dict = {
        'err': '',
        'file_content': '',
        'encoding': '',
        'file_enc': '',
        'file_encoding_list': [],
        'html_exists': False,
        'filetype': filetype
    }
    exceeds_limit, err_msg = file_size_exceeds_preview_limit(
        file_size, filetype)
    if exceeds_limit:
        ret_dict['err'] = err_msg
    else:
        """Choose different approach when dealing with different type of file."""
        if is_textual_file(file_type=filetype):
            handle_textual_file(request, filetype, inner_path, ret_dict)
        elif filetype == DOCUMENT:
            handle_document(inner_path, obj_id, fileext, ret_dict)
        elif filetype == SPREADSHEET:
            handle_spreadsheet(inner_path, obj_id, fileext, ret_dict)
        elif filetype == OPENDOCUMENT:
            if file_size == 0:
                ret_dict['err'] = _(u'Invalid file format.')
        elif filetype == PDF:
            handle_pdf(inner_path, obj_id, fileext, ret_dict)

    # Increase file shared link view_cnt, this operation should be atomic
    fileshare.view_cnt = F('view_cnt') + 1
    fileshare.save()

    # send statistic messages
    if ret_dict['filetype'] != 'Unknown':
        try:
            send_message('seahub.stats', 'file-view\t%s\t%s\t%s\t%s' % \
                         (repo.id, shared_by, obj_id, file_size))
        except SearpcError, e:
            # Best effort: stats failures must not break the preview.
            logger.error('Error when sending file-view message: %s' % str(e))
def test_file_property_and_dir_listing():
    """Integration test for seafile file/dir metadata and listing APIs.

    Creates a throwaway repo with one file at '/' and one copy inside
    '/test_dir', then exercises id/size/count lookups, dirent
    properties, the listing variants, and storage totals. Cleans up by
    removing the repo. Relies on module-level fixtures (`create_the_file`,
    `file_path`, `file_name`, `file_content`, `dir_name`, USER).
    Python 2 code (legacy `0644` octal literals).
    """
    t_repo_version = 1
    t_repo_id = api.create_repo('test_file_property_and_dir_listing', '',
                                USER, passwd=None)

    create_the_file()

    api.post_file(t_repo_id, file_path, '/', file_name, USER)
    api.post_dir(t_repo_id, '/', dir_name, USER)
    api.post_file(t_repo_id, file_path, '/' + dir_name, file_name, USER)

    #test is_valid_filename
    t_valid_file_name = 'valid_filename'
    t_invalid_file_name = '/invalid_filename'
    assert api.is_valid_filename(t_repo_id, t_valid_file_name)
    assert api.is_valid_filename(t_repo_id, t_invalid_file_name) == 0

    #test get_file_id_by_path
    t_file_id = api.get_file_id_by_path(t_repo_id, '/test.txt')
    assert t_file_id

    #test get_dir_id_by_path
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir')
    assert t_dir_id

    #test get_file_size
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id, t_repo_version,
                                            t_file_id)

    #test get_dir_size
    # The dir holds one copy of the same file, so its size equals the
    # file's size.
    t_dir_size = len(file_content)
    assert t_dir_size == api.get_dir_size(t_repo_id, t_repo_version,
                                          t_dir_id)

    #test get_file_count_info_by_path
    t_file_count_info = api.get_file_count_info_by_path(t_repo_id, '/')
    assert t_file_count_info.file_count == 2
    assert t_file_count_info.dir_count == 1
    assert t_file_count_info.size == t_file_size + t_dir_size

    #test get_file_id_by_commit_and_path
    t_file_id_tmp = t_file_id
    t_repo = api.get_repo(t_repo_id)
    assert t_repo
    t_commit_id = t_repo.head_cmmt_id
    t_file_id = api.get_file_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                   '/test.txt')
    assert t_file_id == t_file_id_tmp

    #test get_dirent_by_path
    # Regular-file type bit | default permission bits (Py2 octals).
    std_file_mode = 0100000 | 0644
    t_dirent_obj = api.get_dirent_by_path(t_repo_id, '/test.txt')
    assert t_dirent_obj
    assert t_dirent_obj.obj_id == t_file_id
    assert t_dirent_obj.obj_name == 'test.txt'
    assert t_dirent_obj.mode == std_file_mode
    assert t_dirent_obj.version == t_repo_version
    assert t_dirent_obj.size == t_file_size
    assert t_dirent_obj.modifier == USER

    #test list_file_by_file_id
    t_block_list = api.list_file_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_blocks_by_file_id
    t_block_list = api.list_blocks_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_dir_by_dir_id
    t_dir_list = api.list_dir_by_dir_id(t_repo_id, t_dir_id)
    assert len(t_dir_list) == 1

    #test list_dir_by_path
    t_dir_list = api.list_dir_by_path(t_repo_id, '/test_dir')
    assert len(t_dir_list) == 1

    #test get_dir_id_by_commit_and_path
    t_dir_id = api.get_dir_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert t_dir_id

    #test list_dir_by_commit_and_path
    t_dir_list = api.list_dir_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert len(t_dir_list) == 1

    #test list_dir_with_perm
    t_dir_list = api.list_dir_with_perm(t_repo_id, '/test_dir', t_dir_id,
                                        USER)
    assert len(t_dir_list) == 1

    #test mkdir_with_parent
    api.mkdir_with_parents(t_repo_id, '/test_dir', 'test_subdir', USER)
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir/test_subdir')
    assert t_dir_id

    #test get_total_storage
    t_total_size = api.get_total_storage()
    t_repo_size = api.get_repo_size(t_repo_id)
    assert t_total_size == t_repo_size

    #get_total_file_number
    # Give the background file-count updater time to run.
    time.sleep(1)
    assert api.get_total_file_number() == 2

    api.remove_repo(t_repo_id)
def get_file_size(store_id, repo_version, file_id): size = seafile_api.get_file_size(store_id, repo_version, file_id) return size if size else 0
def test_file_operation():
    """Integration test for seafile file operations.

    Exercises post_file/post_dir, synchronous and asynchronous
    copy_file/move_file (including quota-exhausted failure paths),
    post_empty_file, rename_file, put_file, get_file_revisions,
    del_file and get_deleted, across two throwaway repos. Relies on
    module-level fixtures (`create_the_file`, `file_path`, `file_name`,
    `file_content`, `dir_name`, the new_*_name constants, USER).
    """
    t_repo_version = 1
    t_repo_id1 = api.create_repo('test_file_operation1', '', USER,
                                 passwd=None)

    create_the_file()

    # test post_file
    assert api.post_file(t_repo_id1, file_path, '/', file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + file_name)
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version,
                                            t_file_id)

    # test post_dir
    assert api.post_dir(t_repo_id1, '/', dir_name, USER) == 0

    # test copy_file (synchronize)
    t_copy_file_result1 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id1, '/', new_file_name,
                                        USER, 0, 1)
    assert t_copy_file_result1
    # Synchronous copy: no background task is spawned.
    assert t_copy_file_result1.task_id is None
    assert not t_copy_file_result1.background
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id1, t_repo_version,
                                            t_file_id)

    # test copy_file (asynchronous)
    t_repo_id2 = api.create_repo('test_file_operation2', '', USER,
                                 passwd=None)
    # Shrink the quota so the async copy is forced to fail.
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', file_name,
                                        USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        assert t_copy_task.failed
        assert t_copy_task.failed_reason == 'Quota is full'
        if t_copy_task.failed:
            break
    # Unlimited quota again; the async copy should now succeed.
    api.set_user_quota(USER, -1)
    t_copy_file_result2 = api.copy_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', file_name,
                                        USER, 1, 0)
    assert t_copy_file_result2
    assert t_copy_file_result2.task_id
    assert t_copy_file_result2.background
    while True:
        time.sleep(0.1)
        t_copy_task = api.get_copy_task(t_copy_file_result2.task_id)
        if t_copy_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version,
                                            t_file_id)

    # test move_file (synchronize)
    t_move_file_info1 = api.get_dirent_by_path(t_repo_id1,
                                               '/' + new_file_name)
    t_move_file_result1 = api.move_file(t_repo_id1, '/', new_file_name,
                                        t_repo_id1, '/' + dir_name,
                                        new_file_name, 1, USER, 0, 1)
    assert t_move_file_result1
    t_move_file_info2 = api.get_dirent_by_path(
        t_repo_id1, '/' + dir_name + '/' + new_file_name)
    # Moving must preserve the file's mtime.
    assert t_move_file_info1.mtime == t_move_file_info2.mtime
    t_file_id = api.get_file_id_by_path(t_repo_id1, '/' + new_file_name)
    assert t_file_id is None

    # test move_file (synchronize)
    t_move_file_result1 = api.move_file(t_repo_id1, '/' + dir_name,
                                        new_file_name, t_repo_id1, '/',
                                        new_file_name_2, 1, USER, 0, 1)
    assert t_move_file_result1
    t_file_id = api.get_file_id_by_path(t_repo_id1,
                                        '/' + dir_name + '/' + new_file_name)
    assert t_file_id is None

    # test move_file (asynchronous)
    # Same quota-exhaustion trick as the async copy above.
    usage = api.get_user_self_usage(USER)
    api.set_user_quota(USER, usage + 1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', new_file_name,
                                        1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        assert t_move_task.failed
        assert t_move_task.failed_reason == 'Quota is full'
        if t_move_task.failed:
            break
    api.set_user_quota(USER, -1)
    t_move_file_result2 = api.move_file(t_repo_id1, '/', file_name,
                                        t_repo_id2, '/', new_file_name,
                                        1, USER, 1, 0)
    assert t_move_file_result2
    assert t_move_file_result2.task_id
    assert t_move_file_result2.background
    while True:
        time.sleep(0.1)
        t_move_task = api.get_copy_task(t_move_file_result2.task_id)
        if t_move_task.successful:
            break
    t_file_id = api.get_file_id_by_path(t_repo_id2, '/' + new_file_name)
    assert t_file_size == api.get_file_size(t_repo_id2, t_repo_version,
                                            t_file_id)

    # test post_empty_file
    assert api.post_empty_file(t_repo_id1, '/' + dir_name,
                               empty_file_name, USER) == 0
    t_file_id = api.get_file_id_by_path(
        t_repo_id1, '/' + dir_name + '/' + empty_file_name)
    assert api.get_file_size(t_repo_id1, t_repo_version, t_file_id) == 0

    # test rename_file
    assert api.rename_file(t_repo_id1, '/' + dir_name, empty_file_name,
                           new_empty_file_name, USER) == 0

    #test put_file
    t_new_file_id = api.put_file(t_repo_id1, file_path, '/' + dir_name,
                                 new_empty_file_name, USER, None)
    assert t_new_file_id

    # test get_file_revisions
    t_commit_list = api.get_file_revisions(t_repo_id2, None,
                                           '/' + file_name, 2)
    assert t_commit_list
    assert len(t_commit_list) == 2
    assert t_commit_list[0].creator_name == USER

    # test del_file
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    # test get_deleted
    t_deleted_file_list = api.get_deleted(t_repo_id2, 1)
    assert t_deleted_file_list
    assert len(t_deleted_file_list) == 2
    assert t_deleted_file_list[0].obj_name == file_name
    assert t_deleted_file_list[0].basedir == '/'

    # test del a non-exist file. should return 0.
    assert api.del_file(t_repo_id2, '/', file_name, USER) == 0

    # Cleanup: remove remaining files and the repos.
    assert api.del_file(t_repo_id1, '/' + dir_name, new_empty_file_name,
                        USER) == 0
    assert api.del_file(t_repo_id1, '/' + dir_name, new_file_name,
                        USER) == 0
    assert api.del_file(t_repo_id2, '/', new_file_name, USER) == 0
    assert api.del_file(t_repo_id1, '/', new_file_name_2, USER) == 0

    time.sleep(1)
    api.remove_repo(t_repo_id1)
def user_info(request, email):
    """Render the sysadmin per-user detail page for `email`.

    Duplicate variant of the same view: gathers owned repos,
    quota/usage (org-aware), shared-in repos, profile data, and the
    user's share/upload links, then renders 'sysadmin/userinfo.html'.

    Side effect: FileShare / UploadLinkShare rows whose repo or target
    path no longer exists are deleted while building the link lists.
    """
    owned_repos = seafile_api.get_owned_repo_list(email)

    # Org membership decides which quota RPCs apply.
    org = ccnet_threaded_rpc.get_orgs_by_user(email)
    org_name = None
    if not org:
        space_usage = seafile_api.get_user_self_usage(email)
        space_quota = seafile_api.get_user_quota(email)
        if CALC_SHARE_USAGE:
            share_usage = seafile_api.get_user_share_usage(email)
            share_quota = seafile_api.get_user_share_quota(email)
        else:
            share_quota = share_usage = 0
    else:
        # Only the first org is used — presumably a user belongs to at
        # most one org here; TODO confirm.
        org_id = org[0].org_id
        org_name = org[0].org_name
        space_usage = seafserv_threaded_rpc.get_org_user_quota_usage(org_id,
                                                                     email)
        space_quota = seafserv_threaded_rpc.get_org_user_quota(org_id, email)
        share_usage = share_quota = 0

    # Repos that are share to user
    in_repos = seafile_api.get_share_in_repo_list(email, -1, -1)

    # get user profile
    profile = Profile.objects.get_profile_by_user(email)
    d_profile = DetailedProfile.objects.get_detailed_profile_by_user(email)

    user_shared_links = []
    # download links
    p_fileshares = []
    fileshares = list(FileShare.objects.filter(username=email))
    for fs in fileshares:
        r = seafile_api.get_repo(fs.repo_id)
        if not r:
            # Stale link: repo is gone — drop the DB row.
            fs.delete()
            continue

        if fs.is_file_share_link():
            if seafile_api.get_file_id_by_path(r.id, fs.path) is None:
                # Stale link: file is gone — drop the DB row.
                fs.delete()
                continue
            fs.filename = os.path.basename(fs.path)
            path = fs.path.rstrip('/')  # Normalize file path
            obj_id = seafile_api.get_file_id_by_path(r.id, path)
            fs.file_size = seafile_api.get_file_size(r.store_id, r.version,
                                                     obj_id)
        else:
            if seafile_api.get_dir_id_by_path(r.id, fs.path) is None:
                # Stale link: dir is gone — drop the DB row.
                fs.delete()
                continue
            fs.filename = os.path.basename(fs.path.rstrip('/'))
            path = fs.path
            if path[-1] != '/':  # Normalize dir path
                path += '/'
            # get dir size
            dir_id = seafserv_threaded_rpc.get_dirid_by_path(r.id,
                                                             r.head_cmmt_id,
                                                             path)
            fs.dir_size = seafserv_threaded_rpc.get_dir_size(r.store_id,
                                                             r.version,
                                                             dir_id)

        fs.is_download = True
        p_fileshares.append(fs)
    # Most-viewed links first.
    p_fileshares.sort(key=lambda x: x.view_cnt, reverse=True)
    user_shared_links += p_fileshares

    # upload links
    uploadlinks = list(UploadLinkShare.objects.filter(username=email))
    p_uploadlinks = []
    for link in uploadlinks:
        r = seafile_api.get_repo(link.repo_id)
        if not r:
            link.delete()
            continue
        if seafile_api.get_dir_id_by_path(r.id, link.path) is None:
            link.delete()
            continue
        link.dir_name = os.path.basename(link.path.rstrip('/'))
        link.is_upload = True
        p_uploadlinks.append(link)
    p_uploadlinks.sort(key=lambda x: x.view_cnt, reverse=True)
    user_shared_links += p_uploadlinks

    return render_to_response(
        'sysadmin/userinfo.html', {
            'owned_repos': owned_repos,
            'space_quota': space_quota,
            'space_usage': space_usage,
            'share_quota': share_quota,
            'share_usage': share_usage,
            'CALC_SHARE_USAGE': CALC_SHARE_USAGE,
            'in_repos': in_repos,
            'email': email,
            'profile': profile,
            'd_profile': d_profile,
            'org_name': org_name,
            'user_shared_links': user_shared_links,
        },
        context_instance=RequestContext(request))
def post(self, request):
    """ Copy/move file/dir, and return task id.

    Permission checking:
    1. move: user with 'rw' permission for current file, 'rw' permission
       for dst parent dir;
    2. copy: user with 'r' permission for current file, 'rw' permission
       for dst parent dir;
    """
    src_repo_id = request.data.get('src_repo_id', None)
    src_parent_dir = request.data.get('src_parent_dir', None)
    src_dirent_name = request.data.get('src_dirent_name', None)
    dst_repo_id = request.data.get('dst_repo_id', None)
    dst_parent_dir = request.data.get('dst_parent_dir', None)
    operation = request.data.get('operation', None)
    dirent_type = request.data.get('dirent_type', None)

    # argument check: every parameter is mandatory
    for name, value in (('src_repo_id', src_repo_id),
                        ('src_parent_dir', src_parent_dir),
                        ('src_dirent_name', src_dirent_name),
                        ('dst_repo_id', dst_repo_id),
                        ('dst_parent_dir', dst_parent_dir),
                        ('operation', operation),
                        ('dirent_type', dirent_type)):
        if not value:
            error_msg = '%s invalid.' % name
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if len(dst_parent_dir + src_dirent_name) > MAX_PATH:
        error_msg = _('Destination path is too long.')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    operation = operation.lower()
    if operation not in ('move', 'copy'):
        error_msg = "operation can only be 'move' or 'copy'."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if operation == 'move':
        # Moving within the same parent dir is a no-op.
        if src_repo_id == dst_repo_id and src_parent_dir == dst_parent_dir:
            error_msg = _('Invalid destination path')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    dirent_type = dirent_type.lower()
    if dirent_type not in ('file', 'dir'):
        # Fix: this message used to wrongly say "operation".
        error_msg = "dirent_type can only be 'file' or 'dir'."
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # src resource check
    src_repo = seafile_api.get_repo(src_repo_id)
    if not src_repo:
        error_msg = 'Library %s not found.' % src_repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    src_dirent_path = posixpath.join(src_parent_dir, src_dirent_name)

    file_id = None
    if dirent_type == 'file':
        file_id = seafile_api.get_file_id_by_path(src_repo_id,
                                                  src_dirent_path)
        if not file_id:
            error_msg = 'File %s not found.' % src_dirent_path
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    dir_id = None
    if dirent_type == 'dir':
        dir_id = seafile_api.get_dir_id_by_path(src_repo_id, src_dirent_path)
        if not dir_id:
            error_msg = 'Folder %s not found.' % src_dirent_path
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # dst resource check
    dst_repo = seafile_api.get_repo(dst_repo_id)
    if not dst_repo:
        error_msg = 'Library %s not found.' % dst_repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    if not seafile_api.get_dir_id_by_path(dst_repo_id, dst_parent_dir):
        error_msg = 'Folder %s not found.' % dst_parent_dir
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check for dst parent dir
    if check_folder_permission(request, dst_repo_id, dst_parent_dir) != 'rw':
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # Quota is charged to the destination for every copy, and for a move
    # that crosses repo owners. NOTE: 'or' binds looser than 'and', so the
    # condition reads: copy OR (move AND different owners) -- intended.
    if operation == 'copy' or \
            operation == 'move' and \
            get_repo_owner(request, src_repo_id) != get_repo_owner(request, dst_repo_id):

        current_size = 0
        if file_id:
            current_size = seafile_api.get_file_size(src_repo.store_id,
                                                     src_repo.version,
                                                     file_id)
        if dir_id:
            current_size = seafile_api.get_dir_size(src_repo.store_id,
                                                    src_repo.version,
                                                    dir_id)

        # check if above quota for dst repo
        if seafile_api.check_quota(dst_repo_id, current_size) < 0:
            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))

    # Avoid name clash in the destination dir.
    new_dirent_name = check_filename_with_rename(dst_repo_id, dst_parent_dir,
                                                 src_dirent_name)

    username = request.user.username
    if operation == 'move':
        # permission check for src parent dir
        if check_folder_permission(request, src_repo_id,
                                   src_parent_dir) != 'rw':
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # A directory must not be moved into its own subtree.
        if dirent_type == 'dir' and src_repo_id == dst_repo_id and \
                dst_parent_dir.startswith(src_dirent_path + '/'):
            error_msg = _(u'Can not move directory %(src)s to its subdirectory %(des)s') \
                % {'src': escape(src_dirent_path),
                   'des': escape(dst_parent_dir)}
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if dirent_type == 'file':
            # check file lock
            try:
                is_locked, locked_by_me = check_file_lock(src_repo_id,
                                                          src_dirent_path,
                                                          username)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                                 error_msg)

            if is_locked and not locked_by_me:
                error_msg = _("File is locked")
                return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            res = seafile_api.move_file(src_repo_id, src_parent_dir,
                                        src_dirent_name, dst_repo_id,
                                        dst_parent_dir, new_dirent_name,
                                        replace=False, username=username,
                                        need_progress=1)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    if operation == 'copy':
        # permission check for src parent dir
        if parse_repo_perm(check_folder_permission(
                request, src_repo_id, src_parent_dir)).can_copy is False:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        try:
            res = seafile_api.copy_file(src_repo_id, src_parent_dir,
                                        src_dirent_name, dst_repo_id,
                                        dst_parent_dir, new_dirent_name,
                                        username=username, need_progress=1)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    if not res:
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    result = {}
    if res.background:
        # Operation was queued server-side; client polls with this task id.
        result['task_id'] = res.task_id

    return Response(result)