def post_asset_files(repo_id, dtable_uuid, username):
    """Upload extracted dtable asset files to the file server.

    Walks /tmp/dtable-io/<dtable_uuid>/dtable_zip_extracted/ and posts every
    file into the library under /asset/<dtable_uuid>/..., creating parent
    folders on demand. The dtable metadata files (content.json, forms.json)
    are skipped. Used when importing a dtable from a zip archive.
    """
    asset_root = os.path.join('/asset', dtable_uuid)
    extracted_root = os.path.join('/tmp/dtable-io', dtable_uuid, 'dtable_zip_extracted/')

    for current_dir, _, names in os.walk(extracted_root):
        # directory of the current file inside the zip; the extra 6-char
        # offset skips one more leading path component — presumably an
        # 'asset/' folder inside the archive, TODO confirm against exporter
        relative_dir = current_dir[len(extracted_root) + 6:]
        target_parent = os.path.join(asset_root, relative_dir)
        for name in names:
            if name in ('content.json', 'forms.json'):
                continue  # dtable metadata, not an asset file
            local_path = os.path.join(current_dir, name)
            # ensure the destination folder exists before posting the file
            if not seafile_api.get_dir_id_by_path(repo_id, target_parent):
                seafile_api.mkdir_with_parents(repo_id, '/', target_parent[1:], username)
            seafile_api.post_file(repo_id, local_path, target_parent, name, username)
def repo_created_cb(sender, **kwargs):
    """Signal receiver for library creation.

    Records a 'repo-create' event for the creator (org-scoped when an org
    id is present) and then lays out the directory skeleton of the chosen
    library template, if any.
    """
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    detail = {
        'creator': creator,
        'repo_id': repo_id,
        'repo_name': kwargs['repo_name'],
    }

    # persist the creation event
    session = SeafEventsSession()
    org_id = kwargs['org_id']
    if org_id > 0:
        seafevents.save_org_user_events(session, org_id, 'repo-create', detail, [creator], None)
    else:
        seafevents.save_user_events(session, 'repo-create', detail, [creator], None)
    session.close()

    # build the template's directories inside the new library; a missing or
    # unknown template key raises inside the try and is only logged
    LIBRARY_TEMPLATES = getattr(settings, 'LIBRARY_TEMPLATES', {})
    library_template = kwargs['library_template']
    if isinstance(library_template, unicode):
        library_template = library_template.encode('utf-8')
    try:
        for dir_path in LIBRARY_TEMPLATES[library_template]:
            seafile_api.mkdir_with_parents(repo_id, '/', dir_path.strip('/'), creator)
    except Exception as e:
        logger.error(e)
def repo_created_cb(sender, **kwargs):
    """Handle the repo-created signal.

    Saves a "repo-create" user event (org-scoped if org_id > 0), then
    creates any directory structure defined by the selected library
    template.
    """
    org_id = kwargs["org_id"]
    creator = kwargs["creator"]
    repo_id = kwargs["repo_id"]

    event_type = "repo-create"
    event_detail = {
        "creator": creator,
        "repo_id": repo_id,
        "repo_name": kwargs["repo_name"],
    }
    event_users = [creator]

    session = SeafEventsSession()
    if org_id > 0:
        seafevents.save_org_user_events(session, org_id, event_type, event_detail, event_users, None)
    else:
        seafevents.save_user_events(session, event_type, event_detail, event_users, None)
    session.close()

    templates = getattr(settings, "LIBRARY_TEMPLATES", {})
    chosen = kwargs["library_template"]
    if isinstance(chosen, unicode):
        chosen = chosen.encode("utf-8")
    try:
        # a KeyError for an unknown/missing template lands in the handler below
        for template_dir in templates[chosen]:
            seafile_api.mkdir_with_parents(repo_id, "/", template_dir.strip("/"), creator)
    except Exception as e:
        logger.error(e)
def get(self, request, workspace_id): """get table file upload link """ # argument check table_name = request.GET.get('name', None) if not table_name: error_msg = 'name invalid.' return api_error(status.HTTP_400_BAD_REQUEST, error_msg) # resource check workspace = Workspaces.objects.get_workspace_by_id(workspace_id) if not workspace: error_msg = 'Workspace %s not found.' % workspace_id return api_error(status.HTTP_404_NOT_FOUND, error_msg) repo_id = workspace.repo_id repo = seafile_api.get_repo(repo_id) if not repo: error_msg = 'Library %s not found.' % repo_id return api_error(status.HTTP_404_NOT_FOUND, error_msg) dtable = DTables.objects.get_dtable(workspace, table_name) if not dtable: error_msg = 'dtable %s not found.' % table_name return api_error(status.HTTP_404_NOT_FOUND, error_msg) # permission check username = request.user.username owner = workspace.owner if username != owner: error_msg = 'Permission denied.' return api_error(status.HTTP_403_FORBIDDEN, error_msg) try: token = seafile_api.get_fileserver_access_token(repo_id, 'dummy', 'upload', '', use_onetime=False) except Exception as e: logger.error(e) error_msg = 'Internal Server Error' return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) upload_link = gen_file_upload_url(token, 'upload-api') # create asset dir asset_dir_path = '/asset/' + str(dtable.uuid) asset_dir_id = seafile_api.get_dir_id_by_path(repo_id, asset_dir_path) if not asset_dir_id: seafile_api.mkdir_with_parents(repo_id, '/', asset_dir_path[1:], owner) dtable.modifier = username dtable.save() res = dict() res['upload_link'] = upload_link res['parent_path'] = asset_dir_path return Response(res)
def get(self, request, token):
    """Return an upload link for files submitted through a shared form.

    Resolves the form by its share token, checks submit permission and the
    workspace asset quota, ensures the dtable asset dir exists, and returns
    an upload URL scoped to that dir plus the relative sub-paths the client
    should use for images and files.
    """
    # resource check
    form_obj = DTableForms.objects.get_form_by_token(token)
    if not form_obj:
        error_msg = 'Form %s not found.' % token
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    workspace_id = form_obj.workspace_id
    workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
    if not workspace:
        error_msg = 'Workspace %s not found.' % workspace_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)
    repo_id = workspace.repo_id

    dtable_uuid = form_obj.dtable_uuid
    dtable = DTables.objects.get_dtable_by_uuid(dtable_uuid)
    if not dtable:
        error_msg = 'Table %s not found.' % dtable_uuid
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    if not check_form_submit_permission(request, form_obj):
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    if not check_user_workspace_quota(workspace):
        error_msg = 'Asset quota exceeded.'
        return api_error(HTTP_443_ABOVE_QUOTA, error_msg)

    # create asset dir on first use; anonymous form submitters have no
    # username, hence the empty-string user for mkdir
    asset_dir_path = '/asset/' + str(dtable.uuid)
    asset_dir_id = seafile_api.get_dir_id_by_path(repo_id, asset_dir_path)
    if not asset_dir_id:
        seafile_api.mkdir_with_parents(repo_id, '/', asset_dir_path[1:], '')

    # get token; obj_id pins uploads to the asset dir
    obj_id = json.dumps({'parent_dir': asset_dir_path})
    try:
        token = seafile_api.get_fileserver_access_token(repo_id, obj_id, 'upload', '', use_onetime=False)
    except Exception as e:
        logger.error(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
    upload_link = gen_file_upload_url(token, 'upload-api')

    res = dict()
    res['upload_link'] = upload_link
    res['parent_path'] = asset_dir_path
    res['img_relative_path'] = FORM_UPLOAD_IMG_RELATIVE_PATH
    # files are grouped by month: "YYYY-MM" suffix from today's date
    res['file_relative_path'] = os.path.join(UPLOAD_FILE_RELATIVE_PATH, str(datetime.today())[:7])
    return Response(res)
def repo_created_cb(sender, **kwargs):
    """Signal receiver for library creation.

    Saves a 'create repo' activity record (visible to the creator and all
    users the repo is shared with) and creates the directory layout of the
    selected library template, if any.
    """
    org_id = kwargs['org_id']
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    # Move here to avoid model import during Django setup.
    # TODO: Don't register signal/handlers during Seahub start.
    if org_id > 0:
        related_users = seafile_api.org_get_shared_users_by_repo(
            org_id, repo_id)
    else:
        related_users = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1

    if creator not in related_users:
        related_users.append(creator)

    record = {
        'op_type': 'create',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'op_user': creator,
        'related_users': related_users,
        'org_id': org_id,
    }

    from utils import SeafEventsSession
    session = SeafEventsSession()
    try:
        seafevents.save_user_activity(session, record)
    finally:
        # always release the DB session, even if saving the activity fails
        session.close()

    LIBRARY_TEMPLATES = getattr(settings, 'LIBRARY_TEMPLATES', {})
    library_template = kwargs['library_template']
    if LIBRARY_TEMPLATES and library_template:
        if isinstance(library_template, unicode):
            library_template = library_template.encode('utf-8')
        try:
            dir_path_list = LIBRARY_TEMPLATES[library_template]
            for dir_path in dir_path_list:
                # BUG FIX: mkdir_with_parents takes a username string, not the
                # related_users list; sibling handlers pass the creator here.
                seafile_api.mkdir_with_parents(repo_id, '/', dir_path.strip('/'), creator)
        except Exception as e:
            logger.error(e)
def repo_created_cb(sender, **kwargs):
    """Signal receiver for library creation.

    Saves a 'create repo' activity record for the creator and all users the
    repo is shared with, then creates the directory layout of the selected
    library template, if any.
    """
    org_id = kwargs['org_id']
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    # Move here to avoid model import during Django setup.
    # TODO: Don't register signal/handlers during Seahub start.
    if org_id > 0:
        related_users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
    else:
        related_users = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1

    # BUG FIX: guard the append so the creator is not listed twice when the
    # repo is already shared with them (matches the sibling handler).
    if creator not in related_users:
        related_users.append(creator)

    record = {
        'op_type': 'create',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'op_user': creator,
        'related_users': related_users,
        'org_id': org_id,
    }

    from utils import SeafEventsSession
    session = SeafEventsSession()
    try:
        seafevents.save_user_activity(session, record)
    finally:
        # always release the DB session, even if saving the activity fails
        session.close()

    LIBRARY_TEMPLATES = getattr(settings, 'LIBRARY_TEMPLATES', {})
    library_template = kwargs['library_template']
    if LIBRARY_TEMPLATES and library_template:
        if isinstance(library_template, unicode):
            library_template = library_template.encode('utf-8')
        try:
            dir_path_list = LIBRARY_TEMPLATES[library_template]
            for dir_path in dir_path_list:
                # BUG FIX: mkdir_with_parents takes a username string, not the
                # related_users list; sibling handlers pass the creator here.
                seafile_api.mkdir_with_parents(repo_id, '/', dir_path.strip('/'), creator)
        except Exception as e:
            logger.error(e)
def create_plugin_asset_files(repo_id, username, plugin_name, plugin_path, folder_path):
    """Post every file extracted under TMP_EXTRACTED_PATH into the plugin's
    asset folder <plugin_path>/<plugin_name>/..., creating parent folders
    on demand.

    If the zip wrapped its content in a top-level folder (folder_path),
    that wrapping component is stripped from the destination path.
    """
    prefix_len = len(TMP_EXTRACTED_PATH)
    for walk_root, _, walk_files in os.walk(TMP_EXTRACTED_PATH):
        # directory of the current file relative to the extraction root
        zip_inner_dir = walk_root[prefix_len:]
        for name in walk_files:
            src_path = os.path.join(walk_root, name)
            # trailing '' keeps a '/' at the end of the destination path
            dst_parent = os.path.join(plugin_path, plugin_name, zip_inner_dir, '')
            if folder_path:
                # drop the zip's own wrapping folder from the destination
                dst_parent = rreplace(dst_parent, folder_path + '/', '', 1)
            # ensure the destination folder exists before posting the file
            if not seafile_api.get_dir_id_by_path(repo_id, dst_parent):
                seafile_api.mkdir_with_parents(repo_id, '/', dst_parent[1:], username)
            seafile_api.post_file(repo_id, src_path, dst_parent, name, username)
def repo_created_cb(sender, **kwargs):
    """Repo-created signal handler.

    Persists a 'repo-create' event for the creator (org-scoped when an org
    id is present) and lays out the library-template directories when a
    template was selected.
    """
    org_id = kwargs['org_id']
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    event_detail = {
        'creator': creator,
        'repo_id': repo_id,
        'repo_name': repo_name,
    }

    # Move here to avoid model import during Django setup.
    # TODO: Don't register signal/handlers during Seahub start.
    from utils import SeafEventsSession
    session = SeafEventsSession()
    if org_id > 0:
        seafevents.save_org_user_events(session, org_id, 'repo-create', event_detail, [creator], None)
    else:
        seafevents.save_user_events(session, 'repo-create', event_detail, [creator], None)
    session.close()

    templates = getattr(settings, 'LIBRARY_TEMPLATES', {})
    selected = kwargs['library_template']
    if templates and selected:
        if isinstance(selected, unicode):
            selected = selected.encode('utf-8')
        try:
            for template_dir in templates[selected]:
                seafile_api.mkdir_with_parents(repo_id, '/', template_dir.strip('/'), creator)
        except Exception as e:
            logger.error(e)
def _migrate_image(self, dtable, link):
    """ migrate asset from forms path to dtable asset path

    Moves an image uploaded through a form from the form upload folder to
    the dtable's regular image folder (grouped by month) and rewrites the
    link to point at the new location.

    :param dtable: which dtable
    :param link: form asset link (URL-quoted path or URL ending in the image name)
    :return: (error, None) on failure, or (None, new_link) on success
    """
    link = parse.unquote(link.strip('/'))
    image_name = os.path.basename(link)
    form_image_path = os.path.join('/asset', str(dtable.uuid), FORM_UPLOAD_IMG_RELATIVE_PATH, image_name)
    repo_id = dtable.workspace.repo_id
    if not seafile_api.get_file_id_by_path(repo_id, form_image_path):
        logger.error('can\'t find form image by path: %s', form_image_path)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal server error'), None

    # destination folder is grouped by month: .../<UPLOAD_IMG_RELATIVE_PATH>/YYYY-MM
    dtable_asset_image_dir = os.path.join('/asset', str(dtable.uuid), UPLOAD_IMG_RELATIVE_PATH, str(datetime.today())[:7])
    dtable_asset_image_dir_id = seafile_api.get_dir_id_by_path(
        repo_id, dtable_asset_image_dir)
    if not dtable_asset_image_dir_id:
        # 'form' is used as the acting username for form-originated assets
        seafile_api.mkdir_with_parents(repo_id, '/', dtable_asset_image_dir[1:], 'form')

    # avoid clobbering an existing file with the same name at the destination
    new_image_name = check_filename_with_rename(repo_id, dtable_asset_image_dir, image_name)
    seafile_api.move_file(repo_id, os.path.dirname(form_image_path), image_name,
                          repo_id, dtable_asset_image_dir, new_image_name, 0, 'form', 0)

    # rebuild the public URL for the moved image
    path = os.path.join(UPLOAD_IMG_RELATIVE_PATH, str(datetime.today())[:7], new_image_name)
    new_image_url = '/workspace/%s/asset/%s/%s' % (dtable.workspace_id, str(dtable.uuid), path)
    return None, DTABLE_WEB_SERVICE_URL.rstrip('/') + new_image_url
def copy_asset(src_repo_id, src_dtable_uuid, dst_repo_id, dst_dtable_uuid, username):
    """Copy a dtable's /asset/<uuid> directory into another repo.

    Does nothing (returns None) when the source has no asset directory.
    Returns the result of seafile_api.copy_file otherwise.
    """
    source_dir = os.path.join('/asset', str(src_dtable_uuid))
    if not seafile_api.get_dir_id_by_path(src_repo_id, source_dir):
        return  # no assets to copy

    target_dir = os.path.join('/asset', str(dst_dtable_uuid))
    source_parent = os.path.dirname(source_dir)
    target_parent = os.path.dirname(target_dir)

    # ensure the destination '/asset' parent exists before copying into it
    if not seafile_api.get_dir_id_by_path(dst_repo_id, target_parent):
        seafile_api.mkdir_with_parents(dst_repo_id, '/', target_parent[1:], username)

    return seafile_api.copy_file(src_repo_id, source_parent, str(src_dtable_uuid),
                                 dst_repo_id, target_parent, str(dst_dtable_uuid),
                                 username=username, need_progress=0, synchronous=1)
def post(self, request):
    """ Multi create folders.

    Creates each requested folder (with parents) and reports per-path
    success/failure instead of failing the whole request.

    Permission checking:
    1. user with `rw` permission for every layer of subdirectories.

    Parameter:
    {
        "repo_id": "4dfdf5b6-806f-4a35-b2b7-604051d2114e",
        "paths": ["/1/2/", "/3/4/", "/5/6"]
    }
    """
    # argument check
    path_list = request.data.get('paths', None)
    if not path_list:
        error_msg = 'paths invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    repo_id = request.data.get('repo_id', None)
    if not repo_id:
        error_msg = 'repo_id invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # resource check
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check (library root; per-path parents are checked below)
    if check_folder_permission(request, repo_id, '/') != 'rw':
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    result = {}
    result['failed'] = []
    result['success'] = []
    username = request.user.username
    for path in path_list:

        common_dict = {
            'repo_id': repo_id,
            'path': path,
        }

        path = normalize_dir_path(path)
        obj_name_list = path.strip('/').split('/')

        # validate every path component's name; 'fake_repo_id' is a
        # placeholder — the repo id is not used for name validation
        for obj_name in obj_name_list:
            try:
                # check if path is valid
                is_valid_name = seafile_api.is_valid_filename(
                    'fake_repo_id', obj_name)
            except Exception as e:
                logger.error(e)
                error_dict = {'error_msg': 'Internal Server Error'}
                common_dict.update(error_dict)
                result['failed'].append(common_dict)
                continue

            if not is_valid_name:
                error_dict = {'error_msg': 'path invalid.'}
                common_dict.update(error_dict)
                result['failed'].append(common_dict)
                continue

        if seafile_api.get_dir_id_by_path(repo_id, path):
            error_dict = {'error_msg': 'Folder already exists.'}
            common_dict.update(error_dict)
            result['failed'].append(common_dict)
            continue

        # check parent directory's permission
        parent_dir = os.path.dirname(path.rstrip('/'))
        try:
            permission = get_folder_permission_recursively(
                username, repo_id, parent_dir)
        except Exception as e:
            logger.error(e)
            error_dict = {'error_msg': 'Internal Server Error'}
            common_dict.update(error_dict)
            result['failed'].append(common_dict)
            continue

        if permission != 'rw':
            error_dict = {'error_msg': 'Permission denied.'}
            common_dict.update(error_dict)
            result['failed'].append(common_dict)
            continue

        try:
            # rename obj name if name is existed
            seafile_api.mkdir_with_parents(repo_id, '/', path.strip('/'), username)
        except Exception as e:
            logger.error(e)
            error_dict = {'error_msg': 'Internal Server Error'}
            common_dict.update(error_dict)
            result['failed'].append(common_dict)
            continue

        result['success'].append(common_dict)

    return Response(result)
def get(self, request): """get file upload link by dtable api token Permission: 1. valid token """ # argument check auth = request.META.get('HTTP_AUTHORIZATION', '').split() if not auth or auth[0].lower() != 'token' or len(auth) != 2: return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.') api_token = auth[1] # resource check try: api_token_obj = DTableAPIToken.objects.get_by_token(api_token) if not api_token_obj: return api_error(status.HTTP_404_NOT_FOUND, 'api token not found.') except Exception as e: logger.error(e) error_msg = 'Internal Server Error.' return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) dtable = api_token_obj.dtable table_name = dtable.name workspace_id = dtable.workspace_id error, workspace, dtable = _resource_check(workspace_id, table_name) if error: return error if not check_user_workspace_quota(workspace): return api_error(HTTP_443_ABOVE_QUOTA, 'Asset quota exceeded.') # create asset dir repo_id = workspace.repo_id asset_dir_path = '/asset/' + str(dtable.uuid) asset_dir_id = seafile_api.get_dir_id_by_path(repo_id, asset_dir_path) if not asset_dir_id: try: seafile_api.mkdir_with_parents(repo_id, '/', asset_dir_path[1:], api_token_obj.generated_by) except Exception as e: logger.error(e) error_msg = 'Internal Server Error' return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) # get token obj_id = json.dumps({'parent_dir': asset_dir_path}) try: token = seafile_api.get_fileserver_access_token(repo_id, obj_id, 'upload', '', use_onetime=False) except Exception as e: logger.error(e) error_msg = 'Internal Server Error' return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) upload_link = gen_file_upload_url(token, 'upload-api') api_token_obj.update_last_access() res = dict() res['upload_link'] = upload_link res['parent_path'] = asset_dir_path return Response(res)
def post(self, request, repo_id, format=None):
    """Create or rename a directory.

    The target path comes from the 'p' query parameter; the action from the
    'operation' POST field: 'mkdir' (optionally with create_parents) or
    'rename' (with 'newname').
    """
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        return api_error(status.HTTP_404_NOT_FOUND, 'Library not found.')

    path = request.GET.get('p', '')
    if not path or path[0] != '/':
        error_msg = 'p invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if path == '/':
        error_msg = 'Can not make or rename root dir.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # normalize away a trailing slash
    if path[-1] == '/':
        path = path[:-1]

    username = request.user.username
    parent_dir = os.path.dirname(path)
    operation = request.POST.get('operation', '')

    if operation.lower() == 'mkdir':
        parent_dir = os.path.dirname(path)
        if check_folder_permission(request, repo_id, parent_dir) != 'rw':
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        create_parents = request.POST.get('create_parents', '').lower() in ('true', '1')
        if not create_parents:
            # parent must already exist in this mode
            parent_dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_dir)
            if not parent_dir_id:
                error_msg = 'Folder %s not found.' % parent_dir
                return api_error(status.HTTP_404_NOT_FOUND, error_msg)

            new_dir_name = os.path.basename(path)
            # auto-rename on name collision
            new_dir_name = check_filename_with_rename(repo_id, parent_dir, new_dir_name)
            try:
                seafile_api.post_dir(repo_id, parent_dir, new_dir_name, username)
            except SearpcError as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
        else:
            # recursive creation is a pro-only feature
            if not is_pro_version():
                error_msg = 'Feature not supported.'
                return api_error(status.HTTP_403_FORBIDDEN, error_msg)
            try:
                seafile_api.mkdir_with_parents(repo_id, '/', path[1:], username)
            except SearpcError as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        if request.GET.get('reloaddir', '').lower() == 'true':
            resp = reloaddir(request, repo, parent_dir)
        else:
            resp = Response({'success': True})
        return resp

    elif operation.lower() == 'rename':
        dir_id = seafile_api.get_dir_id_by_path(repo_id, path)
        if not dir_id:
            error_msg = 'Folder %s not found.' % path
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        if check_folder_permission(request, repo.id, path) != 'rw':
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        parent_dir = os.path.dirname(path)
        old_dir_name = os.path.basename(path)

        newname = request.POST.get('newname', '')
        if not newname:
            error_msg = 'newname invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # renaming to the same name is a no-op
        if newname == old_dir_name:
            return Response({'success': True})

        try:
            # rename duplicate name
            checked_newname = check_filename_with_rename(repo_id, parent_dir, newname)
            # rename dir
            seafile_api.rename_file(repo_id, parent_dir, old_dir_name, checked_newname, username)
            return Response({'success': True})
        # NOTE(review): Python 2-only except syntax; the mkdir branch above
        # uses the 'as e' form — consider unifying.
        except SearpcError, e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
def test_file_property_and_dir_listing():
    """End-to-end exercise of file/dir query APIs against a fresh repo.

    Builds a repo containing /test.txt, /test_dir and /test_dir/test.txt,
    then asserts file-property, listing, size and counting calls; the repo
    is removed at the end. Assertion order matters — later asserts reuse
    ids produced earlier.
    """
    t_repo_version = 1
    t_repo_id = api.create_repo('test_file_property_and_dir_listing', '', USER, passwd=None)

    create_the_file()

    api.post_file(t_repo_id, file_path, '/', file_name, USER)
    api.post_dir(t_repo_id, '/', dir_name, USER)
    api.post_file(t_repo_id, file_path, '/' + dir_name, file_name, USER)

    #test is_valid_filename
    t_valid_file_name = 'valid_filename'
    t_invalid_file_name = '/invalid_filename'
    assert api.is_valid_filename(t_repo_id, t_valid_file_name)
    assert api.is_valid_filename(t_repo_id, t_invalid_file_name) == 0

    #test get_file_id_by_path
    t_file_id = api.get_file_id_by_path(t_repo_id, '/test.txt')
    assert t_file_id

    #test get_dir_id_by_path
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir')
    assert t_dir_id

    #test get_file_size
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id, t_repo_version, t_file_id)

    #test get_dir_size (the dir holds one copy of the same file content)
    t_dir_size = len(file_content)
    assert t_dir_size == api.get_dir_size(t_repo_id, t_repo_version, t_dir_id)

    #test get_file_count_info_by_path
    t_file_count_info = api.get_file_count_info_by_path(t_repo_id, '/')
    assert t_file_count_info.file_count == 2
    assert t_file_count_info.dir_count == 1
    assert t_file_count_info.size == t_file_size + t_dir_size

    #test get_file_id_by_commit_and_path
    t_file_id_tmp = t_file_id
    t_repo = api.get_repo(t_repo_id)
    assert t_repo
    t_commit_id = t_repo.head_cmmt_id
    t_file_id = api.get_file_id_by_commit_and_path(t_repo_id, t_commit_id, '/test.txt')
    assert t_file_id == t_file_id_tmp

    #test get_dirent_by_path
    # S_IFREG | 0644 — regular file with rw-r--r-- (Python 2 octal literals)
    std_file_mode = 0100000 | 0644
    t_dirent_obj = api.get_dirent_by_path(t_repo_id, '/test.txt')
    assert t_dirent_obj
    assert t_dirent_obj.obj_id == t_file_id
    assert t_dirent_obj.obj_name == 'test.txt'
    assert t_dirent_obj.mode == std_file_mode
    assert t_dirent_obj.version == t_repo_version
    assert t_dirent_obj.size == t_file_size
    assert t_dirent_obj.modifier == USER

    #test list_file_by_file_id
    t_block_list = api.list_file_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_blocks_by_file_id
    t_block_list = api.list_blocks_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    #test list_dir_by_dir_id
    t_dir_list = api.list_dir_by_dir_id(t_repo_id, t_dir_id)
    assert len(t_dir_list) == 1

    #test list_dir_by_path
    t_dir_list = api.list_dir_by_path(t_repo_id, '/test_dir')
    assert len(t_dir_list) == 1

    #test get_dir_id_by_commit_and_path
    t_dir_id = api.get_dir_id_by_commit_and_path(t_repo_id, t_commit_id, '/test_dir')
    assert t_dir_id

    #test list_dir_by_commit_and_path
    t_dir_list = api.list_dir_by_commit_and_path(t_repo_id, t_commit_id, '/test_dir')
    assert len(t_dir_list) == 1

    #test list_dir_with_perm
    t_dir_list = api.list_dir_with_perm(t_repo_id, '/test_dir', t_dir_id, USER)
    assert len(t_dir_list) == 1

    #test mkdir_with_parent
    api.mkdir_with_parents(t_repo_id, '/test_dir', 'test_subdir', USER)
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir/test_subdir')
    assert t_dir_id

    #test get_total_storage (only this repo exists, so totals must match)
    t_total_size = api.get_total_storage()
    t_repo_size = api.get_repo_size(t_repo_id)
    assert t_total_size == t_repo_size

    #get_total_file_number (give the counter a moment to update)
    time.sleep(1)
    assert api.get_total_file_number() == 2

    api.remove_repo(t_repo_id)
def post(self, request, workspace_id, name):
    """ upload a plugin *.zip file

    1. check params, perms and resources
    2. read info from zip file, and extract zip in TMP_EXTRACTED_PATH
    3. create file in asset dir, and delete TMP_EXTRACTED_PATH
    4. record in database

    There are two tmp files in this api.
    First is django upload tmp file, it will be removed automatically.
    Second is extracted folder 'TMP_EXTRACTED_PATH', we removed it manually.

    Two sources are supported: a direct zip upload, or (from_market=true)
    a zip downloaded from the SeaTable plugin market.

    permission: workspace owner or admin
    """
    # use TemporaryFileUploadHandler, which contains TemporaryUploadedFile
    # TemporaryUploadedFile has temporary_file_path() method
    # in order to change upload_handlers, we must exempt csrf check
    request.upload_handlers = [TemporaryFileUploadHandler(request=request)]

    table_name = name
    from_market = request.data.get('from_market', 'false').lower()

    workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
    if not workspace:
        error_msg = 'Workspace %s not found.' % workspace_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # group-owned workspaces: the owning group must still exist
    if '@seafile_group' in workspace.owner:
        group_id = workspace.owner.split('@')[0]
        group = ccnet_api.get_group(int(group_id))
        if not group:
            error_msg = 'Group %s not found.' % group_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    dtable = DTables.objects.get_dtable(workspace, table_name)
    if not dtable:
        error_msg = 'DTable %s not found.' % table_name
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    username = request.user.username
    permission = check_dtable_admin_permission(username, workspace.owner)
    if not permission:
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    repo_id = workspace.repo_id
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    if from_market not in ['true', 'false']:
        # from_market invalid
        error_msg = 'from_market invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    if from_market == 'true':
        """
        if we add plugin from market
        1. get plugin_download_url from market by plugin_name
        2. download plugin zip by plugin_download_url
        3. extract zip in TMP_EXTRACTED_PATH
        4. create file in asset dir, and delete TMP_EXTRACTED_PATH
        5. record in database
        """
        plugin_name = request.data.get('plugin_name', '')
        if not plugin_name:
            error_msg = 'plugin_name invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if DTablePlugins.objects.filter(name=plugin_name, dtable=dtable).count() > 0:
            error_msg = _('Plugin with name %s is already in dtable %s.') % (plugin_name, dtable.name)
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # get plugin_download_url from market by plugin_name
        # download plugin zip by plugin_download_url
        seamarket_plugin_api_url = SEATABLE_MARKET_URL.rstrip('/') + '/api/plugins/' + plugin_name + '/'
        res = requests.get(seamarket_plugin_api_url)
        download_url = json.loads(res.content).get('download_url', '')
        if not download_url:
            error_msg = 'plugin %s not found.' % plugin_name
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        plugin_zip_file_response = requests.get(download_url)
        # NOTE(review): os.mkdir raises if a previous run left this
        # directory behind (it is only removed after a successful extract)
        os.mkdir('/tmp/plugin_download_from_market')
        tmp_zip_path = '/tmp/plugin_download_from_market/plugin_zip'
        with open(tmp_zip_path, 'wb') as f:
            f.write(plugin_zip_file_response.content)

        # extract zip in TMP_EXTRACTED_PATH
        with ZipFile(tmp_zip_path, 'r') as zip_file:
            folder_path = get_folder_path(zip_file.namelist())
            try:
                info_json_str = zip_file.read(os.path.join(folder_path, INFO_FILE_NAME))
            except Exception:
                error_msg = _('"info.json" not found.')
                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
            zip_file.extractall(TMP_EXTRACTED_PATH)
        shutil.rmtree('/tmp/plugin_download_from_market')

        # create file in asset dir, and delete TMP_EXTRACTED_PATH
        # if no plugins path, create it
        plugin_path = '/asset/' + str(dtable.uuid) + '/plugins/'
        plugin_path_id = seafile_api.get_dir_id_by_path(repo_id, plugin_path)
        if not plugin_path_id:
            try:
                seafile_api.mkdir_with_parents(repo_id, '/', plugin_path[1:], username)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        # if asset dir has plugin with same name, we replace old with new
        if seafile_api.get_dir_id_by_path(repo_id, os.path.join(plugin_path, plugin_name)):
            delete_plugin_asset_folder(repo_id, username, os.path.join(plugin_path, plugin_name))

        # create path and file
        try:
            create_plugin_asset_files(repo_id, username, plugin_name, plugin_path, folder_path)
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        # remove extracted tmp file
        shutil.rmtree(TMP_EXTRACTED_PATH)

        # 4. record in database
        plugin_record = DTablePlugins.objects.create(
            dtable=dtable,
            added_by=username,
            added_time=datetime.now(),
            name=plugin_name,
            info=info_json_str
        )
        return Response(plugin_record.to_dict())

    # ---- direct upload flow ----
    # 1. check params
    plugin_file = request.FILES.get('plugin', None)
    if not plugin_file:
        error_msg = 'plugin invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # size is in bytes; '>> 20' converts to MiB, limit is 300 MiB
    if plugin_file.size >> 20 > 300:
        error_msg = _('File is too large.')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # 2. read info from zip file, and extract zip in TMP_EXTRACTED_PATH
    uploaded_temp_path = plugin_file.temporary_file_path()
    if not is_zipfile(uploaded_temp_path):
        error_msg = _('A zip file is required.')
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    with ZipFile(uploaded_temp_path, 'r') as zip_file:
        folder_path = get_folder_path(zip_file.namelist())
        try:
            info_json_str = zip_file.read(os.path.join(folder_path, INFO_FILE_NAME))
            info = json.loads(info_json_str)
        except Exception:
            error_msg = _('"info.json" not found.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        # the plugin's entry point must be present in the archive
        try:
            zip_file.read(os.path.join(folder_path, MAINJS_FILE_NAME))
        except Exception:
            error_msg = _('"main.js" not found.')
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        plugin_name = info.get('name', '')
        zip_file.extractall(TMP_EXTRACTED_PATH)

    if DTablePlugins.objects.filter(name=plugin_name, dtable=dtable).count() > 0:
        error_msg = _('Plugin with name %s is already in dtable %s.') % (plugin_name, dtable.name)
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # 3. create file in asset dir, and delete TMP_EXTRACTED_PATH
    # if no plugins path, create it
    plugin_path = '/asset/' + str(dtable.uuid) + '/plugins/'
    plugin_path_id = seafile_api.get_dir_id_by_path(repo_id, plugin_path)
    if not plugin_path_id:
        try:
            seafile_api.mkdir_with_parents(repo_id, '/', plugin_path[1:], username)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    # if asset dir has plugin with same name, we replace old with new
    if seafile_api.get_dir_id_by_path(repo_id, os.path.join(plugin_path, plugin_name)):
        delete_plugin_asset_folder(repo_id, username, os.path.join(plugin_path, plugin_name))

    # create path and file
    try:
        create_plugin_asset_files(repo_id, username, plugin_name, plugin_path, folder_path)
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

    # remove extracted tmp file
    shutil.rmtree(TMP_EXTRACTED_PATH)

    # 4. record in database
    plugin_record = DTablePlugins.objects.create(
        dtable=dtable,
        added_by=username,
        added_time=datetime.now(),
        name=plugin_name,
        info=info_json_str
    )
    return Response(plugin_record.to_dict())
def test_copy_dir(self):
    """Batch-copy two folders from the user's repo into a repo shared by
    the admin, then verify both landed at their destination paths.

    Skipped unless a local pro dev environment is available.
    """
    if not LOCAL_PRO_DEV_ENV:
        return

    self.login_as(self.user)

    # create two folders in src repo
    src_folder_1 = self.get_random_path()
    src_folder_2 = self.get_random_path()
    for path in [src_folder_1, src_folder_2]:
        seafile_api.mkdir_with_parents(self.repo_id, '/', path.strip('/'), self.user_name)

    # share admin's tmp repo to user
    tmp_repo_id = self.create_new_repo(self.admin_name)
    seafile_api.share_repo(tmp_repo_id, self.admin_name, self.user_name, 'rw')

    # create two folders as parent dirs in dst repo for admin user
    dst_folder_1 = self.get_random_path()
    seafile_api.mkdir_with_parents(tmp_repo_id, '/', dst_folder_1.strip('/'), self.admin_name)
    dst_folder_2 = '/'

    # copy folders
    data = {
        "src_repo_id": self.repo_id,
        "dst_repo_id": tmp_repo_id,
        "paths": [
            {
                "src_path": src_folder_1,
                "dst_path": dst_folder_1
            },
            {
                "src_path": src_folder_2,
                "dst_path": dst_folder_2
            },
        ]
    }
    resp = self.client.post(self.url, json.dumps(data), 'application/json')
    self.assertEqual(200, resp.status_code)

    json_resp = json.loads(resp.content)
    assert len(json_resp['success']) == 2
    assert len(json_resp['failed']) == 0

    # a copied folder exists at <dst_folder>/<basename of src_folder>/
    def folder_exist(src_folder, dst_repo_id, dst_folder):
        src_obj_name = os.path.basename(src_folder.rstrip('/'))
        full_dst_folder_path = posixpath.join(dst_folder.strip('/'), src_obj_name.strip('/'))
        full_dst_folder_path = normalize_dir_path(full_dst_folder_path)
        return seafile_api.get_dir_id_by_path(
            dst_repo_id, full_dst_folder_path) is not None

    assert folder_exist(src_folder_1, tmp_repo_id, dst_folder_1)
    assert folder_exist(src_folder_2, tmp_repo_id, dst_folder_2)

    self.remove_repo(tmp_repo_id)
def get(self, request, workspace_id):
    """Return a file-server upload link (plus relative asset paths) for a dtable.

    Permission:
    1. owner
    2. group member
    3. shared user with `rw` or `admin` permission
    """
    # argument check
    table_name = request.GET.get('name', None)
    if not table_name:
        return api_error(status.HTTP_400_BAD_REQUEST, 'name invalid.')

    # resource check
    workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
    if not workspace:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'Workspace %s not found.' % workspace_id)

    repo_id = workspace.repo_id
    if not seafile_api.get_repo(repo_id):
        return api_error(status.HTTP_404_NOT_FOUND,
                         'Library %s not found.' % repo_id)

    dtable = DTables.objects.get_dtable(workspace, table_name)
    if not dtable:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'dtable %s not found.' % table_name)

    # permission check
    username = request.user.username
    if check_dtable_permission(username, workspace, dtable) not in WRITE_PERMISSION_TUPLE:
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    # quota check
    if not check_user_workspace_quota(workspace):
        return api_error(HTTP_443_ABOVE_QUOTA, 'Asset quota exceeded.')

    # make sure the per-dtable asset directory exists before issuing a token
    asset_dir_path = os.path.join('/asset', str(dtable.uuid))
    if not seafile_api.get_dir_id_by_path(repo_id, asset_dir_path):
        seafile_api.mkdir_with_parents(repo_id, '/', asset_dir_path[1:], username)

    # get a reusable (non-onetime) upload token scoped to the asset dir
    obj_id = json.dumps({'parent_dir': asset_dir_path})
    try:
        token = seafile_api.get_fileserver_access_token(
                repo_id, obj_id, 'upload', '', use_onetime=False)
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         'Internal Server Error')

    dtable.modifier = username
    dtable.save()

    # uploads are grouped by month, e.g. '<prefix>/2020-05'
    month = str(datetime.today())[:7]
    return Response({
        'upload_link': gen_file_upload_url(token, 'upload-api'),
        'parent_path': asset_dir_path,
        'img_relative_path': os.path.join(UPLOAD_IMG_RELATIVE_PATH, month),
        'file_relative_path': os.path.join(UPLOAD_FILE_RELATIVE_PATH, month),
    })
def post(self, request):
    """Create multiple folders in a single request.

    Permission checking:
    1. user with `rw` permission for every layer of subdirectories.

    Parameter:
    {
        "repo_id": "4dfdf5b6-806f-4a35-b2b7-604051d2114e",
        "paths": ["/1/2/", "/3/4/", "/5/6"]
    }

    Returns {'success': [...], 'failed': [...]} where each entry carries
    the repo_id and the original path (failed entries also carry error_msg).
    """
    # argument check
    path_list = request.data.get('paths', None)
    if not path_list:
        return api_error(status.HTTP_400_BAD_REQUEST, 'paths invalid.')

    repo_id = request.data.get('repo_id', None)
    if not repo_id:
        return api_error(status.HTTP_400_BAD_REQUEST, 'repo_id invalid.')

    # resource check
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'Library %s not found.' % repo_id)

    # permission check
    if check_folder_permission(request, repo_id, '/') != 'rw':
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    result = {
        'failed': [],
        'success': [],
    }
    username = request.user.username

    for path in path_list:

        common_dict = {
            'repo_id': repo_id,
            'path': path,
        }

        path = normalize_dir_path(path)

        # Validate every path component. Bug fix: the old code used
        # `continue` inside this inner loop, which only skipped the bad
        # component — the same failure dict was appended once per bad
        # component and the folder was still created afterwards. Now the
        # first invalid component aborts handling of the whole path.
        error_msg = None
        for obj_name in path.strip('/').split('/'):
            try:
                # check if path is valid
                is_valid_name = seafile_api.is_valid_filename(
                        'fake_repo_id', obj_name)
            except Exception as e:
                logger.error(e)
                error_msg = 'Internal Server Error'
                break
            if not is_valid_name:
                error_msg = 'path invalid.'
                break

        if error_msg:
            common_dict.update({'error_msg': error_msg})
            result['failed'].append(common_dict)
            continue

        if seafile_api.get_dir_id_by_path(repo_id, path):
            common_dict.update({'error_msg': 'Folder already exists.'})
            result['failed'].append(common_dict)
            continue

        # check parent directory's permission
        parent_dir = os.path.dirname(path.rstrip('/'))
        try:
            permission = get_folder_permission_recursively(
                    username, repo_id, parent_dir)
        except Exception as e:
            logger.error(e)
            common_dict.update({'error_msg': 'Internal Server Error'})
            result['failed'].append(common_dict)
            continue

        if permission != 'rw':
            common_dict.update({'error_msg': 'Permission denied.'})
            result['failed'].append(common_dict)
            continue

        try:
            # create the folder (and any missing parents)
            seafile_api.mkdir_with_parents(repo_id, '/',
                                           path.strip('/'), username)
        except Exception as e:
            logger.error(e)
            common_dict.update({'error_msg': 'Internal Server Error'})
            result['failed'].append(common_dict)
            continue

        result['success'].append(common_dict)

    return Response(result)