def get(self, request, repo_id):
    """Return paginated history for a file in a library.

    Query params:
        path (required): file path; may be a renamed file's old path.
        page / per_page: pagination controls (defaults 1 / 25, min 1).
        avatar_size: avatar pixel size for the history entries (default 32).

    Returns a DRF Response: {"data": [...], "page": int, "total_count": int},
    or an api_error response (400/403/404/500).
    """
    # argument check
    path = request.GET.get('path', '')
    if not path:
        error_msg = 'path invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # resource check
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    commit_id = repo.head_cmmt_id

    # Parse each numeric parameter independently: the original shared a
    # single try/except, so one malformed value silently reset all three.
    try:
        avatar_size = int(request.GET.get('avatar_size', 32))
    except ValueError:
        avatar_size = 32

    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1

    try:
        per_page = int(request.GET.get('per_page', '25'))
    except ValueError:
        per_page = 25

    # Clamp to sane minimums so the computed offset can never go negative.
    page = max(page, 1)
    per_page = max(per_page, 1)

    # Don't use seafile_api.get_file_id_by_path()
    # if path parameter is `rev_renamed_old_path`.
    # seafile_api.get_file_id_by_path() will return None.
    file_id = seafile_api.get_file_id_by_commit_and_path(
        repo_id, commit_id, path)
    if not file_id:
        error_msg = 'File %s not found.' % path
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    if not check_folder_permission(request, repo_id, '/'):
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    start = (page - 1) * per_page
    count = per_page

    try:
        file_revisions, total_count = get_file_history(
            repo_id, path, start, count)
    except Exception as e:
        # exception() keeps the traceback, unlike the original error(e)
        logger.exception(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    data = [
        get_new_file_history_info(ent, avatar_size)
        for ent in file_revisions
    ]
    result = {"data": data, "page": page, "total_count": total_count}

    return Response(result)
def get(self, request, repo_id):
    """Get file history within certain commits.
    Controlled by path(rev_renamed_old_path), commit_id and next_start_commit.
    """
    # The path parameter is mandatory.
    path = request.GET.get('path', '')
    if not path:
        return api_error(status.HTTP_400_BAD_REQUEST, 'path invalid.')

    # The library must exist.
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'Library %s not found.' % repo_id)

    commit_id = repo.head_cmmt_id

    # Pagination / avatar parameters all fall back to their defaults
    # when any one of them fails to parse.
    try:
        avatar_size = int(request.GET.get('avatar_size', 32))
        page = int(request.GET.get('page', '1'))
        per_page = int(request.GET.get('per_page', '25'))
    except ValueError:
        avatar_size = 32
        page = 1
        per_page = 25

    # Don't use seafile_api.get_file_id_by_path()
    # if path parameter is `rev_renamed_old_path`.
    # seafile_api.get_file_id_by_path() will return None.
    file_id = seafile_api.get_file_id_by_commit_and_path(repo_id,
                                                         commit_id,
                                                         path)
    if not file_id:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'File %s not found.' % path)

    # The caller needs at least read permission on the library root.
    if not check_folder_permission(request, repo_id, '/'):
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    offset = (page - 1) * per_page

    try:
        file_revisions, total_count = get_file_history(repo_id, path,
                                                       offset, per_page)
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         'Internal Server Error')

    history = []
    for revision in file_revisions:
        history.append(get_new_file_history_info(revision, avatar_size))

    return Response({
        "data": history,
        "page": page,
        "total_count": total_count,
    })
def get(self, request, repo_id):
    """Return file history within certain commits (KEEPER variant).

    Controlled by path (may be a renamed file's old path), commit_id and
    next_start_commit. Revisions older than the library's history
    retention window (keep_days) are excluded from the response.

    Query params:
        path (required): file path.
        commit_id: commit to start traversal from (default: library head).
        avatar_size: avatar pixel size (default 32).
        limit: max revisions to fetch; KEEPER defaults to -1 (unlimited),
            unlike upstream's default of 50.

    Returns {"data": [...], "next_start_commit": ...} or an api_error.
    """
    # argument check
    path = request.GET.get('path', '')
    if not path:
        error_msg = 'path invalid.'
        return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

    # resource check
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # Start from the requested commit, or from the library head.
    commit_id = request.GET.get('commit_id', '')
    if not commit_id:
        commit_id = repo.head_cmmt_id

    try:
        avatar_size = int(request.GET.get('avatar_size', 32))
    except ValueError:
        avatar_size = 32

    # Don't use seafile_api.get_file_id_by_path()
    # if path parameter is `rev_renamed_old_path`.
    # seafile_api.get_file_id_by_path() will return None.
    file_id = seafile_api.get_file_id_by_commit_and_path(
        repo_id, commit_id, path)
    if not file_id:
        error_msg = 'File %s not found.' % path
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    if not check_folder_permission(request, repo_id, '/'):
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # get repo history limit (days; -1 presumably means "keep forever" —
    # consistent with the -1 check in the loop below)
    try:
        keep_days = seafile_api.get_repo_history_limit(repo_id)
    except Exception as e:
        # exception() keeps the traceback, unlike the original error(e)
        logger.exception(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    #### KEEPER
    # get file history; default -1 (unlimited) instead of upstream's 50.
    # (Removed the dead commented-out upstream default-50 block.)
    limit = request.GET.get('limit', -1)
    try:
        limit = -1 if int(limit) < 1 else int(limit)
    except ValueError:
        limit = -1

    try:
        file_revisions, next_start_commit = get_file_revisions_within_limit(
            repo_id, path, commit_id, limit)
    except Exception as e:
        logger.exception(e)
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    result = []
    present_time = datetime.utcnow()
    for commit in file_revisions:
        history_time = datetime.utcfromtimestamp(commit.ctime)
        # Stop at the first revision outside the retention window and
        # tell the client there is nothing further to page through.
        if (keep_days != -1) and (
                (present_time - history_time).days > keep_days):
            next_start_commit = False
            break

        info = get_file_history_info(commit, avatar_size)
        info['path'] = path
        result.append(info)

    return Response({
        "data": result,
        "next_start_commit": next_start_commit or False
    })
def get(self, request, workspace_id, name, commit_id):
    """Get dtable snapshot by commit_id.

    Resolves the workspace, library and dtable, verifies the caller's
    dtable permission, validates that the snapshot identified by
    ``commit_id`` belongs to this dtable, then returns a one-time
    fileserver download URL for the snapshot file.

    Returns the redirect URL in a DRF Response, or an api_error
    (403/404/500).
    """
    table_name = name
    # Snapshot file on disk is "<table name><FILE_TYPE>" (extension).
    table_file_name = table_name + FILE_TYPE

    # resource check
    workspace = Workspaces.objects.get_workspace_by_id(workspace_id)
    if not workspace:
        error_msg = 'Workspace %s not found.' % workspace_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # Group-owned workspaces encode the owner as "<group_id>@seafile_group";
    # make sure that group still exists.
    if '@seafile_group' in workspace.owner:
        group_id = workspace.owner.split('@')[0]
        group = ccnet_api.get_group(int(group_id))
        if not group:
            error_msg = 'Group %s not found.' % group_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    repo_id = workspace.repo_id
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        error_msg = 'Library %s not found.' % repo_id
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    dtable = DTables.objects.get_dtable(workspace, table_name)
    if not dtable:
        error_msg = 'dtable %s not found.' % table_name
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # check for get download link
    # (the current table file must exist in the library)
    table_path = normalize_file_path(table_file_name)
    table_file_id = seafile_api.get_file_id_by_path(repo_id, table_path)
    if not table_file_id:
        error_msg = 'file %s not found.' % table_file_name
        return api_error(status.HTTP_404_NOT_FOUND, error_msg)

    # permission check
    username = request.user.username
    if not check_dtable_permission(username, workspace, dtable):
        error_msg = 'Permission denied.'
        return api_error(status.HTTP_403_FORBIDDEN, error_msg)

    # main
    try:
        snapshot = DTableSnapshot.objects.get_by_commit_id(commit_id)
        # check
        if not snapshot:
            error_msg = 'commit_id not found.'
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # The snapshot record must belong to this dtable; otherwise a
        # valid commit_id from another dtable could leak its contents.
        dtable_uuid = str(dtable.uuid.hex)
        if dtable_uuid != snapshot.dtable_uuid:
            error_msg = 'commit_id invalid.'
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # get by commit
        # Look the file up in the historical commit, not the current head,
        # using the name recorded on the snapshot (it may differ from the
        # current table name, e.g. after a rename).
        snapshot_table_path = normalize_file_path(snapshot.dtable_name)
        obj_id = seafile_api.get_file_id_by_commit_and_path(
            repo_id, commit_id, snapshot_table_path)
        if not obj_id:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'snapshot not found.')
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         'Internal Server Error')

    # download url
    # One-time token: the link is consumed by a single download.
    token = seafile_api.get_fileserver_access_token(
        repo_id, obj_id, 'download', username, FILESERVER_TOKEN_ONCE_ONLY)
    if not token:
        error_msg = 'Internal Server Error'
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

    redirect_url = gen_file_get_url(token, snapshot.dtable_name)
    return Response(redirect_url)
def test_file_property_and_dir_listing():
    """Exercise file/dir metadata and listing APIs against a fresh library.

    Creates a repo containing one file at the root and one file inside a
    directory, then checks id lookup, size accounting, dirent properties,
    listings (by id, path and commit), mkdir_with_parents, and the global
    storage / file-count counters. Cleans up by removing the repo.
    """
    t_repo_version = 1
    t_repo_id = api.create_repo('test_file_property_and_dir_listing', '',
                                USER, passwd=None)

    create_the_file()

    api.post_file(t_repo_id, file_path, '/', file_name, USER)
    api.post_dir(t_repo_id, '/', dir_name, USER)
    api.post_file(t_repo_id, file_path, '/' + dir_name, file_name, USER)

    # test is_valid_filename
    t_valid_file_name = 'valid_filename'
    t_invalid_file_name = '/invalid_filename'
    assert api.is_valid_filename(t_repo_id, t_valid_file_name)
    assert api.is_valid_filename(t_repo_id, t_invalid_file_name) == 0

    # test get_file_id_by_path
    t_file_id = api.get_file_id_by_path(t_repo_id, '/test.txt')
    assert t_file_id

    # test get_dir_id_by_path
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir')
    assert t_dir_id

    # test get_file_size
    t_file_size = len(file_content)
    assert t_file_size == api.get_file_size(t_repo_id, t_repo_version,
                                            t_file_id)

    # test get_dir_size
    # the dir holds one copy of the same file, so sizes match
    t_dir_size = len(file_content)
    assert t_dir_size == api.get_dir_size(t_repo_id, t_repo_version,
                                          t_dir_id)

    # test get_file_count_info_by_path
    t_file_count_info = api.get_file_count_info_by_path(t_repo_id, '/')
    assert t_file_count_info.file_count == 2
    assert t_file_count_info.dir_count == 1
    assert t_file_count_info.size == t_file_size + t_dir_size

    # test get_file_id_by_commit_and_path
    t_file_id_tmp = t_file_id
    t_repo = api.get_repo(t_repo_id)
    assert t_repo
    t_commit_id = t_repo.head_cmmt_id
    t_file_id = api.get_file_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                   '/test.txt')
    assert t_file_id == t_file_id_tmp

    # test get_dirent_by_path
    # S_IFREG | 0644 — regular file with rw-r--r-- permissions.
    # BUGFIX: the legacy literals `0100000 | 0644` are Python 2 octal
    # syntax and a SyntaxError on Python 3; `0o...` works on both.
    std_file_mode = 0o100000 | 0o644
    t_dirent_obj = api.get_dirent_by_path(t_repo_id, '/test.txt')
    assert t_dirent_obj
    assert t_dirent_obj.obj_id == t_file_id
    assert t_dirent_obj.obj_name == 'test.txt'
    assert t_dirent_obj.mode == std_file_mode
    assert t_dirent_obj.version == t_repo_version
    assert t_dirent_obj.size == t_file_size
    assert t_dirent_obj.modifier == USER

    # test list_file_by_file_id
    t_block_list = api.list_file_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    # test list_blocks_by_file_id
    t_block_list = api.list_blocks_by_file_id(t_repo_id, t_file_id)
    assert t_block_list

    # test list_dir_by_dir_id
    t_dir_list = api.list_dir_by_dir_id(t_repo_id, t_dir_id)
    assert len(t_dir_list) == 1

    # test list_dir_by_path
    t_dir_list = api.list_dir_by_path(t_repo_id, '/test_dir')
    assert len(t_dir_list) == 1

    # test get_dir_id_by_commit_and_path
    t_dir_id = api.get_dir_id_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert t_dir_id

    # test list_dir_by_commit_and_path
    t_dir_list = api.list_dir_by_commit_and_path(t_repo_id, t_commit_id,
                                                 '/test_dir')
    assert len(t_dir_list) == 1

    # test list_dir_with_perm
    t_dir_list = api.list_dir_with_perm(t_repo_id, '/test_dir', t_dir_id,
                                        USER)
    assert len(t_dir_list) == 1

    # test mkdir_with_parent
    api.mkdir_with_parents(t_repo_id, '/test_dir', 'test_subdir', USER)
    t_dir_id = api.get_dir_id_by_path(t_repo_id, '/test_dir/test_subdir')
    assert t_dir_id

    # test get_total_storage
    t_total_size = api.get_total_storage()
    t_repo_size = api.get_repo_size(t_repo_id)
    assert t_total_size == t_repo_size

    # get_total_file_number
    # give the async stats updater a moment before reading the counter
    time.sleep(1)
    assert api.get_total_file_number() == 2

    api.remove_repo(t_repo_id)
def BloxbergCertView(request, transaction_id, checksum=''):
    """View bloxberg certificate(s).

    For a directory certificate, renders the repo-snapshot page with a
    path→checksum map of all child certificates; for a file certificate,
    renders the certificate detail page, handling three metadata schemas
    (2.0+ lowercase keys, pre-2.0 capitalized keys, and a minimal
    name/owner fallback). Only the repo owner may view; raises Http404
    when the library is gone.
    """
    certificate = BCertificate.objects.get_presentable_certificate(
        transaction_id, checksum)
    repo_id = certificate.repo_id
    repo = get_repo(repo_id)
    if not repo:
        raise Http404

    username = request.user.username
    repo_owner = get_repo_owner(repo_id)
    if repo_owner != username:
        return render_error(request, _('Permission denied'))

    try:
        server_crypto = UserOptions.objects.is_server_crypto(username)
    except CryptoOptionNotSetError:
        # Assume server_crypto is ``False`` if this option is not set.
        server_crypto = False

    # Encrypted libraries whose password has not been supplied yet get the
    # decrypt form first.
    reverse_url = reverse('lib_view', args=[repo_id, repo.name, ''])
    if repo.encrypted and \
            (repo.enc_version == 1 or
             (repo.enc_version == 2 and server_crypto)) \
            and not is_password_set(repo.id, username):
        return render(
            request, 'decrypt_repo_form.html', {
                'repo': repo,
                'next': get_next_url_from_request(request) or reverse_url,
            })

    if certificate.content_type == 'dir':
        commit_id = certificate.commit_id
        current_commit = get_commit(repo.id, repo.version, commit_id)
        if not current_commit:
            # Certified commit no longer resolvable; fall back to head.
            current_commit = get_commit(repo.id, repo.version,
                                        repo.head_cmmt_id)

        certificates = BCertificate.objects.get_children_bloxberg_certificates(
            transaction_id, repo_id)
        checksum_map = {}
        # Distinct loop variable: the original reused `certificate`,
        # shadowing the outer certificate object.
        for child_cert in certificates:
            checksum_map[str(child_cert.path)] = child_cert.checksum

        return render(
            request, 'bloxberg_repo_snapshot_react.html', {
                'repo': repo,
                'current_commit': current_commit,
                'transaction_id': transaction_id,
                'checksums': json.dumps(checksum_map),
            })
    else:
        md_json = json.loads(certificate.md)
        pdf_url = SERVICE_URL + "/api2/bloxberg-pdf/" + transaction_id + \
            "/" + checksum + "/?p=" + quote_plus(certificate.path)
        metadata_url = SERVICE_URL + "/api2/bloxberg-metadata/" + \
            transaction_id + "/" + checksum + "/?p=" + \
            quote_plus(certificate.path)

        # Link to the exact certified revision if it still exists.
        history_file_url = ""
        obj_id = seafile_api.get_file_id_by_commit_and_path(
            repo_id, certificate.commit_id, certificate.path)
        if obj_id is not None:
            history_file_url = "/repo/" + repo_id + "/" + obj_id + \
                "/download/?file_name=" + \
                quote_plus(certificate.content_name) + "&p=" + \
                quote_plus(certificate.path)

        if md_json.get('authors'):
            authors = get_authors_from_catalog_md(md_json)
            return render(
                request, './catalog_detail/bloxberg_cert_page.html', {
                    'repo_name':
                    md_json.get('title'),
                    'repo_desc':
                    md_json.get('description')
                    if md_json.get('description') else '',
                    # BUGFIX: the condition previously tested the
                    # capitalized 'Institute' key while reading the
                    # lowercase 'institute' one, so the institute was
                    # dropped whenever only the 2.0+ lowercase key existed.
                    'institute':
                    md_json.get('institute')
                    if md_json.get('institute') else '',
                    'authors':
                    authors,
                    'year':
                    md_json.get('year'),
                    'transaction_id':
                    certificate.transaction_id,
                    'pdf_url':
                    pdf_url,
                    'metadata_url':
                    metadata_url,
                    'history_file_url':
                    history_file_url
                })
        elif md_json.get(
                'Title'):  # backwards compatible(certificates created before 2.0)
            return render(
                request, './catalog_detail/bloxberg_cert_page.html', {
                    'repo_name':
                    md_json.get('Title'),
                    'repo_desc':
                    md_json.get('Description')
                    if md_json.get('Description') else '',
                    'institute':
                    md_json.get('Institute')
                    if md_json.get('Institute') else '',
                    'authors':
                    md_json.get('Author'),
                    'year':
                    md_json.get('Year'),
                    'transaction_id':
                    certificate.transaction_id,
                    'pdf_url':
                    pdf_url,
                    'metadata_url':
                    metadata_url,
                    'history_file_url':
                    history_file_url
                })
        else:
            # Minimal fallback schema: only name/owner are expected.
            return render(
                request, './catalog_detail/bloxberg_cert_page.html', {
                    'repo_name':
                    md_json.get('name'),
                    'repo_desc':
                    md_json.get('Description')
                    if md_json.get('Description') else '',
                    'institute':
                    md_json.get('Institute')
                    if md_json.get('Institute') else '',
                    'authors':
                    md_json.get('owner'),
                    'year':
                    '',
                    'transaction_id':
                    certificate.transaction_id,
                    'pdf_url':
                    pdf_url,
                    'metadata_url':
                    metadata_url,
                    'history_file_url':
                    history_file_url
                })
def get(self, request, repo_id):
    """Get file history within certain commits.
    Controlled by path(rev_renamed_old_path), commit_id and next_start_commit.
    """
    # A path is required.
    path = request.GET.get('path', '')
    if not path:
        return api_error(status.HTTP_400_BAD_REQUEST, 'path invalid.')

    # The library must exist.
    repo = seafile_api.get_repo(repo_id)
    if not repo:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'Library %s not found.' % repo_id)

    # Traverse from the requested commit, falling back to the head commit.
    commit_id = request.GET.get('commit_id', '') or ''
    if not commit_id:
        commit_id = repo.head_cmmt_id

    try:
        avatar_size = int(request.GET.get('avatar_size', 32))
    except ValueError:
        avatar_size = 32

    # Don't use seafile_api.get_file_id_by_path()
    # if path parameter is `rev_renamed_old_path`.
    # seafile_api.get_file_id_by_path() will return None.
    file_id = seafile_api.get_file_id_by_commit_and_path(repo_id,
                                                         commit_id,
                                                         path)
    if not file_id:
        return api_error(status.HTTP_404_NOT_FOUND,
                         'File %s not found.' % path)

    # The caller needs read permission on the library root.
    if not check_folder_permission(request, repo_id, '/'):
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')

    # How many revisions to fetch; anything unparseable or < 1 becomes 50.
    raw_limit = request.GET.get('limit', 50)
    try:
        limit = int(raw_limit)
        if limit < 1:
            limit = 50
    except ValueError:
        limit = 50

    try:
        file_revisions, next_start_commit = get_file_revisions_within_limit(
            repo_id, path, commit_id, limit)
    except Exception as e:
        logger.error(e)
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         'Internal Server Error')

    entries = []
    for revision in file_revisions:
        entry = get_file_history_info(revision, avatar_size)
        entry['path'] = path
        entries.append(entry)

    return Response({
        "data": entries,
        "next_start_commit": next_start_commit or False
    })