def get_external_author_posts(request, encoded_url):
    """Fetch one page of an external author's posts via their home server.

    ``encoded_url`` is the author's full URL, urlsafe-base64 encoded.
    Query params: ``size`` (1..100) and 0-based ``page``.

    Returns a DRF ``Response``:
      200 with {query, count, size, posts, (page links)} on success,
      400 for invalid query params or an unknown server,
      502 when the remote fetch fails.
    """
    author_url = base64.urlsafe_b64decode(encoded_url).decode("utf8")
    size = int(request.query_params.get("size", DEFAULT_POST_PAGE_SIZE))
    # The API uses 0-based pages; Django's Paginator is 1-based.
    page = int(request.query_params.get('page', 0)) + 1
    # BUG FIX: the original tested `page < 0` AFTER the +1 shift, so a
    # ?page=-1 request slipped through and crashed in Paginator.page(0).
    # `page < 1` rejects every negative query page, matching get_home_feed.
    if size < 1 or page < 1 or size > 100:
        return Response({
            "success": False,
            "message": "The query parameters were invalid",
            "query": "posts"
        }, 400)
    external_host_url = author_url.split("/author/")[0]
    sUtil = ServerUtil(authorUrl=external_host_url)
    if not sUtil.valid_server():
        print("authorUrl found, but not in DB", external_host_url)
        return Response({
            "query": "posts",
            "message": "Could not find server",
            "success": False
        }, 400)
    # Forward the requester's URL (if logged in) so the remote server can
    # apply its own visibility rules; anonymous requesters send None.
    requester_url = request.user.author.get_url() if request.user.is_authenticated else None
    success, fetched_posts = sUtil.get_posts_by_author(
        author_url.split("/author/")[1], requester_url)
    if not success:
        return Response({
            "query": "posts",
            "message": "Could not fetch posts",
            "success": False
        }, 502)
    pages = Paginator(fetched_posts["posts"], size)
    current_page = pages.page(page)
    response = {
        "query": "posts",
        "count": pages.count,
        "size": size,
        "posts": current_page.object_list
    }
    # Helper expects the 0-based page index, hence `page - 1`.
    add_page_details_to_response(request, response, current_page, page - 1)
    return Response(response, status=200)
def get_author_summaries(authorUrls):
    """Build display summaries for a mixed list of author URLs.

    Each summary is a dict of {id, host, url, displayName}. External
    authors are resolved one-by-one against their home servers (failures
    are skipped); local authors are collected and resolved in a single
    database query afterwards.
    """
    summaries = []
    local_ids = []
    for author_url in authorUrls:
        if not is_external_host(author_url):
            # Local author — defer to the bulk DB lookup below.
            local_ids.append(get_author_id(author_url))
            continue
        # External author: find the server that owns this URL.
        server = ServerUtil(authorUrl=author_url)
        if not server.valid_server():
            # We couldn't find a server that matches the friend URL base.
            print("authorUrl found, but not in DB", author_url)
            continue
        # Ask the external server about the id portion of the URL.
        ok, info = server.get_author_info(author_url.split("/author/")[1])
        if not ok:
            continue  # Fetch from the external server failed; skip.
        # PITA Point: some servers don't store their IDs as the actual
        # location where you can GET the author summary — just use the ID
        # (the URL itself) for "id" and "url"; HOST remains the correct
        # location for the service.
        summaries.append({
            "id": author_url,
            "host": server.get_base_url(),
            "url": author_url,
            "displayName": info["displayName"]
        })
    # Resolve every local author with one queryset.
    host = get_host_url()
    for author in Author.objects.filter(pk__in=local_ids):
        url = get_author_url(str(author.id))
        summaries.append({
            "id": url,
            "host": host,
            "url": url,
            "displayName": author.get_display_name()
        })
    return summaries
def get_external_author_summary(authorUrl):
    """Fetch a single external author's summary from their home server.

    Returns {id, host, url, displayName}; on any failure (unknown server,
    failed remote fetch, or an unexpected error) the same keys are
    returned with empty-string values.
    """
    try:
        # Locate the server that owns this author URL.
        server = ServerUtil(authorUrl=authorUrl)
        if not server.valid_server():
            print("authorUrl found, but not in DB", authorUrl)
            raise Exception("Invalid server")
        # Ask that server about the id portion of the URL.
        ok, info = server.get_author_info(authorUrl.split("/author/")[1])
        if not ok:
            raise Exception("Could not get author details")
    except Exception as err:
        # Broad on purpose: any failure degrades to an empty summary.
        print(err)
        return {"id": "", "host": "", "url": "", "displayName": ""}
    return {
        "id": authorUrl,
        "host": server.get_base_url(),
        "url": authorUrl,
        "displayName": info["displayName"]
    }
def get_home_feed(self, request):
    """Return the paginated home feed for the requesting user.

    Authenticated users see their own posts, posts from followed local
    authors (visibility-filtered), posts fetched live from followed
    external authors, and their GitHub activity, merged and sorted.
    Anonymous users see only public, listed posts.

    Query params: ``size`` (1..100) and 0-based ``page``.
    """
    size = int(request.query_params.get("size", DEFAULT_POST_PAGE_SIZE))
    queryPage = int(request.query_params.get('page', 0))
    if size < 1 or queryPage < 0 or size > 100:
        return Response(
            {
                "success": False,
                "message": "The query parameters were invalid",
                "query": "homeFeed"
            }, 400)
    if request.user.is_authenticated:
        requester_url = request.user.author.get_url()
        # Start with the requester's own (listed) posts.
        posts = Posts.objects.filter(author=request.user.author,
                                     unlisted=False)
        followed = Follow.objects.filter(follower=requester_url)
        localFollowedIds = []
        externalPosts = []
        for follow in followed:
            if (is_external_host(follow.followed)):
                external_host_url = follow.followed.split("/author/")[0]
                sUtil = ServerUtil(authorUrl=external_host_url)
                if not sUtil.valid_server():
                    print("authorUrl found, but not in DB",
                          external_host_url)
                    continue  # We couldn't find a server that matches the friend URL base
                # split the id from the URL and ask the external server about them
                success, fetched_posts = sUtil.get_posts_by_author(
                    follow.followed.split("/author/")[1], requester_url)
                if not success:
                    continue  # We couldn't successfully fetch from an external server
                externalPosts += fetched_posts["posts"]
            else:
                localFollowedIds.append(get_author_id(follow.followed))
        # Union in followed local authors' posts: non-private ones, plus
        # private posts that explicitly list the requester in visibleTo.
        posts |= Posts.objects.filter(
            author__id__in=localFollowedIds,
            unlisted=False).exclude(visibility="PRIVATE")
        posts |= Posts.objects.filter(author__id__in=localFollowedIds,
                                      unlisted=False,
                                      visibility="PRIVATE",
                                      visibleTo__contains=[requester_url])
        # Second-pass visibility check on the unioned queryset.
        viewable_posts = []
        for post in posts:
            if (can_user_view(request.user, post)):
                viewable_posts.append(post)
        # Interleave the user's GitHub activity with their feed; from here
        # on `posts` is a list, not a queryset.
        github_stream = get_github_activity(request.user.author)
        posts = merge_posts_with_github_activity(viewable_posts,
                                                 github_stream)
    else:
        # Anonymous: public, listed posts only; no external fetches.
        posts = Posts.objects.filter(visibility__in=["PUBLIC"],
                                     unlisted=False)
        externalPosts = []
    try:
        # don't look at this
        if (len(externalPosts) > 0):
            # Merge path: serialize everything first, then combine with the
            # already-serialized external posts, sort newest-first, and
            # paginate the combined list.
            serializer = PostsSerializer(posts,
                                         many=True,
                                         context={'request': request})
            posts_to_return = serializer.data
            sorted_posts = sorted(externalPosts + posts_to_return,
                                  key=lambda x: x["published"],
                                  reverse=True)
            paginator = Paginator(sorted_posts, size)
            page = paginator.page(queryPage + 1)  # Paginator is 1-based
            posts_to_return = page.object_list
        else:
            # Local-only path: paginate first, serialize just one page.
            paginator = Paginator(posts, size)
            page = paginator.page(queryPage + 1)
            serializer = PostsSerializer(page,
                                         many=True,
                                         context={'request': request})
            posts_to_return = serializer.data
    except Exception as e:
        # Broad catch: any pagination/serialization error (e.g. an
        # out-of-range page) degrades to an empty result list.
        print(e)
        posts_to_return = []
    data = {
        "query": "homeFeed",
        "success": True,
        # NOTE(review): on the merge path `count` is len() of the local
        # posts only and excludes externalPosts — looks inconsistent with
        # what is returned; confirm intended.
        "count": len(posts),
        "posts": posts_to_return,
        "size": size
    }
    if len(posts_to_return) > 0:
        # `page` is only bound when pagination succeeded above.
        add_page_details_to_response(request, data, page, queryPage)
    return Response(data)