def make_user(pk, schemas, force):
    """Return a user payload projected through *schemas*, served from cache.

    A fresh query is issued when *force* is truthy or the key is absent.
    """
    # NOTE(review): key prefix is "term__" for a user entry — looks like a
    # copy-paste from make_term; confirm before changing (cache keys are live).
    cache_key = "term__{}".format(pk)
    if not force and cache_key in cache:
        data = cache.get(cache_key)
    else:
        data = query_maker.query_user(pk, {})
        cache.set(cache_key, data, timeout=CACHE_TTL * 12)
    return clone_dict(data, schemas, None)
def make_term(hostname, pk, schemas, force):
    """Return a term payload (with its public URL) projected through *schemas*.

    Cached per (hostname, pk); *force* bypasses the cache.
    """
    cache_key = "{}__term_{}".format(hostname, pk)
    if not force and cache_key in cache:
        data = cache.get(cache_key)
    else:
        data = query_maker.query_term(pk)
        # Attach the site-relative URL before caching so it is cached too.
        data["to"] = get_url_pattern(hostname, "taxonomy", data)
        cache.set(cache_key, data, timeout=CACHE_TTL * 12)
    return clone_dict(data, schemas, None)
def public_init(request, app_host):
    """Public bootstrap endpoint: cached init payload plus a null user slot."""
    # Fall back to a minimal projection when the client sends no schema.
    schema = request.data.get("schema") or ["id"]
    force = request.GET.get("force") == "true"
    payload = {
        "p": caching.make_init(force, app_host),
        "u": None,
    }
    return Response(clone_dict(payload, schemas=schema))
def public_post(request, app_host, slug):
    """Public post-detail endpoint for *slug*, projected through the client schema."""
    schema = request.data.get("schema") or ["id"]
    force = request.GET.get("force") == "true"
    detail_query = {
        "show_cms": True,
        "master": True,
        "uid": request.GET.get("uid", False),
    }
    out = caching.make_post(force,
                            host_name=app_host,
                            index=slug,
                            query=detail_query)
    return Response(clone_dict(out, schemas=schema, out=None))
def maker_pub(hostname, query, force):
    """Return the cached publication payload for *hostname*.

    When query["is_page"] is truthy, each taxonomy is expanded into its term
    list; query["schema"] (if any) projects the final dict.
    """
    cache_key = "{}_{}".format("pub", hostname)
    if not force and cache_key in cache:
        data = cache.get(cache_key)
    else:
        data = query_maker.query_publication(hostname)
        cache.set(cache_key, data, timeout=CACHE_TTL * 12)
    # Guard against stale cache entries that hold a raw Publication model
    # instead of the dict form: rebuild and overwrite.
    if type(data) is Publication:
        data = query_maker.query_publication(hostname)
        cache.set(cache_key, data, timeout=CACHE_TTL * 12)
    taxonomies = [opt.get("label")
                  for opt in data.get("options").get("taxonomies")]
    if query.get("is_page"):
        for taxonomy in taxonomies:
            data[taxonomy] = make_terms(hostname, {"taxonomy": taxonomy}, force)
    if query and query.get("schema"):
        data = clone_dict(data, query.get("schema"), None)
    return data
def public_page(request, app_host):
    """Public paged archive endpoint."""
    schema = request.data.get("schema") or ["id"]
    # Original expression `(X or {}) if X is not None else {}` collapses to
    # `X or {}`: every branch yields {} whenever X is falsy.
    params = request.data.get("param") or {}
    page_size = params.get('page_size', 10)
    page = params.get('page', 1)
    archive_query = {
        "terms": params.get("terms"),
        "post_type": params.get("post_type"),
        "page_size": page_size,
        # 1-based page number converted to a row offset.
        "offset": page_size * page - page_size,
        "order": params.get("order", "popular"),
        "full": params.get("full", None),
    }
    out = caching.make_page(request.GET.get("force") == "true",
                            app_host,
                            query=archive_query)
    return Response(clone_dict(out, schemas=schema, out=None))
def make_post(hostname, pk, schemas, force):
    """Return a post-detail dict (cached), expanding nested relations.

    Args:
        hostname: publication host used for cache keys and URL building.
        pk: post primary key (or instance reference accepted by the query layer).
        schemas: projection list; an entry is either a plain string ("user")
            meaning "use the default nested schema", or a single-key dict
            ({"user": [...]}) carrying an explicit nested schema.
        force: when True, bypass the cache and re-query.

    Returns:
        dict projected through *schemas*, with "user", "terms" and "related"
        expanded recursively when requested and present in the data.
    """
    def _nested_schema(key, fallback):
        # Resolve the sub-schema for a relation: a bare string entry selects
        # the default schema; a dict entry supplies one explicitly.
        entry = schemas[flat_schemas.index(key)]
        # isinstance instead of `type(...) is str` (idiomatic type check).
        return fallback if isinstance(entry, str) else entry[key]

    ch_key = "{}__post_{}".format(hostname, pk)
    if force or ch_key not in cache:
        data = query_maker.query_post_detail(pk)
        # Attach the public URL before caching so it is cached too.
        data["to"] = get_url_pattern(hostname, "post_type", data)
        cache.set(ch_key, data, timeout=CACHE_TTL * 12)
    else:
        data = cache.get(ch_key)
    out_data = clone_dict(data, schemas, None)
    flat_schemas = [flat_schema(s) for s in schemas]
    if "user" in flat_schemas and data.get("user_id"):
        user_schema = _nested_schema("user", default_schemas.USER_DETAIL)
        out_data["user"] = make_user(data.get("user_id"), user_schema, False)
    if "terms" in flat_schemas and data.get("terms"):
        term_schema = _nested_schema("terms", default_schemas.TERM_LIST)
        out_data["terms"] = [
            make_term(hostname, term, term_schema, False)
            for term in data["terms"]
        ]
    if "related" in flat_schemas and data.get("related"):
        related_schema = _nested_schema("related", default_schemas.POST_LIST)
        # Recursive expansion; nested posts are never force-refreshed.
        out_data["related"] = [
            make_post(hostname, rel, related_schema, False)
            for rel in data["related"]
        ]
    return out_data
def graph_v2(request):
    """Single-query graph endpoint (v2): dispatch on query["type"].

    Supported types: post_list | post_detail | term_list | home |
    term_detail | user_detail. Params live in request.data["query"];
    projection in request.data["schemas"].
    """
    hostname = request.GET.get("host", None)
    if hostname is None:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    query = request.data.get("query")
    schemas = request.data.get("schemas")
    force = filter_query.query_boolean(request.GET.get("force"), False)
    query_type = query.get("type")
    out = {}
    if query_type == "post_list":
        out = caching_v2.make_posts(hostname, query=query, force=force)
    elif query_type == "post_detail":
        detail_query = {
            "instance": query.get("value"),
            "is_page": True,
            "is_pid": query.get("is_pid"),
        }
        out = caching_v2.make_post(hostname, detail_query, force=force)
    elif query_type == "term_list":
        out = caching_v2.make_terms(hostname, query=query, force=force)
    elif query_type == "home":
        out = caching_v2.make_home(hostname, query=query, force=force)
    elif query_type == "term_detail":
        term_query = {
            "instance": query.get("value"),
            "taxonomy": query.get("taxonomy"),
            "order": query.get("order"),
            "post_type": query.get("post_type"),
            "page": query.get("page"),
            "is_page": True,
        }
        out = caching_v2.make_term(hostname, term_query, force=force)
    elif query_type == "user_detail":
        out = caching_v2.make_user(hostname, query, force=force)
    return Response(clone_dict(out, schemas, None))
def fetch_taxonomies(request, app_id):
    """CMS endpoint: paged, filterable list of a publication's terms.

    GET params: search, page, page_size, taxonomy, related_terms (CSV of ids).

    Fixes:
      - query params were read via ``request.get(...)``, which crashes
        (HttpRequest has no ``.get``); now ``request.GET.get(...)`` as in
        the sibling ``fetch_posts`` view.
      - the search condition is parenthesized so ``&`` no longer binds
        before ``|`` — previously a description match bypassed the
        show_cms/publication filter entirely.
    """
    pub = Publication.objects.get(pk=app_id)
    if request.method == "GET":
        search = request.GET.get('search')
        page_size = 10 if request.GET.get('page_size') is None else int(
            request.GET.get('page_size'))
        page = 1 if request.GET.get('page') is None else int(
            request.GET.get('page'))
        taxonomy = request.GET.get("taxonomy")
        related = request.GET.get('related_terms').split(
            ",") if request.GET.get('related_terms') else None
        q = Q(show_cms=True, publication_id=app_id)
        if taxonomy:
            q = q & Q(taxonomy=taxonomy)
        if related:
            q = q & Q(related__id__in=related)
        if search:
            # Parentheses keep both OR branches scoped to the base filter.
            q = q & (Q(term__title__icontains=search)
                     | Q(term__description__icontains=search))
        queryset = models.PublicationTerm.objects.order_by("-id").values_list(
            "id", flat=True).filter(q)
        paginator = Paginator(queryset, page_size)
        terms = list(paginator.page(page).object_list)
        results = [
            clone_dict(
                caching_v2.make_term(pub.host, {"instance": item}, False),
                ["id", "term", "taxonomy"], None)
            for item in terms
        ]
        return Response(status=status.HTTP_200_OK,
                        data={
                            "results": results,
                            "count": queryset.count()
                        })
def graph(request):
    """Batched graph endpoint (v1).

    POST body carries a list under "query"; each item is a sub-query dict:
      q: query kind (post_detail | post_list | archive | term_list |
         term_detail | user_detail)
      p: params dict for that sub-query
      o: output key under which the result is stored in the response
      s: projection schema list (defaults to ["id"])
    Results are accumulated into one dict and returned together.
    """
    hostname = request.GET.get("host", None)
    # NOTE(review): `user` is computed but never read below — looks dead.
    user = request.user.id if request.user.is_authenticated else None
    if hostname is None:
        return Response(status=status.HTTP_400_BAD_REQUEST)
    if request.method == "POST":
        out = {}
        query = request.data.get("query")
        force = filter_query.query_boolean(request.GET.get("force"), False)
        for q in query:
            # INIT — per-sub-query defaults.
            params = q.get("p") or {}
            schemas = q.get("s") or ["id"]
            instance_related = None
            instance_post_related = None
            pub_term = None
            # PREPARE — resolve referenced instances shared by several kinds.
            if params.get("related"):
                instance_related = fetch_instance(hostname,
                                                  params.get("related"), False)
            if params.get("post_related"):
                instance_post_related = fetch_instance(
                    hostname, params.get("post_related"), False)
            if params.get("taxonomy") and params.get("term"):
                pub_term = models.PublicationTerm.objects.filter(
                    taxonomy=params.get("taxonomy"),
                    publication__host=hostname,
                    term__slug=params.get("term")).first()
            # HANDLE QUERY — independent `if`s, so one item could in
            # principle match several kinds (each writes to its own "o" key).
            if q.get("q") == "post_detail":
                instance = None
                if params.get("slug"):
                    instance = fetch_instance(hostname, params.get("slug"),
                                              params.get("pid"))
                out[q.get("o")] = clone_dict(
                    caching.make_post(force, hostname, instance,
                                      {"master": True}), schemas, None)
            if q.get("q") == "post_list":
                excluded_posts = []
                if pub_term and pub_term.meta is not None:
                    # NOTE(review): default None (not []) when the key is
                    # missing — downstream must accept None; confirm.
                    excluded_posts = pub_term.meta.get("excluded_posts", None)
                page_size = params.get('page_size', 10)
                page = params.get('page', 1)
                out[q.get("o")] = clone_dict(
                    caching.make_post_list(
                        force, hostname, {
                            "page_size": page_size,
                            # 1-based page converted to a row offset.
                            "offset": page_size * page - page_size,
                            "post_type": params.get("post_type"),
                            "post_related": instance_post_related,
                            "related": instance_related,
                            "master": True,
                            "order": params.get("order", "newest"),
                            "term": params.get("term"),
                            "taxonomy": params.get("taxonomy"),
                            "reverse": params.get("reverse"),
                            "user_id": params.get("user"),
                            "show_cms": params.get("show_cms", None),
                            "publications": params.get("publications"),
                            "excluded_posts": excluded_posts
                        }), schemas, None)
            if q.get("q") == "archive":
                page_size = params.get('page_size', 10)
                page = params.get('page', 1)
                out[q.get("o")] = clone_dict(
                    caching.make_page(force, hostname,
                                      query={
                                          "post_related": instance_post_related,
                                          "terms": params.get("terms"),
                                          "post_type": params.get("post_type"),
                                          "page_size": page_size,
                                          "term_page_size": params.get("term_page_size"),
                                          "offset": page_size * page - page_size,
                                          "order": params.get("order", "popular"),
                                          "full": params.get("full", None),
                                          "is_related_expanded": params.get("expanded_related"),
                                          "user_id": params.get("user"),
                                          "show_cms": params.get("show_cms", None),
                                          "publications": params.get("publications")
                                      }), schemas, None)
            if q.get("q") == "term_list":
                related = params.get("related", None)
                # Fallback: resolve "related" from a (term slug, taxonomy) pair.
                if related is None and params.get(
                        "related_term") and params.get("related_taxonomy"):
                    test = PublicationTerm.objects.filter(
                        publication__host=hostname,
                        term__slug=params.get("related_term"),
                        taxonomy=params.get("related_taxonomy")).first()
                    if test:
                        related = test.id
                page_size = params.get('page_size', 10)
                page = params.get('page', 1)
                out[q.get("o")] = clone_dict(
                    caching.make_term_list(force, hostname,
                                           query={
                                               "search": params.get("search"),
                                               "taxonomy": params.get("taxonomy"),
                                               "page_size": page_size,
                                               "offset": page_size * page - page_size,
                                               "order": params.get("order", "popular"),
                                               "related": related,
                                               "reverse": params.get("reverse", False),
                                               "show_cms": params.get("show_cms", None),
                                               "publications": params.get("publications"),
                                               "featured": params.get("featured", None),
                                           }), schemas, None)
            if q.get("q") == "term_detail":
                pub_term_id = params.get("id", None)
                # Fall back to resolving the id from (taxonomy, term slug).
                if pub_term_id is None:
                    if params.get("taxonomy") and params.get("term"):
                        pt = PublicationTerm.objects.filter(
                            publication__host=hostname,
                            taxonomy=params.get("taxonomy"),
                            term__slug=params.get("term")).first()
                        if pt is not None:
                            pub_term_id = pt.id
                out[q.get("o")] = clone_dict(
                    caching.make_term(force, pub_term_id, True), schemas, None)
            if q.get("q") == "user_detail":
                out[q.get("o")] = clone_dict(
                    query_user(params.get("user"), {}),
                    schemas, None)
        return Response(out)
def fetch_posts(request, app_id):
    """CMS endpoint: list (GET) or guest-create (POST) posts for a publication.

    GET params: search, page, page_size, user, post_type, terms (CSV),
    related_posts (CSV).

    Fix: in the ``post_related_add`` loop the loop variable was a raw pk,
    yet ``p.id`` / ``p.make_posts(...)`` were called on it (AttributeError
    on int); the fetched Post object is now used, mirroring the parallel
    ``terms_add`` loop.
    """
    pub = Publication.objects.get(pk=app_id)
    if request.method == "GET":
        search = request.GET.get('search')
        page_size = 10 if request.GET.get('page_size') is None else int(
            request.GET.get('page_size'))
        page = 1 if request.GET.get('page') is None else int(
            request.GET.get('page'))
        user_id = request.GET.get("user")
        post_type = request.GET.get("post_type")
        terms = request.GET.get("terms").split(",") if request.GET.get(
            "terms") else []
        related_posts = request.GET.get("related_posts").split(
            ",") if request.GET.get("related_posts") else []
        q = Q(show_cms=True, primary_publication_id=app_id)
        if user_id:
            q = q & Q(user_id=user_id)
        if post_type:
            q = q & Q(post_type=post_type)
        if terms:
            q = q & Q(terms__id__in=terms)
        if search:
            q = q & (Q(title__icontains=search)
                     | Q(description__icontains=search))
        if related_posts:
            q = q & Q(post_related__id__in=related_posts)
        queryset = models.Post.objects.order_by("-id").values_list(
            "id", flat=True).filter(q)
        paginator = Paginator(queryset, page_size)
        posts = list(paginator.page(page).object_list)
        results = [
            clone_dict(
                caching_v2.make_post(pub.host, {"instance": post}, False),
                ["title", "id", "post_type", "description", "media", "slug"],
                None)
            for post in posts
        ]
        return Response(status=status.HTTP_200_OK,
                        data={
                            "results": results,
                            "count": queryset.count()
                        })
    if request.method == "POST":
        if pub.options.get("allow_guess_post", False):
            meta = request.data.get("meta", {})
            post = Post.objects.create(
                primary_publication=pub,
                status="POSTED",
                # Auto-publish when the publication allows it, or when a
                # staff member is posting.
                show_cms=pub.options.get("auto_guess_public", False)
                or (request.user.is_authenticated and request.user.is_staff),
                post_type=request.data.get("post_type", "article"),
                title=request.data.get("title", "Untitled"),
                description=request.data.get("description"),
                content=request.data.get("content"),
                user=request.user if request.user.is_authenticated else None,
                meta=meta,
                is_guess_post=True)
            if request.data.get("post_related_add", None) is not None:
                for related_pk in request.data.get("post_related_add", None):
                    related_post = Post.objects.get(pk=related_pk)
                    post.post_related.add(related_post)
                    if pub.options.get("auto_guess_public", False):
                        for order in ["p", "n"]:
                            # Fixed: use the fetched object, not the raw pk.
                            key_path_post = "term_{}_{}_{}".format(
                                related_post.id, post.post_type, order)
                            ids = related_post.make_posts(post.post_type, order)
                            cache.set(key_path_post, list(ids),
                                      timeout=CACHE_TTL)
            if request.data.get("terms_add", None) is not None:
                pub_terms = PublicationTerm.objects.filter(
                    id__in=request.data.get("terms_add", []))
                for term in pub_terms:
                    post.terms.add(term)
                    if pub.options.get("auto_guess_public", False):
                        for order in ["p", "n"]:
                            # Refresh the per-term post-id cache.
                            key_path_post = "term_{}_{}_{}".format(
                                term.id, post.post_type, order)
                            ids = term.make_posts(post.post_type, order)
                            cache.set(key_path_post, list(ids),
                                      timeout=CACHE_TTL)
            if request.user.is_authenticated:
                actions.follow(request.user, post)
            if pub.options.get("auto_guess_public", False):
                # Refresh the publication-level post-id caches as well.
                for order in ["p", "n"]:
                    key_path = "{}_{}_{}".format(post.primary_publication.host,
                                                 post.post_type, order)
                    ids = post.primary_publication.make_posts(
                        post.post_type, order)
                    cache.set(key_path, ids, timeout=CACHE_TTL * 12)
            return Response(status=status.HTTP_200_OK,
                            data=caching.make_post(True, None, str(post.id),
                                                   {"master": True}))
        return Response(status=status.HTTP_401_UNAUTHORIZED)