def stats_data(request):
    """Return JSON histogram data for the admin stats dashboard.

    Query-string inputs: ``histogram_type``, ``interval`` (default "week"),
    ``datefield``, ``individual_domain_limit[]``, ``enddate``, plus ES
    filter params under the ``es_`` prefix.
    """
    histo_type = request.GET.get('histogram_type')
    interval = request.GET.get("interval", "week")
    datefield = request.GET.get("datefield")
    # GET values arrive as strings; coerce so the len() comparisons below
    # are numeric (string-vs-int comparison was a latent bug here).
    individual_domain_limit = int(
        request.GET.get("individual_domain_limit[]") or 16)
    if not request.GET.get("enddate"):
        # datespan should include up to the current day when unspecified
        request.datespan.enddate += timedelta(days=1)
    params, __ = parse_args_for_es(request, prefix='es_')
    if histo_type == "domains":
        return json_response(get_domain_stats_data(
            params, request.datespan, interval=interval, datefield=datefield))
    if params:
        domain_results = es_domain_query(
            params, fields=["name"], size=99999, show_stats=False)
        domains = [d["fields"]["name"] for d in domain_results["hits"]["hits"]]
        if len(domains) <= individual_domain_limit:
            # few enough matches: chart each domain individually
            domain_info = [{"names": [d], "display_name": d} for d in domains]
        elif len(domains) < ES_MAX_CLAUSE_COUNT:
            # aggregate all matching domains into one series
            domain_info = [{
                "names": list(domains),
                "display_name": _("Domains Matching Filter"),
            }]
        else:
            # too many clauses for an ES query: fall back to all domains
            domain_info = [{
                "names": None,
                "display_name": _("All Domains (NOT applying filters. > %s projects)" % ES_MAX_CLAUSE_COUNT),
            }]
    else:
        domain_info = [{"names": None, "display_name": _("All Domains")}]
    stats_data = get_stats_data(
        domain_info, histo_type, request.datespan, interval=interval)
    return json_response(stats_data)
def run_query(self, size=None):
    """Run this view's ES query, cache response/facets on ``self``, and
    return the raw ES response.

    Sets ``es_params``, ``es_facet_map``, ``es_response`` and flips
    ``es_queried`` to True as a side effect.
    """
    parsed, _ = parse_args_for_es(self.request, prefix=self.es_prefix)
    self.es_params = parsed
    response = self.es_query(parsed, size)
    self.es_facet_map = fill_mapping_with_facets(
        self.es_facet_mapping, response, parsed)
    self.es_response = response
    self.es_queried = True
    return response
def stats_data(request):
    """Return JSON histogram data for the requested ``histogram_type``.

    Extra stats kwargs and ES domain-filter params may arrive as an
    HTML-escaped JSON blob in the ``get_request_params`` GET parameter.
    """
    histo_type = request.GET.get('histogram_type')
    interval = request.GET.get("interval", "week")
    datefield = request.GET.get("datefield")
    raw_extra_params = request.GET.get("get_request_params", None)
    if raw_extra_params is not None:
        # the blob is HTML-escaped JSON; unescape before decoding
        extra_params = json.loads(
            HTMLParser.HTMLParser().unescape(raw_extra_params))
    else:
        extra_params = {}
    # everything except the ES domain filters is forwarded to get_stats_data
    stats_kwargs = {
        key: value for key, value in extra_params.items()
        if key != "domain_params_es"
    }
    if datefield is not None:
        stats_kwargs['datefield'] = datefield
    domain_params_es = extra_params.get("domain_params_es", {})
    if not request.GET.get("enddate"):
        # datespan should include up to the current day when unspecified
        request.datespan.enddate += timedelta(days=1)
    domain_params, __ = parse_args_for_es(request, prefix='es_')
    domain_params.update(domain_params_es)
    domains = get_project_spaces(facets=domain_params)
    return json_response(get_stats_data(
        histo_type, domains, request.datespan, interval, **stats_kwargs))
def stats_data(request):
    """Return JSON histogram data for the requested ``histogram_type``.

    Optional extras (stats kwargs plus ES domain filters) come in as an
    HTML-escaped JSON string under the ``get_request_params`` GET key.
    """
    histo_type = request.GET.get('histogram_type')
    interval = request.GET.get("interval", "week")
    datefield = request.GET.get("datefield")
    encoded = request.GET.get("get_request_params", None)
    extra = {}
    if encoded is not None:
        # decode after HTML-unescaping the raw GET value
        extra = json.loads(HTMLParser.HTMLParser().unescape(encoded))
    # all keys except the ES domain filters pass straight through
    stats_kwargs = {k: v for k, v in extra.items() if k != "domain_params_es"}
    if datefield is not None:
        stats_kwargs['datefield'] = datefield
    domain_params_es = extra.get("domain_params_es", {})
    if not request.GET.get("enddate"):
        # datespan should include up to the current day when unspecified
        request.datespan.enddate += timedelta(days=1)
    domain_params, __ = parse_args_for_es(request, prefix='es_')
    domain_params.update(domain_params_es)
    domains = get_project_spaces(facets=domain_params)
    result = get_stats_data(
        histo_type, domains, request.datespan, interval, **stats_kwargs)
    return json_response(result)
def stats_data(request):
    """Return JSON histogram data for the admin stats dashboard.

    Reads ``histogram_type``, ``interval``, ``datefield`` and
    ``individual_domain_limit[]`` from the query string; ES filter params
    use the ``es_`` prefix.
    """
    histo_type = request.GET.get('histogram_type')
    interval = request.GET.get("interval", "week")
    datefield = request.GET.get("datefield")
    # GET returns strings; without int() the len() comparison below
    # compares int to str (always-true in py2, TypeError in py3).
    individual_domain_limit = int(
        request.GET.get("individual_domain_limit[]") or 16)
    if not request.GET.get("enddate"):
        # datespan should include up to the current day when unspecified
        request.datespan.enddate += timedelta(days=1)
    params, __ = parse_args_for_es(request, prefix='es_')
    if histo_type == "domains":
        return json_response(get_domain_stats_data(
            params, request.datespan, interval=interval, datefield=datefield))
    if params:
        domain_results = es_domain_query(
            params, fields=["name"], size=99999, show_stats=False)
        domains = [d["fields"]["name"] for d in domain_results["hits"]["hits"]]
        if len(domains) <= individual_domain_limit:
            # few enough domains: one series per domain
            domain_info = [{"names": [d], "display_name": d} for d in domains]
        elif len(domains) < ES_MAX_CLAUSE_COUNT:
            # single aggregated series for every matching domain
            domain_info = [{
                "names": list(domains),
                "display_name": _("Domains Matching Filter"),
            }]
        else:
            # would exceed ES clause limit: give up on filtering
            domain_info = [{
                "names": None,
                "display_name": _("All Domains (NOT applying filters. > %s projects)" % ES_MAX_CLAUSE_COUNT),
            }]
    else:
        domain_info = [{"names": None, "display_name": _("All Domains")}]
    stats_data = get_stats_data(
        domain_info, histo_type, request.datespan, interval=interval)
    return json_response(stats_data)
def appstore(request, template="appstore/appstore_base.html"):
    """Render the Exchange app-store listing page.

    Queries ES for published snapshots, sorts per ``sort_by`` ('best',
    'newest', or default hit-sort), paginates 10 per page, and renders
    ``template``. Raises Http404 if a non-superuser asks for unapproved
    snapshots (``is_approved=false``).
    """
    page_length = 10
    # the comparison itself is the boolean; no ternary needed
    include_unapproved = request.GET.get('is_approved', "") == "false"
    if include_unapproved and not request.user.is_superuser:
        raise Http404()
    params, _ = parse_args_for_es(request)
    page = params.pop('page', 1)
    # ES params may be single values or lists; normalize then cast
    page = int(page[0] if isinstance(page, list) else page)
    results = es_snapshot_query(params, SNAPSHOT_FACETS)
    hits = results.get('hits', {}).get('hits', [])
    hits = deduplicate(hits)
    d_results = [Domain.wrap(res['_source']) for res in hits]
    sort_by = request.GET.get('sort_by', None)
    if sort_by == 'best':
        d_results = Domain.popular_sort(d_results)
    elif sort_by == 'newest':
        pass  # keep ES result order (newest first)
    else:
        d_results = Domain.hit_sort(d_results)
    # params to carry across pagination links
    persistent_params = {}
    if sort_by:
        persistent_params["sort_by"] = sort_by
    if include_unapproved:
        persistent_params["is_approved"] = "false"
    persistent_params = urlencode(persistent_params)
    average_ratings = [
        [result.name, Review.get_average_rating_by_app(result.copied_from._id)]
        for result in d_results
    ]
    more_pages = len(d_results) > page * page_length
    facet_map = fill_mapping_with_facets(SNAPSHOT_MAPPING, results, params)
    vals = dict(
        apps=d_results[(page - 1) * page_length:page * page_length],
        page=page,
        prev_page=(page - 1),
        next_page=(page + 1),
        more_pages=more_pages,
        sort_by=sort_by,
        average_ratings=average_ratings,
        include_unapproved=include_unapproved,
        facet_map=facet_map,
        facets=results.get("facets", []),
        query_str=request.META['QUERY_STRING'],
        search_query=params.get('search', [""])[0],
        persistent_params=persistent_params,
    )
    return render(request, template, vals)
def stats_data(request):
    """Return JSON histogram data for the requested ``histogram_type``.

    Extra kwargs and ES domain filters may arrive HTML-escaped as JSON in
    the ``get_request_params`` GET parameter. Responds with HTTP 400 when
    the histogram type is unknown.
    """
    histo_type = request.GET.get('histogram_type')
    interval = request.GET.get("interval", "week")
    datefield = request.GET.get("datefield")
    encoded = request.GET.get("get_request_params", None)
    if encoded is not None:
        # the GET value is HTML-escaped JSON; unescape before decoding
        extra = json.loads(
            six.moves.html_parser.HTMLParser().unescape(encoded))
    else:
        extra = {}
    # forward everything except the ES domain filters to get_stats_data
    stats_kwargs = {k: v for k, v in extra.items() if k != "domain_params_es"}
    if datefield is not None:
        stats_kwargs['datefield'] = datefield
    domain_params_es = extra.get("domain_params_es", {})
    if not request.GET.get("enddate"):
        # datespan should include up to the current day when unspecified
        request.datespan.enddate += timedelta(days=1)
    domain_params, __ = parse_args_for_es(request, prefix='es_')
    domain_params.update(domain_params_es)
    domains = get_project_spaces(facets=domain_params)
    try:
        result = get_stats_data(
            histo_type, domains, request.datespan, interval, **stats_kwargs)
    except HistoTypeNotFoundException:
        return HttpResponseBadRequest(
            'histogram_type param must be one of <ul><li>{}</li></ul>'
            .format('</li><li>'.join(HISTO_TYPE_TO_FUNC)))
    return json_response(result)
def deployments(request, template="appstore/deployments.html"):
    """Render the Exchange deployments listing, 10 per page."""
    params, _ = parse_args_for_es(request)
    # translate request param names into their ES field equivalents
    params = {DEPLOYMENT_MAPPING.get(p, p): params[p] for p in params}
    page = int(params.pop('page', 1))
    results = es_deployments_query(params, DEPLOYMENT_FACETS)
    d_results = [Domain.wrap(res['_source']) for res in results['hits']['hits']]
    more_pages = len(d_results) > page * 10
    facet_map = fill_mapping_with_facets(DEPLOYMENT_MAPPING, results, params)
    # the comparison itself is the boolean; no ternary needed
    include_unapproved = request.GET.get('is_approved', "") == "false"
    vals = {
        'deployments': d_results[(page - 1) * 10:page * 10],
        'page': page,
        'prev_page': page - 1,
        'next_page': (page + 1),
        'more_pages': more_pages,
        'include_unapproved': include_unapproved,
        'facet_map': facet_map,
        'query_str': request.META['QUERY_STRING'],
        'search_url': reverse('deployments'),
        'search_query': params.get('search', [""])[0],
    }
    return render(request, template, vals)
def deployments_api(request):
    """JSON API endpoint returning raw ES deployment query results."""
    params, facets = parse_args_for_es(request)
    # translate request param names into their ES field equivalents
    params = {DEPLOYMENT_MAPPING.get(p, p): params[p] for p in params}
    results = es_deployments_query(params, facets)
    return HttpResponse(json.dumps(results), content_type="application/json")
def params(self):
    """Parsed ES request params with names mapped via DEPLOYMENT_MAPPING."""
    raw, _ = parse_args_for_es(self.request)
    # dict comprehension instead of dict([(k, v) for ...])
    return {DEPLOYMENT_MAPPING.get(p, p): raw[p] for p in raw}
def params(self):
    """Parsed ES request params for this view, minus pagination."""
    parsed, _ = parse_args_for_es(self.request)
    parsed.pop('page', None)  # pagination is handled separately
    return parsed
def appstore_api(request):
    """JSON API endpoint returning raw ES snapshot query results.

    Fix: ``HttpResponse`` takes ``content_type`` — the ``mimetype`` kwarg
    was removed in Django 1.7 and would raise TypeError; this also matches
    the sibling ``deployments_api`` view.
    """
    params, facets = parse_args_for_es(request)
    results = es_snapshot_query(params, facets)
    return HttpResponse(json.dumps(results), content_type="application/json")
def appstore(request, template="appstore/appstore_base.html"):
    """Render the Exchange app-store listing page.

    Queries ES for published snapshots, drops snapshots whose copied_from
    domain was deleted (notifying on the exception), sorts unless
    ``sort_by=newest``, paginates 10 per page, and renders ``template``.
    Raises Http404 if a non-superuser requests unapproved snapshots.
    """
    page_length = 10
    # the comparison itself is the boolean; no ternary needed
    include_unapproved = request.GET.get('is_approved', "") == "false"
    if include_unapproved and not request.user.is_superuser:
        raise Http404()
    params, _ = parse_args_for_es(request)
    page = params.pop('page', 1)
    # ES params may be single values or lists; normalize then cast
    page = int(page[0] if isinstance(page, list) else page)
    results = es_snapshot_query(params, SNAPSHOT_FACETS)
    hits = results.get('hits', {}).get('hits', [])
    hits = deduplicate(hits)
    d_results = []
    for res in hits:
        try:
            domain = Domain.wrap(res['_source'])
            if domain.copied_from is not None:
                # this avoids putting in snapshots in the list where the
                # copied_from domain has been deleted.
                d_results.append(domain)
        except CopiedFromDeletedException as e:
            notify_exception(
                request,
                message=(
                    "Fetched Exchange Snapshot Error: {}. "
                    "The problem snapshot id: {}".format(
                        e.message, res['_source']['_id'])
                )
            )
    starter_apps = request.GET.get('is_starter_app', None)
    sort_by = request.GET.get('sort_by', None)
    # 'newest' keeps ES result order; everything else falls back to hit_sort
    if sort_by != 'newest':
        d_results = Domain.hit_sort(d_results)
    # params to carry across pagination links
    persistent_params = {}
    if sort_by:
        persistent_params["sort_by"] = sort_by
    if include_unapproved:
        persistent_params["is_approved"] = "false"
    persistent_params = urlencode(persistent_params)
    more_pages = len(d_results) > page * page_length
    facet_map = fill_mapping_with_facets(SNAPSHOT_MAPPING, results, params)
    vals = dict(
        apps=d_results[(page - 1) * page_length:page * page_length],
        page=page,
        prev_page=(page - 1),
        next_page=(page + 1),
        more_pages=more_pages,
        sort_by=sort_by,
        show_starter_apps=starter_apps,
        include_unapproved=include_unapproved,
        facet_map=facet_map,
        facets=results.get("facets", []),
        query_str=request.META['QUERY_STRING'],
        search_query=params.get('search', [""])[0],
        persistent_params=persistent_params,
    )
    return render(request, template, vals)