def _make_queryset(q: QuerySet, search: dict, fields_paramname: str,
                   exclude_paramname: str, orderby_paramname: str,
                   distinct_paramname: str, keyval: str, keyor: str) -> QuerySet:
    """Refine a queryset with filter/exclude/order-by/distinct clauses taken
    from a search specification.

    :param q: base queryset to refine.
    :param search: mapping holding clause values under the ``*_paramname`` keys.
    :param fields_paramname: key whose value (dict or Q) selects rows to keep.
    :param exclude_paramname: key whose value (dict or Q) selects rows to drop.
    :param orderby_paramname: key whose value (list or scalar) orders the result.
    :param distinct_paramname: key whose value (list or scalar) deduplicates rows.
    :param keyval: forwarded to ``_make_query`` for dict-value handling.
    :param keyor: forwarded to ``_make_query`` for OR-combination handling.
    :return: the refined queryset.
    """
    filters = search.get(fields_paramname)
    if isinstance(filters, dict):
        # _make_query is used here as a validity check on the dict; the raw
        # dict itself is applied as keyword filters when the check succeeds.
        # NOTE(review): if _make_query is meant to build the actual clause,
        # this should be q.filter(query) instead — confirm with its callers.
        query = _make_query(filters, keyval, keyor)
        if query is not None:
            q = q.filter(**filters)
    elif isinstance(filters, Q):
        q = q.filter(filters)

    excludes = search.get(exclude_paramname)
    if isinstance(excludes, dict):
        query = _make_query(excludes, keyval, keyor)
        if query is not None:
            q = q.exclude(**excludes)
    elif isinstance(excludes, Q):
        q = q.exclude(excludes)

    ordering = search.get(orderby_paramname)
    if isinstance(ordering, list):
        q = q.order_by(*ordering)
    elif ordering is not None:
        # Fix: a missing parameter was previously stringified to "None" and
        # passed to order_by(), referencing a nonexistent field.
        q = q.order_by(str(ordering))

    dedup = search.get(distinct_paramname)
    if isinstance(dedup, list):
        q = q.distinct(*dedup)
    elif dedup is not None:
        # Fix: same missing-parameter guard as for order_by above.
        q = q.distinct(str(dedup))
    return q
def order_by(self, *field_names):
    """Order the queryset; a 'title' field is sorted case-insensitively.

    When 'title' appears among the requested fields, a computed
    ``lower_title`` column (``lower(title)``) is selected and substituted
    for it so ordering ignores case.
    """
    if 'title' not in field_names:
        return QuerySet.order_by(self, *field_names)
    # Swap the raw column for its lowercased counterpart in every field name.
    lowered = [name.replace('title', 'lower_title') for name in field_names]
    qs = self.extra(select={'lower_title': 'lower(title)'})
    return QuerySet.order_by(qs, *lowered)
def apply_sort(cls, args: dict, qs: QuerySet) -> QuerySet:
    """Return *qs* ordered by the 'sort' entry of *args*, or unchanged when absent."""
    sort_field = args.get('sort')
    return qs.order_by(sort_field) if sort_field else qs
def sort(cls, sort_param=None, collection: QuerySet = None) -> QuerySet:
    """ Retrieve a sorted queryset

    :param rest_framework.serializers.SerializerMetaclass cls: A class object
    :param str sort_param: A comma-separated list of properties to sort by
    :param django.db.models.query.QuerySet collection: A collection of model objects
    :return: A sorted queryset
    :rtype: django.db.models.query.QuerySet
    """
    # Nothing to sort by: hand the collection back untouched.
    if not sort_param:
        return collection
    # Dots address related fields, so translate them to Django's "__" syntax
    # and drop empty entries produced by stray commas.
    sort_fields = list(filter(None, sort_param.replace(".", "__").split(",")))
    # Validate the sort fields actually exist in the model.  Build a fresh
    # tuple instead of "+=": the old in-place add mutated cls.Meta's list,
    # appending another "id" entry on every call.
    field_names = tuple(getattr(cls.Meta, "sort_fields", cls.Meta.fields)) + ("id",)
    # Strip leading direction markers before validating.
    test_fields = [
        field[1:] if field[0] in ("-", "+") else field for field in sort_fields
    ]
    invalid_fields = set(test_fields).difference(field_names)
    if invalid_fields:
        raise ParseError(
            "Invalid field(s) for sort: {}".format(",".join(invalid_fields))
        )
    return collection.order_by(*sort_fields)
def sort_models(model_type: API_Model, models: QuerySet, request_params: dict) -> QuerySet:
    '''
    Sorts a queryset of models according to the parameters sent in an HTTP request.
    Ensures fields supplied in the sort exist for the model.

    --> model_type Type[API_Model] : The type (class) of the model being updated.
        Used to validate the sort parameters.
    --> models : The queryset of models to apply the sort to.
    --> request_params : The parameters sent with the request (in querystring or body).

    <-- A queryset containing the sorted models.
    '''
    sort_spec = request_params.get('sort', {})
    if sort_spec == {}:
        return models
    if isinstance(sort_spec, str):
        sort_spec = parse_query_params(sort_spec)
    # Strip the leading "-" (descending marker) before validating field names.
    bare_fields = [entry if entry[0] != '-' else entry[1:] for entry in sort_spec]
    validate_fields(bare_fields, model_type)
    return models.order_by(*sort_spec)
def cite(self, request: HttpRequest, queryset: QuerySet):
    """Admin action: show a ready-to-paste LaTeX \\cite{...} command built
    from the distinct, alphabetically ordered cite keys of the selection."""
    keys = (queryset.order_by('cite_key')
                    .values_list('cite_key', flat=True)
                    .distinct())
    cite_str = ", ".join(list(keys))
    self.message_user(request, f"\\cite{{{cite_str}}}", level=messages.SUCCESS)
def filter_games(user_games: QuerySet, request: HttpRequest) -> Tuple[QuerySet, str, List[str]]:
    """Order a user's games by the querystring's sort_by key.

    Unknown or missing sort keys fall back to the game-name ordering.
    Returns (ordered queryset, sort key used, order_by field list).
    """
    sort_by = request.GET.get("sort_by", constants.SORT_BY_GAME_NAME)
    order_by = constants.SORT_FIELDS_MAPPING.get(
        sort_by,
        constants.SORT_FIELDS_MAPPING[constants.SORT_BY_GAME_NAME])
    return user_games.order_by(*order_by), sort_by, order_by
def test_professional_tags_list(
    client_with_token: Client,
    professional_tags: QuerySet,
):
    """Should return a professional tag names list."""
    expected = professional_tags.order_by("name").first()
    response = client_with_token.get(reverse("professional-tags-list"))
    payload = response.json()
    assert response.status_code == 200
    assert payload["count"] == OBJECTS_TO_CREATE * 4
    # The endpoint sorts by name, so the first result matches the fixture's
    # alphabetically first tag.
    assert payload["results"][0]["name"] == expected.name
def order_articles(self, articles: QuerySet):
    """Return the articles ordered by number of comments, most commented first.

    Small querysets (<= 30 rows) are ordered directly.  Larger ones are first
    narrowed to the 20-30 most recently edited articles (random cutoff) and
    only that window is ordered by comment count.

    :param articles: queryset of Article rows.
    :return: a list of articles.
    """
    count = len(articles)  # evaluates the queryset once
    if count == 0:
        return []
    if count == 1:
        return list(articles)
    if count <= 30:
        ordered = articles.annotate(
            num_comments=Count('comment')).order_by('-num_comments')
        return list(ordered)
    # Fix: the original indexed a single random article with
    # [random.randint(20, 30)] instead of slicing, then called
    # annotate()/order_by() on that model instance (AttributeError) and
    # discarded the result.  Take a window of the most recently edited
    # articles and order that window by comment count instead.
    window_ids = list(
        articles.order_by('-last_edit')
                .values_list('pk', flat=True)[:random.randint(20, 30)])
    ordered = (articles.filter(pk__in=window_ids)
                       .annotate(num_comments=Count('comment'))
                       .order_by('-num_comments'))
    return list(ordered)
def test_rates_list(
    client_with_token: Client,
    rates: QuerySet,
):
    """Should return a rates list."""
    first_rate = rates.order_by("currency").first()
    response = client_with_token.get(reverse("rates-list"))
    payload = response.json()
    assert response.status_code == 200
    assert len(payload) == 3
    first, second = payload[0], payload[1]
    # Rates come back sorted by currency code, so CAD precedes EUR.
    assert first["currency"] == first_rate.currency
    assert first["value"] == str(first_rate.value)
    assert first["title"] == "Canadian Dollar"
    assert first["sign"] == "C$"
    assert "CANADA" in first["countries"]
    assert second["title"] == "Euro"
    assert second["sign"] == "€"
    assert "FRANCE" in second["countries"]
def sort_queryset(qs: QuerySet, request: HttpRequest):
    """Order *qs* by the request's 'order-by' field; 'dir=desc' reverses it.

    Field names that belong directly to the Sense model are reached through
    the ``senses`` relation.
    """
    field = request.GET.get('order-by')
    if not field:
        return qs
    # Non-relational Sense fields require the senses__ prefix.
    local_fields = [
        f.name for f in Sense._meta.get_fields() if not f.remote_field
    ]
    if field in local_fields:
        field = f'senses__{field}'
    if request.GET.get('dir') == 'desc':
        field = f'-{field}'
    return qs.order_by(field)
def _filter_and_exclude_games(user_games: QuerySet, request: HttpRequest) -> Tuple[QuerySet, QuerySet, str, str]:
    """Order a user's games and optionally apply an exclusion filter.

    Returns (filtered ordered games, all ordered games, sort key, exclude key).
    """
    sort_by = request.GET.get("sort_by", constants.SORT_BY_GAME_NAME)
    if sort_by in constants.SORT_FIELDS_MAPPING:
        order_by = constants.SORT_FIELDS_MAPPING[sort_by]
    else:
        order_by = constants.SORT_FIELDS_MAPPING[constants.SORT_BY_GAME_NAME]
    # Querystring takes precedence to allow explicit sorting/showing back
    # even if the cookie is set.
    exclude = (request.GET.get("exclude", None)
               or request.COOKIES.get(constants.USER_OPTIONS_EXCLUDE_COOKIE_NAME, None))
    exclude_kwargs = None  # type: Any
    if exclude in constants.EXCLUDE_FIELDS_MAPPING:
        exclude_kwargs = constants.EXCLUDE_FIELDS_MAPPING[exclude]
    else:
        # Unknown exclude keys are treated as "no exclusion".
        exclude = None
    all_games = user_games.order_by(*order_by)
    if not exclude:
        return all_games, all_games, sort_by, ""
    filtered = user_games.exclude(**exclude_kwargs).order_by(*order_by)
    return filtered, all_games, sort_by, exclude
def api_department_list(request):
    """List (GET) or create (POST) departments.

    GET supports pagination (``_page``/``_limit``), ordering
    (``_sort``/``_order``) and ``dept_no``/``dept_name`` "contains" filters;
    the total row count is returned in the ``X-TOTAL-COUNT`` response header.
    POST creates a department from ``dept_no``/``dept_name`` form fields.
    Invalid input yields a 400 response; other methods a 405.
    """
    if request.method == 'GET':
        _page = request.GET.get('_page', 0)
        _limit = request.GET.get('_limit', 10)
        _sort = request.GET.get('_sort')
        _order = request.GET.get('_order')
        dept_no__contains = request.GET.get('dept_no__contains')
        dept_name__contains = request.GET.get('dept_name__contains')
        form = DepartmentListParamsForm(
            dict(_page=_page, _limit=_limit, _order=_order, _sort=_sort,
                 dept_no__contains=dept_no__contains,
                 dept_name__contains=dept_name__contains))
        if not form.is_valid():
            return JsonResponse({'error': 'validate error'}, status=400)
        params = form.cleaned_data
        _page = params.get('_page')
        _limit = params.get('_limit')
        _sort = params.get('_sort')
        _order = params.get('_order')
        dept_no__contains = params.get('dept_no__contains')
        dept_name__contains = params.get('dept_name__contains')
        offset = _page * _limit
        end = offset + _limit
        qs = QuerySet(Department)
        if dept_no__contains:
            qs = qs.filter(dept_no__contains=dept_no__contains)
        if dept_name__contains:
            qs = qs.filter(dept_name__contains=dept_name__contains)
        # Count before slicing so the header reflects the full result set.
        count = qs.all().count()
        if _sort and _order:
            if _order == 'asc':
                qs = qs.order_by(f'{_sort}')
            else:
                qs = qs.order_by(f'-{_sort}')
        departments = qs.all()[offset:end]
        return JsonResponse(
            serialize_queryset(departments, exclude=['employees']),
            safe=False,
            headers={'X-TOTAL-COUNT': count})
    elif request.method == 'POST':
        dept_no = request.POST.get('dept_no')
        dept_name = request.POST.get('dept_name')
        form = DepartmentForm(dict(dept_no=dept_no, dept_name=dept_name))
        if not form.is_valid():
            return JsonResponse({'error': 'validate error'}, status=400)
        try:
            dept = Department.objects.create(dept_no=dept_no,
                                             dept_name=dept_name)
            return JsonResponse(serialize_model(dept, exclude=['employees']))
        except IntegrityError:
            # dept_no is expected to be unique at the DB level.
            return JsonResponse({'error': 'integrity error'}, status=400)
    # Fix: other HTTP methods previously fell through and returned None,
    # which makes Django raise a ValueError; answer 405 instead.
    return JsonResponse({'error': 'method not allowed'}, status=405)
def order_by(self, *fields):
    """Order the queryset by regular fields and/or EAV attributes.

    Terms of the form ``<eav_attr>__<slug>`` are resolved to an EAV
    attribute and turned into a synthetic annotated column; all other
    terms are passed through to the standard QuerySet.order_by.
    """
    # Django only allows to order querysets by direct fields and
    # foreign-key chains. In order to bypass this behaviour and order
    # by EAV attributes, it is required to construct custom order-by
    # clause manually using Django's conditional expressions.
    # This will be slow, of course.
    order_clauses = []
    query_clause = self
    config_cls = self.model._eav_config_cls
    for term in [t.split('__') for t in fields]:
        # Continue only for EAV attributes.
        if len(term) == 2 and term[0] == config_cls.eav_attr:
            # Retrieve Attribute over which the ordering is performed.
            try:
                attr = Attribute.objects.get(slug=term[1])
            except ObjectDoesNotExist:
                raise ObjectDoesNotExist(
                    'Cannot find EAV attribute "{}"'.format(term[1]))
            # Each datatype stores its payload in its own value_* column.
            field_name = 'value_%s' % attr.datatype
            pks_values = Value.objects.filter(
                # Retrieve pk-values pairs of the related values
                # (i.e. values for the specified attribute and
                # belonging to entities in the queryset).
                attribute__slug=attr.slug,
                entity_id__in=self
            ).order_by(
                # Order values by their value-field of
                # appriopriate attribute data-type.
                field_name
            ).values_list(
                # Retrieve only primary-keys of the entities
                # in the current queryset.
                'entity_id', field_name)
            # Retrive ordered values from pk-value list.
            # NOTE(review): zip(*pks_values) raises ValueError on an empty
            # result set — confirm callers never order an empty queryset.
            _, ordered_values = zip(*pks_values)
            # Add explicit ordering and turn
            # list of pairs into look-up table.
            # NOTE(review): duplicate values collapse to the last index seen,
            # which gives equal values the same ordering rank — presumably
            # intentional (see the "THEN 2 / THEN 2" example below).
            val2ind = dict(zip(ordered_values, count()))
            # Finally, zip ordered pks with their grouped orderings.
            entities_pk = [(pk, val2ind[val]) for pk, val in pks_values]
            # Using ordered primary-keys, construct
            # CASE clause of the form:
            #
            #   CASE
            #       WHEN id = 2 THEN 1
            #       WHEN id = 5 THEN 2
            #       WHEN id = 9 THEN 2
            #       WHEN id = 4 THEN 3
            #   END
            #
            when_clauses = [When(id=pk, then=i) for pk, i in entities_pk]
            order_clause = Case(*when_clauses, output_field=IntegerField())
            clause_name = '__'.join(term)
            # Use when-clause to construct
            # custom order-by clause.
            query_clause = query_clause.annotate(
                **{clause_name: order_clause})
            order_clauses.append(clause_name)
        elif len(term) >= 2 and term[0] == config_cls.eav_attr:
            # Deeper chains (attr__slug__something) cannot be resolved.
            raise NotSupportedError(
                'EAV does not support ordering through '
                'foreign-key chains')
        else:
            # Plain model field: pass through unchanged.
            order_clauses.append(term[0])
    return QuerySet.order_by(query_clause, *order_clauses)
class QueryBuilder:
    """
    Name : QueryBuilder
    Input : Model and request
    Desc : Generic helper that takes a model and applies the filter, group
    and aggregation clauses parsed from the request; calling build()
    returns the matching rows as a list.
    """

    def __init__(self, model, request):
        """Default initializer to initialize the default values of the variables."""
        # Raw querystring parameters; empty defaults mean "clause absent".
        self.agg = request.GET.get('agg', '')
        self.group = request.GET.get('group', '')
        self.where = request.GET.get('where', {})
        self.sort = request.GET.get('sort', '')
        self.model = model
        self.qs = QuerySet(model)
        # Leading literal on an agg column selects the aggregate function.
        self.agg_literal_map = {'+': Sum, '-': Avg, '*': Count}
        self.agg_map = {}

    def _get_agg_col_mapping(self, column):
        """
        Input : column
        Output: If the first char is one of the literals below, the matching
        aggregate is applied; otherwise Sum is applied by default.
            + -> Sum
            - -> Avg
            * -> Count
        e.g.: -col1 -> Avg(col1)
        """
        _aggregation = Sum
        _column = column
        if (column[0] in self.agg_literal_map):
            _aggregation = self.agg_literal_map[column[0]]
            _column = column[1:]
        self.agg_map[_column] = _aggregation(_column)

    def _get_agg(self):
        """
        Splits the agg param from the request and invokes
        _get_agg_col_mapping to construct the aggregation map.
        """
        # Fix: the previous "is not None" test let the empty default through
        # and crashed on .split(); only parse when a value was supplied.
        if self.agg:
            columns = self.agg.split(",")
            for column in columns:
                self._get_agg_col_mapping(column)
        return self.agg_map

    def _get_group(self):
        """
        Splits the group param from the request,
        otherwise returns an empty list.
        """
        if len(self.group):
            return [field for field in self.group.split(",")]
        return EMPTY_LIST

    def _get_where(self):
        """
        Converts the JSON-type string where clause to a dict.
        Customisation done only for date_to and date_from which are mapped
        to the out-of-the-box date__gte and date__lte fields.
        """
        if self.where:
            json_where_clause = json.loads(self.where)
            if 'date_from' in json_where_clause:
                json_where_clause["date__gte"] = json_where_clause["date_from"]
                del json_where_clause["date_from"]
            if 'date_to' in json_where_clause:
                json_where_clause["date__lte"] = json_where_clause["date_to"]
                del json_where_clause["date_to"]
            return json_where_clause
        return EMPTY_DICT

    def _get_order_by(self):
        """
        If the sort param is present then splits it,
        otherwise returns an empty list.
        """
        if self.sort:
            return self.sort.split(",")
        return EMPTY_LIST

    def build(self):
        """
        From the queryset applies filter, group, sort and agg methods
        accordingly.
        """
        is_group_by = False
        # Calculated field CPI (cost per install).
        cpi = ExpressionWrapper(F('spend') / F('installs'),
                                output_field=FloatField())
        self.qs = self.qs.annotate(cpi=cpi)
        # Aggregations are applied here.
        if len(self._get_group()) > 0 and len(self._get_agg()) > 0:
            is_group_by = True
            self.qs = self.qs.values(*self._get_group()) \
                .annotate(**self._get_agg())
        # Where clause is applied here.
        if len(self._get_where()) > 0:
            self.qs = self.qs.filter(**self._get_where())
        # Sorting is applied.
        if len(self._get_order_by()) > 0:
            self.qs = self.qs.order_by(*self._get_order_by())
        # Return the list of PerformanceMetric rows.
        if is_group_by:
            return list(self.qs)
        else:
            self.qs = self.qs.values()
            return list(self.qs)
def add_orderings(qs: QuerySet, orderings: str) -> QuerySet:
    """Order *qs* by a comma-separated list of field names."""
    fields = orderings.split(',')
    return qs.order_by(*fields)
def publisher_advertisers(request, view='my', page=None, template='publisher/advertisers/index.html'):
    '''
    View to manage Publisher's Advertisers.  This view has three different
    resultsets based on the URL specified.  '/my/' displays this Publishers
    Advertisers.  '/find/' provides a search interface to all Advertisers,
    and '/expired/' displays this Publisher's expired Advertisers.  This View
    has sortable result columns based on the GET variable 'sort' and the
    template displays different actions for each View based upon the form
    variable "view" which is derived from the URL.
    '''
    from atrinsic.base.models import Organization
    from atrinsic.util.xls import write_rows
    from forms import AdvertiserSearchForm
    import tempfile
    from django.db.models import Q
    q = None
    vertical = None
    sort_next = '#'
    download = False
    if request.GET:
        # A search/sort was submitted: validate the form and build the
        # queryset for the requested view.
        form = AdvertiserSearchForm(request.organization, request.GET)
        if form.is_valid():
            q = form.cleaned_data.get('q', None)
            vertical = form.cleaned_data.get('vertical', None)
            min_rating = form.cleaned_data.get('network_rating', None)
            date_from = form.cleaned_data.get('date_from', None)
            date_to = form.cleaned_data.get('date_to', None)
            # '-1' is the "any vertical" sentinel from the form.
            if (vertical == '-1') or len(vertical) < 1:
                vertical = None
            if view == 'my':
                settab(request, "Publisher", "Advertisers", "My Advertisers")
                # My Advertisers
                qs = Organization.objects.filter(
                    publisher_relationships__status=RELATIONSHIP_ACCEPTED,
                    status=ORGSTATUS_LIVE,
                    publisher_relationships__publisher=request.organization
                ).extra(
                    select={
                        "publisher_id":
                        "select publisher_id from base_organization where id=" + str(request.organization.id)
                    })
                # 'download' switches the response to a spreadsheet (below).
                if request.GET.get('download', None) is not None:
                    template = 'publisher/advertisers/download.csv'
                    download = True
            elif view == 'expired':
                settab(request, "Publisher", "Advertisers", "Expired Advertisers")
                qs = Organization.objects.filter(
                    publisher_relationships__status__in=[
                        RELATIONSHIP_EXPIRED, RELATIONSHIP_DECLINED
                    ],
                    status=ORGSTATUS_LIVE,
                    publisher_relationships__publisher=request.organization
                ).extra(
                    select={
                        "publisher_id":
                        "select publisher_id from base_organization where id=" + str(request.organization.id)
                    })
            else:
                # Default Advertiser Finder
                qs = Organization.objects.filter(org_type=ORGTYPE_ADVERTISER,
                                                 status=ORGSTATUS_LIVE).filter(
                    has_program_term=True, is_private=False)
                # Free-text search matches the alias or the company name
                # depending on the advertiser's show_alias preference.
                if q is not None:
                    qs = qs.filter(
                        (Q(show_alias=True) & Q(company_alias__icontains=q)) |
                        (Q(show_alias=False) & Q(company_name__icontains=q)))
                if vertical is not None:
                    qs = qs.filter(vertical__order=vertical)
                else:
                    qs = qs.filter(is_adult=request.organization.is_adult)
                if min_rating is not None:
                    qs = qs.filter(network_rating__gte=str(min_rating))
                if form.cleaned_data.get('email_marketing', False):
                    qs = qs.filter(allow_third_party_email_campaigns=True)
                if form.cleaned_data.get('direct_linking', False):
                    qs = qs.filter(allow_direct_linking_through_ppc=True)
                if form.cleaned_data.get('trademark_bidding', False):
                    qs = qs.filter(allow_trademark_bidding_through_ppc=True)
                if date_from is not None:
                    qs = qs.filter(date_joined__gte=date_from)
                if date_to is not None:
                    qs = qs.filter(date_joined__lte=date_to)
        else:
            # Invalid search form: fall through with an empty queryset.
            qs = QuerySet()
    else:
        # No search submitted: show the default resultset for the view.
        form = AdvertiserSearchForm(organization=request.organization)
        if view == 'my':
            settab(request, "Publisher", "Advertisers", "My Advertisers")
            # My Advertisers
            qs = Organization.objects.filter(
                publisher_relationships__status=RELATIONSHIP_ACCEPTED,
                status=ORGSTATUS_LIVE,
                publisher_relationships__publisher=request.organization
            ).extra(
                select={
                    "publisher_id":
                    "select publisher_id from base_organization where id=" + str(request.organization.id)
                })
        elif view == 'expired':
            settab(request, "Publisher", "Advertisers", "Expired Advertisers")
            qs = Organization.objects.filter(
                publisher_relationships__status__in=[
                    RELATIONSHIP_EXPIRED, RELATIONSHIP_DECLINED
                ],
                status=ORGSTATUS_LIVE,
                publisher_relationships__publisher=request.organization
            ).extra(
                select={
                    "publisher_id":
                    "select publisher_id from base_organization where id=" + str(request.organization.id)
                })
        else:
            # XXX
            qs = Organization.objects.none()
    sort = request.GET.get('sort', 'date_joined').lower()
    if qs.model and qs.count():
        display_results = True
        # Columns the template may sort on; sort_next is the toggled
        # direction used for the column-header links.
        sort_fields = [
            'ticker',
            'company_name',
            'state',
            'country',
            'vertical',
            'network_rating',
            'date_joined',
            'contact_firstname',
            'force',
        ]
        for f in sort_fields:
            if sort.endswith(f):
                if sort.startswith('-'):
                    sort_next = sort[1:]
                else:
                    sort_next = '-%s' % sort
                qs = qs.order_by(sort)
                break
        # if finding, remove all the advertisers that have outstanding relationships
        if view == 'find':
            result = []
            for i in qs:
                rs = i.get_advertiser_relationship(request.organization)
                if rs == None:
                    result.append(i)
                elif rs.status == RELATIONSHIP_NONE:
                    result.append(i)
            total_results = len(result)
            qs = result
        else:
            total_results = qs.count()
    else:
        if view == 'expired':
            display_results = True
            total_results = 0
        elif form.is_valid():
            display_results = True
            total_results = 0
        else:
            display_results = False
            total_results = 0
    if download == True:
        # Export the current resultset as an Excel attachment.
        file_id, file_path = tempfile.mkstemp()
        res = [[
            'Ticker', 'Company Name', 'State', 'Country', 'Vertical',
            'Network Rating', 'Date Joined', 'Force'
        ]]
        for row in qs:
            res.append([
                str(row.ticker),
                str(row.company_name),
                str(row.state),
                str(row.country),
                str(row.vertical),
                str(row.network_rating),
                str(row.date_joined),
                str(row.force)
            ])
        write_rows(file_path, res)
        res = open(file_path).read()
        response = HttpResponse(res, mimetype="application/vnd.ms-excel")
        response[
            'Content-Disposition'] = 'attachment; filename=myadvertisers.xls'
        return response
    else:
        return object_list(request,
                           queryset=qs,
                           allow_empty=True,
                           page=page,
                           template_name='publisher/advertisers/index.html',
                           paginate_by=50,
                           extra_context={
                               'q': q,
                               'display_results': display_results,
                               'form': form,
                               'sort': sort,
                               'sort_next': sort_next,
                               'total_results': total_results,
                               'view': view,
                           })
def createDataframe(
    self,
    tick,
    all_Stockitems: QuerySet,
    callDate: datetime.datetime,
    countIndex=0,
    krbottomline=CONF.KR_BOTTOMLINE,
    krvolumefilter=CONF.KR_TOP_VOLUME_FILTER,
) -> [pd.DataFrame, pd.DataFrame, pd.DataFrame]:
    """Build the (main, prediction, window) dataframes for one stock tick.

    Filters ``all_Stockitems`` to the requested tick within the fetch window
    ending at *callDate*, restricted to the top-volume fraction of tickers,
    and turns each row into a dataframe record.  The newest row (idx == 0)
    goes to the prediction frame; all others go to the main frame with the
    previous row's close-open gap as the training 'answer'.

    NOTE(review): the ``return`` inside ``finally`` swallows any exception
    after the warning is logged, so callers always get three dataframes
    (possibly empty) — presumably intentional best-effort behaviour.
    """
    globalDataframeMain = CF.generateEmptyDataframe("Main")
    globalDataframePredictions = CF.generateEmptyDataframe("Prediction")
    globalDataframeWindow = CF.generateEmptyDataframe("Window")
    # startDate = CF.getNextPredictionDate(
    #     callDate - datetime.timedelta(CONF.TOTAL_REQUEST_DATE_LENGTH)
    # )
    startDate = CF.getStartFetchingDate(
        callStartDate=callDate, fetchingLength=CONF.PREDICTION_WINDOW_LENGTH)
    endDate = CF.getEndFetchingDate(callEndDate=callDate)
    # Throttled progress logging: only every 100th tick.
    if countIndex % 100 == 0:
        logger.info(f"MainWrapperKR - createDataframe : {countIndex}")
        logger.info(
            f"MainWrapperKR - createDataframe; start:{startDate}, end:{endDate}"
        )
    try:
        # Ticks ranked by volume, used to keep only the top fraction below.
        tmpStockTicksByVolumeFilter = (
            all_Stockitems.order_by('-volume').values_list(
                'stock_name__stock_tick__stock_tick', flat=True).distinct()
        )
        # Keep ticks whose volume ever ranked in the top fraction
        # -> CONF.KR_TOP_VOLUME_FILTER
        tmpQuery = all_Stockitems.filter(
            Q(stock_name__stock_tick__stock_tick__in=
              tmpStockTicksByVolumeFilter[:int(
                  len(tmpStockTicksByVolumeFilter) * krvolumefilter)])
            & Q(stock_name__stock_tick__stock_tick=tick)
            & Q(reg_date__range=(startDate, endDate))  # get only needed dates
            & (
                ~(  # Remove unwanted rows (trading halted)
                    Q(open=0) | Q(high=0) | Q(low=0) | Q(close=0)))
            & (
                Q(close__gte=krbottomline)  # applies only for first object
            )).order_by('-reg_date').distinct()
        if not tmpQuery.exists():
            raise Exception(f"DB Empty for tick : {tick}")
        if countIndex % 100 == 0:
            logger.info(
                f"MainWrapperKR - createDataframe; Query return length check : {len(tmpQuery)}"
            )
        previousAnswer = None
        # Rows arrive newest-first (-reg_date); idx 0 is the prediction row.
        for idx, stockitem in enumerate(tmpQuery):
            tmp_regDate = stockitem.reg_date
            tmp_regDatetime = datetime.datetime(tmp_regDate.year,
                                                tmp_regDate.month,
                                                tmp_regDate.day)
            tmpInsertData = {
                'section_integer':
                int(stockitem.stock_map_section.section_name.section_integer),
                'total_sum':
                int(stockitem.stock_map_section.total_sum /
                    CONF.STOCK_NUM_NORMALIZER),
                'time_elapsed':
                abs(int((callDate - tmp_regDatetime).days)),
                'open': stockitem.open,
                'high': stockitem.high,
                'low': stockitem.low,
                'close': stockitem.close,
                'gap': stockitem.close - stockitem.open,
                'volume': stockitem.volume,
                'div': stockitem.div,
                'per': stockitem.per,
                'pbr': stockitem.pbr,
                'market_name':
                CF.getMarketNumber(
                    stockitem.stock_name.stock_tick.stock_marketName),
                'roe': stockitem.roe,
                'tick': str(tick)
            }
            if idx == 0:  # first index which needs predictions
                # NOTE(review): DataFrame.append is deprecated/removed in
                # modern pandas — migrating to pd.concat would be needed
                # on upgrade.
                globalDataframePredictions = globalDataframePredictions.append(
                    tmpInsertData, ignore_index=True)
                previousAnswer = stockitem.close - stockitem.open
            # elif idx == 1 and False:  # remove data lookahead
            #     tmpInsertData["answer"] = previousAnswer
            #     globalDataframeWindow = globalDataframeWindow.append(
            #         tmpInsertData, ignore_index=True
            #     )
            #     previousAnswer = stockitem.close
            else:
                # 'answer' is the previous (newer) day's close-open gap.
                tmpInsertData["answer"] = previousAnswer
                globalDataframeMain = globalDataframeMain.append(
                    tmpInsertData, ignore_index=True)
                previousAnswer = stockitem.close - stockitem.open
    except Exception as e:
        logger.warning(
            f"appStockPrediction - MainWrapperKR - createDataframe; {e}")
    finally:
        return globalDataframeMain, globalDataframePredictions, globalDataframeWindow
def qs_to_result(result: QuerySet,
                 stride: int = 1,
                 group: bool = False,
                 shuffle: bool = False,
                 deterministic_order: bool = False,
                 custom_order_by_id: List[int] = None,
                 frame_major: bool = True,
                 show_count: bool = False,
                 limit: int = 100,
                 color: str = "red") -> Dict:
    """Materialize a queryset of Frame/Face-like/Track/Video rows into the
    {'result': groups, 'count': count, 'type': name} payload the UI expects.

    The branch taken depends on the queryset's model class.  stride samples
    every stride-th row, limit caps the output, custom_order_by_id imposes an
    explicit id ordering, and group=True buckets results per video.
    """
    count = result.count() if show_count else 0
    if shuffle:
        result = result.order_by('?')
    materialized_result = []
    cls = result.model
    bases = cls.__bases__
    if bases[0] is base_models.Frame:
        # Plain frames: one entry per frame, no objects attached.
        if custom_order_by_id is not None:
            result = sorted(result, key=lambda x: custom_order_by_id.index(x.id))
        elif not shuffle and deterministic_order:
            result = result.order_by('video', 'number')
        for frame in result[:limit * stride:stride]:
            materialized_result.append({
                'video': frame.video.id,
                'min_frame': frame.number,
                'objects': []
            })
    elif (cls is Face or cls is FaceGender or cls is FaceIdentity
          or cls is Object or cls is Pose or cls is FaceLandmarks):
        # Per-frame annotation models.  frame_path is the ORM path from this
        # model to its Frame row (direct, or via the related face).
        if cls is FaceGender or cls is FaceIdentity or cls is FaceLandmarks:
            frame_path = 'face__frame'
            if cls is FaceGender:
                result = result.select_related('face', 'gender')
            elif cls is FaceIdentity:
                result = result.select_related('face', 'identity')
            else:
                result = result.select_related('face')
        else:
            frame_path = 'frame'
            result = result.select_related(frame_path)
        if not shuffle and deterministic_order:
            result = result.order_by(frame_path + '__video',
                                     frame_path + '__number')
        # Serializer for one instance of this model.
        if cls is Face:
            fn = bbox_to_dict
        elif cls is Object:
            fn = object_to_dict
        elif cls is FaceGender:
            fn = gender_to_dict
        elif cls is FaceIdentity:
            fn = identity_to_dict
        elif cls is Pose:
            fn = pose_to_dict
        elif cls is FaceLandmarks:
            fn = face_landmarks_to_dict
        if frame_major:
            # Group all instances of a frame into one result entry.
            frame_ids = set()

            def get_all_results():
                # Fetch every instance belonging to the selected frames,
                # bucketed by frame id.
                all_results = collect(
                    list(result.filter(**{frame_path + '__in': list(frame_ids)})),
                    lambda t: access(t, frame_path + '__id'))
                return all_results

            if custom_order_by_id is None:
                # Stride-sample via id % stride == 0, then sort frames by
                # (video, frame number).
                frames = set()
                for inst in list(
                        result.values(
                            frame_path + '__video', frame_path + '__number',
                            frame_path +
                            '__id').annotate(m=F('id') % stride).filter(m=0)[:limit]):
                    frames.add((inst[frame_path + '__video'],
                                inst[frame_path + '__number'],
                                inst[frame_path + '__id']))
                    frame_ids.add(inst[frame_path + '__id'])
                all_results = get_all_results()
                frames = list(frames)
                frames.sort(key=itemgetter(0, 1))
            else:
                # Order frames by the best (lowest) position any of their
                # instances holds in custom_order_by_id.
                frames = {}
                id_to_position = defaultdict(lambda: float('inf'))
                for i, id_ in enumerate(custom_order_by_id):
                    id_to_position[id_] = i
                for inst in list(
                        result.values(
                            'id', frame_path + '__video',
                            frame_path + '__number', frame_path +
                            '__id').annotate(m=F('id') % stride).filter(m=0)):
                    frame_key = (inst[frame_path + '__video'],
                                 inst[frame_path + '__number'],
                                 inst[frame_path + '__id'])
                    frames[frame_key] = min(
                        id_to_position[inst['id']],
                        frames[frame_key] if frame_key in frames else float('inf'))
                    frame_ids.add(inst[frame_path + '__id'])
                all_results = get_all_results()
                frames = sorted([x for x in frames.items()], key=lambda x: x[1])
                frames = [x[0] for x in frames[:limit]]
            for (video, frame_num, frame_id) in frames:
                materialized_result.append({
                    'video': video,
                    'min_frame': frame_num,
                    'objects': [fn(inst) for inst in all_results[frame_id]]
                })
        else:
            # Instance-major: one result entry per instance.
            for inst in result[:limit * stride:stride]:
                r = {
                    'video': inst.frame.video.id,
                    'min_frame': inst.frame.number,
                    'objects': [fn(inst)]
                }
                materialized_result.append(r)
    elif bases[0] is base_models.Track:
        if not shuffle and deterministic_order:
            result = result.order_by('video', 'min_frame')
        # NOTE: the loop iterable is evaluated before the first iteration, so
        # rebinding `result` to a dict inside the body is safe (if confusing).
        for t in result.annotate(duration=Track.duration_expr()).filter(duration__gt=0)[:limit]:
            result = {
                'video': t.video.id,
                'track': t.id,
                'min_frame': t.min_frame,
                'max_frame': t.max_frame,
            }
            materialized_result.append(result)
        if custom_order_by_id is not None:
            materialized_result.sort(
                key=lambda x: custom_order_by_id.index(x['track']))
        else:
            materialized_result.sort(key=itemgetter('video', 'min_frame'))
    elif bases[0] is base_models.Video:
        if custom_order_by_id is not None:
            raise NotImplementedError()
        if not shuffle and deterministic_order:
            result = result.order_by('id')
        # Whole videos: span from frame 0 to the video's frame count.
        for v in result[:limit]:
            materialized_result.append({
                'video': v.id,
                'min_frame': 0,
                'max_frame': v.num_frames
            })
    else:
        raise Exception("Unsupported class")
    ty_name = cls.__name__
    if group:
        # Bucket entries per video into 'contiguous' timeline groups.
        by_video = collect(materialized_result, itemgetter('video'))
        videos = collect(Video.objects.filter(id__in=by_video.keys()).all(),
                         attrgetter('id'))
        groups = [{
            'type': 'contiguous',
            'label': video,
            'num_frames': videos[video][0].num_frames,
            'elements': [{
                'video': video,
                'segments': sorted(by_video[video], key=itemgetter('min_frame')),
                'color': color
            }]
        } for video in sorted(by_video.keys())]
    else:
        groups = [{'type': 'flat', 'label': '', 'elements': [r]}
                  for r in materialized_result]
    return {'result': groups, 'count': count, 'type': ty_name}
def order_queryset(queryset: QuerySet, params: list) -> QuerySet:
    """Return *queryset* ordered by the given list of field names."""
    ordered = queryset.order_by(*params)
    return ordered
def publisher_advertisers(request, view='my', page=None, template='publisher/advertisers/index.html'):
    '''
    View to manage Publisher's Advertisers.  This view has three different
    resultsets based on the URL specified.  '/my/' displays this Publishers
    Advertisers.  '/find/' provides a search interface to all Advertisers,
    and '/expired/' displays this Publisher's expired Advertisers.  This View
    has sortable result columns based on the GET variable 'sort' and the
    template displays different actions for each View based upon the form
    variable "view" which is derived from the URL.

    NOTE(review): this file contains another, token-identical definition of
    publisher_advertisers; the later definition shadows the earlier one —
    the duplicate should probably be removed.
    '''
    from atrinsic.base.models import Organization
    from atrinsic.util.xls import write_rows
    from forms import AdvertiserSearchForm
    import tempfile
    from django.db.models import Q
    q = None
    vertical = None
    sort_next = '#'
    download = False
    if request.GET:
        # A search/sort was submitted: validate and build the queryset.
        form = AdvertiserSearchForm(request.organization, request.GET)
        if form.is_valid():
            q = form.cleaned_data.get('q', None)
            vertical = form.cleaned_data.get('vertical', None)
            min_rating = form.cleaned_data.get('network_rating', None)
            date_from = form.cleaned_data.get('date_from', None)
            date_to = form.cleaned_data.get('date_to', None)
            # '-1' is the "any vertical" sentinel from the form.
            if (vertical == '-1') or len(vertical) < 1:
                vertical = None
            if view == 'my':
                settab(request, "Publisher", "Advertisers", "My Advertisers")
                # My Advertisers
                qs = Organization.objects.filter(
                    publisher_relationships__status=RELATIONSHIP_ACCEPTED,
                    status=ORGSTATUS_LIVE,
                    publisher_relationships__publisher=request.organization
                ).extra(
                    select={
                        "publisher_id":
                        "select publisher_id from base_organization where id=" + str(request.organization.id)
                    })
                # 'download' switches the response to a spreadsheet (below).
                if request.GET.get('download', None) is not None:
                    template = 'publisher/advertisers/download.csv'
                    download = True
            elif view == 'expired':
                settab(request, "Publisher", "Advertisers", "Expired Advertisers")
                qs = Organization.objects.filter(
                    publisher_relationships__status__in=[
                        RELATIONSHIP_EXPIRED, RELATIONSHIP_DECLINED
                    ],
                    status=ORGSTATUS_LIVE,
                    publisher_relationships__publisher=request.organization
                ).extra(
                    select={
                        "publisher_id":
                        "select publisher_id from base_organization where id=" + str(request.organization.id)
                    })
            else:
                # Default Advertiser Finder
                qs = Organization.objects.filter(
                    org_type=ORGTYPE_ADVERTISER,
                    status=ORGSTATUS_LIVE).filter(has_program_term=True,
                                                  is_private=False)
                # Free-text search matches alias or company name depending on
                # the advertiser's show_alias preference.
                if q is not None:
                    qs = qs.filter(
                        (Q(show_alias=True) & Q(company_alias__icontains=q)) |
                        (Q(show_alias=False) & Q(company_name__icontains=q)))
                if vertical is not None:
                    qs = qs.filter(vertical__order=vertical)
                else:
                    qs = qs.filter(is_adult=request.organization.is_adult)
                if min_rating is not None:
                    qs = qs.filter(network_rating__gte=str(min_rating))
                if form.cleaned_data.get('email_marketing', False):
                    qs = qs.filter(allow_third_party_email_campaigns=True)
                if form.cleaned_data.get('direct_linking', False):
                    qs = qs.filter(allow_direct_linking_through_ppc=True)
                if form.cleaned_data.get('trademark_bidding', False):
                    qs = qs.filter(allow_trademark_bidding_through_ppc=True)
                if date_from is not None:
                    qs = qs.filter(date_joined__gte=date_from)
                if date_to is not None:
                    qs = qs.filter(date_joined__lte=date_to)
        else:
            # Invalid search form: fall through with an empty queryset.
            qs = QuerySet()
    else:
        # No search submitted: show the default resultset for the view.
        form = AdvertiserSearchForm(organization=request.organization)
        if view == 'my':
            settab(request, "Publisher", "Advertisers", "My Advertisers")
            # My Advertisers
            qs = Organization.objects.filter(
                publisher_relationships__status=RELATIONSHIP_ACCEPTED,
                status=ORGSTATUS_LIVE,
                publisher_relationships__publisher=request.organization
            ).extra(
                select={
                    "publisher_id":
                    "select publisher_id from base_organization where id=" + str(request.organization.id)
                })
        elif view == 'expired':
            settab(request, "Publisher", "Advertisers", "Expired Advertisers")
            qs = Organization.objects.filter(
                publisher_relationships__status__in=[
                    RELATIONSHIP_EXPIRED, RELATIONSHIP_DECLINED
                ],
                status=ORGSTATUS_LIVE,
                publisher_relationships__publisher=request.organization
            ).extra(
                select={
                    "publisher_id":
                    "select publisher_id from base_organization where id=" + str(request.organization.id)
                })
        else:
            # XXX
            qs = Organization.objects.none()
    sort = request.GET.get('sort', 'date_joined').lower()
    if qs.model and qs.count():
        display_results = True
        # Columns the template may sort on; sort_next is the toggled
        # direction used for the column-header links.
        sort_fields = [
            'ticker',
            'company_name',
            'state',
            'country',
            'vertical',
            'network_rating',
            'date_joined',
            'contact_firstname',
            'force',
        ]
        for f in sort_fields:
            if sort.endswith(f):
                if sort.startswith('-'):
                    sort_next = sort[1:]
                else:
                    sort_next = '-%s' % sort
                qs = qs.order_by(sort)
                break
        # if finding, remove all the advertisers that have outstanding relationships
        if view == 'find':
            result = []
            for i in qs:
                rs = i.get_advertiser_relationship(request.organization)
                if rs == None:
                    result.append(i)
                elif rs.status == RELATIONSHIP_NONE:
                    result.append(i)
            total_results = len(result)
            qs = result
        else:
            total_results = qs.count()
    else:
        if view == 'expired':
            display_results = True
            total_results = 0
        elif form.is_valid():
            display_results = True
            total_results = 0
        else:
            display_results = False
            total_results = 0
    if download == True:
        # Export the current resultset as an Excel attachment.
        file_id, file_path = tempfile.mkstemp()
        res = [[
            'Ticker', 'Company Name', 'State', 'Country', 'Vertical',
            'Network Rating', 'Date Joined', 'Force'
        ]]
        for row in qs:
            res.append([
                str(row.ticker),
                str(row.company_name),
                str(row.state),
                str(row.country),
                str(row.vertical),
                str(row.network_rating),
                str(row.date_joined),
                str(row.force)
            ])
        write_rows(file_path, res)
        res = open(file_path).read()
        response = HttpResponse(res, mimetype="application/vnd.ms-excel")
        response['Content-Disposition'] = 'attachment; filename=myadvertisers.xls'
        return response
    else:
        return object_list(request,
                           queryset=qs,
                           allow_empty=True,
                           page=page,
                           template_name='publisher/advertisers/index.html',
                           paginate_by=50,
                           extra_context={
                               'q': q,
                               'display_results': display_results,
                               'form': form,
                               'sort': sort,
                               'sort_next': sort_next,
                               'total_results': total_results,
                               'view': view,
                           })