def similar_students(userid, college):
    """Rank students who applied to *college* by similarity to user *userid*.

    Missing SAT/ACT/GPA values for the user are imputed from classmates at
    the same high school (or the whole student pool), and a lone SAT or ACT
    score is converted to the other via a concordance table.  Returns the
    top 100 applicants, each tagged with a ``similar_score`` in [0, 1].
    """
    u = Student.objects.get(userid=userid)

    # Impute missing scores from classmates' averages; fall back to the
    # whole student pool when the user has no high school on record.
    if u.high_school_name is not None:
        classmates = Student.objects.filter(high_school_name=u.high_school_name)
    else:
        classmates = Student.objects.all()
    if classmates:
        if not u.SAT and not u.ACT_composite:
            new_ACT = classmates.aggregate(
                Avg("ACT_composite"))["ACT_composite__avg"]
            if new_ACT:
                u.ACT_composite = int(new_ACT)
            new_SAT = classmates.aggregate(Avg("SAT"))["SAT__avg"]
            if new_SAT:
                u.SAT = int(new_SAT)
        if not u.GPA:
            new_GPA = classmates.aggregate(Avg("GPA"))["GPA__avg"]
            if new_GPA:
                u.GPA = new_GPA

    # Last-resort defaults so the arithmetic below never sees None.
    if not u.SAT and not u.ACT_composite:
        u.SAT = 0
        u.ACT_composite = 0
    if not u.GPA:
        u.GPA = 0

    # User supplied exactly one test score (ACT XOR SAT): derive the other
    # from the concordance table ({"<ACT>": "<SAT low>-<SAT high>", ...}).
    if bool(u.SAT) ^ bool(u.ACT_composite):
        with open("backend/data/act_sat.json") as f:
            conversion = json.load(f)
        if u.ACT_composite:
            SAT_range = conversion[str(u.ACT_composite)]
            low, high = SAT_range.split("-")
            u.SAT = (int(low) + int(high)) // 2
        else:
            for act, SAT_range in conversion.items():
                low, high = SAT_range.split("-")
                if int(low) <= u.SAT <= int(high):
                    u.ACT_composite = int(act)
                    break

    print("SIMILARITY SCORES", "major, hs, subject, ap, gpa, composite")

    # Region buckets are loop-invariant; build them once, outside the loop.
    # (The original duplicated "MI" in midwest; membership is unchanged.)
    northeast = ["ME", "NH", "VT", "MA", "NY", "RI", "CT", "PA", "NJ"]
    midwest = ["OH", "MI", "IN", "WI", "IL", "IA", "MO", "ND", "SD", "NE",
               "KS", "MN"]
    west = ["MT", "ID", "WY", "CO", "NM", "AZ", "UT", "NV", "CA", "OR",
            "WA", "AK", "HI"]
    south = ["TX", "OK", "AR", "LA", "MS", "AL", "TN", "GA", "FL", "KY",
             "SC", "NC", "VA", "WV", "DC", "MD", "DE"]
    regions = [northeast, midwest, west, south]

    # At this point the user has GPA, SAT and ACT; they may or may not have
    # a high school, AP count, majors, or SAT subject scores.
    applications = college.application_set.all().filter(questionable=False)
    students_with_score = []
    for application in applications:
        s = application.student
        # BUG FIX: these were `break`, which silently dropped every
        # remaining application after the first skippable one.
        if s is None:
            continue
        if s.userid == userid:
            continue

        hs = ap = major = subject = gpa = composite = 0

        # Similar GPA?  0 when the peer has no GPA at all.
        s.GPA = s.GPA or 0
        gpa = (4.0 - float(abs(s.GPA - u.GPA))) / 4.0 if s.GPA != 0 else 0

        # Similar composite?  Average the SAT and ACT closeness over
        # however many of the two scores the peer actually has.
        s.SAT = s.SAT or 0
        s.ACT_composite = s.ACT_composite or 0
        amount_count = 0
        if s.SAT != 0:
            composite = (1200 - float(abs(s.SAT - u.SAT))) / 1200
            amount_count += 1
        if s.ACT_composite != 0:
            amount_count += 1
            composite = (composite +
                         (35 - float(abs(s.ACT_composite - u.ACT_composite))) / 35)
        # BUG FIX: guard against ZeroDivisionError when the peer has
        # neither score; composite stays 0 in that case.
        if amount_count:
            composite = composite / amount_count

        # Similar high school?  Exact school > same state > same region.
        if u.high_school_name is not None and s.high_school_name is not None:
            if u.high_school_name == s.high_school_name:
                hs = 1
            elif u.high_school_state == s.high_school_state:
                hs = 0.5
            else:
                for region in regions:
                    if (u.high_school_state in region
                            and s.high_school_state in region):
                        hs = 0.25
                        break

        # Similar AP count?  Ratio of the smaller to the larger.
        u.num_AP_passed = u.num_AP_passed or 0
        s.num_AP_passed = s.num_AP_passed or 0
        ap = (min(u.num_AP_passed, s.num_AP_passed) /
              max(u.num_AP_passed, s.num_AP_passed)
              if u.num_AP_passed != 0 and s.num_AP_passed != 0 else 0)

        # Similar major?  Any overlap between the two majors of each.
        major_set = [s.major_1, s.major_2]
        if u.major_1 in major_set or u.major_2 in major_set:
            major = 1

        # Similar SAT subject tests?  Running average of closeness over the
        # subjects both students took.
        subject_pairs = [
            [u.SAT_literature, s.SAT_literature],
            [u.SAT_US_hist, s.SAT_US_hist],
            [u.SAT_world_hist, s.SAT_world_hist],
            [u.SAT_math_I, s.SAT_math_I],
            [u.SAT_math_II, s.SAT_math_II],
            [u.SAT_eco_bio, s.SAT_eco_bio],
            [u.SAT_mol_bio, s.SAT_mol_bio],
            [u.SAT_chemistry, s.SAT_chemistry],
            [u.SAT_physics, s.SAT_physics],
        ]
        number_of_same_taken = 0
        for pair in subject_pairs:
            pair[0] = pair[0] or 0
            pair[1] = pair[1] or 0
            if pair[0] != 0 and pair[1] != 0:
                subject = (subject * number_of_same_taken +
                           (600 - float(abs(pair[1] - pair[0]))) / 600)
                number_of_same_taken += 1
                subject = subject / number_of_same_taken

        # Weighted average of the six signals, normalized by total weight.
        s.similar_score = (0.05 * major + 0.10 * subject + 0.05 * hs +
                           0.05 * ap + 0.2 * gpa + 0.2 * composite) / (
                               0.05 + 0.10 + 0.05 + 0.05 + 0.2 + 0.2)
        students_with_score.append(s)
        print(
            s.userid,
            "Similarity Score:", s.similar_score,
            "Before Weights:", major, subject, hs, ap, gpa, composite,
            "After Weights:", 0.05 * major, 0.10 * subject, 0.05 * hs,
            0.05 * ap, 0.2 * gpa, 0.2 * composite,
        )

    students_with_score.sort(key=lambda x: x.similar_score, reverse=True)
    return students_with_score[:100]
def avg_win_error(self):
    """Return the single overall average of the ``error`` field."""
    queryset = super().get_queryset().all()
    aggregates = queryset.aggregate(Avg('error'))
    return aggregates['error__avg']
def get_rating(self, obj):
    """Average review score for *obj*; None when absent or zero."""
    avg_score = obj.reviews.all().aggregate(Avg('score')).get('score__avg')
    if avg_score:
        return avg_score
    return None
def get_queryset(self):
    """Return the course queryset, optionally annotated with stats.

    ``?allstats`` adds rating/workload/grade aggregates, ``?simplestats``
    a reduced set, and ``?recent`` restricts to courses last taught in the
    past five years.  Always ordered by course number.
    """
    params = self.request.query_params
    queryset = self.queryset

    if 'allstats' in params:
        # Overall rating is the mean of the three per-review rating averages.
        overall_rating = (
            Avg('review__instructor_rating')
            + Avg('review__enjoyability')
            + Avg('review__recommendability')
        ) / 3
        queryset = queryset.prefetch_related('review_set').annotate(
            # ratings
            average_instructor=Avg('review__instructor_rating'),
            average_fun=Avg('review__enjoyability'),
            average_recommendability=Avg('review__recommendability'),
            average_difficulty=Avg('review__difficulty'),
            average_rating=overall_rating,
            # workload
            average_hours_per_week=Avg('review__hours_per_week'),
            average_amount_reading=Avg('review__amount_reading'),
            average_amount_writing=Avg('review__amount_writing'),
            average_amount_group=Avg('review__amount_group'),
            average_amount_homework=Avg('review__amount_homework'),
            # grades
            # TODO: average_gpa should be fixed
            average_gpa=Avg('coursegrade__average', distinct=True),
            a_plus=Sum('coursegrade__a_plus', distinct=True),
            a=Sum('coursegrade__a', distinct=True),
            a_minus=Sum('coursegrade__a_minus', distinct=True),
            b_plus=Sum('coursegrade__b_plus', distinct=True),
            b=Sum('coursegrade__b', distinct=True),
            b_minus=Sum('coursegrade__b_minus', distinct=True),
            c_plus=Sum('coursegrade__c_plus', distinct=True),
            c=Sum('coursegrade__c', distinct=True),
            c_minus=Sum('coursegrade__c_minus', distinct=True),
            d_plus=Sum('coursegrade__d_plus', distinct=True),
            d=Sum('coursegrade__d', distinct=True),
            d_minus=Sum('coursegrade__d_minus', distinct=True),
            f=Sum('coursegrade__f', distinct=True),
            ot=Sum('coursegrade__ot', distinct=True),
            drop=Sum('coursegrade__drop', distinct=True),
            withdraw=Sum('coursegrade__withdraw', distinct=True),
            total_enrolled=Sum('coursegrade__total_enrolled', distinct=True),
        )
    elif 'simplestats' in params:
        overall_rating = (
            Avg('review__instructor_rating')
            + Avg('review__enjoyability')
            + Avg('review__recommendability')
        ) / 3
        queryset = queryset.prefetch_related('review_set').annotate(
            average_gpa=Avg('coursegrade__average'),
            average_difficulty=Avg('review__difficulty'),
            average_rating=overall_rating,
        )

    if 'recent' in params:
        latest_semester = Semester.latest()
        queryset = queryset.filter(
            semester_last_taught__year__gte=latest_semester.year - 5)

    return queryset.order_by('number')
from django.db.models import Count, Sum, Avg
from charactercreator.models import Character, Fighter, Mage, Cleric, Thief, Necromancer
from armory.models import Item, Weapon

# ORM exploration script: counts and averages over the sample database.
# Uses .count() everywhere — len(queryset) would fetch every row just to
# count them, while .count() runs a single SQL COUNT.

# How many total Characters are there?
Character.objects.count()  # 302
# How many of each specific subclass?
Fighter.objects.count()  # 68
# Mages that are NOT necromancers (Necromancer subclasses Mage).
Mage.objects.filter(necromancer__isnull=True).count()  # 97
Cleric.objects.count()  # 75
Thief.objects.count()  # 51
Necromancer.objects.count()  # 11
# How many total Items?
Item.objects.count()  # 174
# How many of the Items are weapons?
Weapon.objects.count()  # 37
# How many are not?
Item.objects.filter(weapon__isnull=True).count()  # 137
# On average, how many Items does each Character have?
Character.objects.all().annotate(count=Count('inventory')).aggregate(Avg('count'))  # 2.9735099337748343
# On average, how many Weapons does each character have?
Character.objects.all().annotate(count=Count('inventory__weapon')).aggregate(Avg('count'))  # 0.6721854304635762
def index(request):
    """Demo view: print the average book price and the SQL that produced it."""
    # aggregate() returns a plain dict like {'avgaaa': <avg or None>}.
    result = Book.objects.aggregate(avgaaa=Avg('price'))
    print(result)
    # print(result.query)  # not possible: aggregate() returns a dict, not a
    # QuerySet, so there is no .query attribute to inspect.
    # Last SQL statement Django executed (connection.queries is only
    # populated when DEBUG is enabled — verify in settings).
    print(connection.queries[-1])
    return HttpResponse('index')
# ORM filtering examples / study notes.
players.objects.filter(esalary__range=(200, 8000))

# /////// or condition /////
obj = players.objects.filter(ename__startswith='s') | players.objects.filter(
    esalary__gt=5000)
from django.db.models import Q
# BUG FIX: was lowercase q(...), which is a NameError — Q is the class.
obj = players.objects.filter(
    Q(ename__startswith='s') | Q(esalary__range=(50, 500)))

# //// and condition ////
obj = players.objects.filter(ename__startswith='s', esalary__range=(50, 500))
obj = players.objects.filter(
    Q(ename__startswith='s') & Q(esalary__range=(50, 500)))

# //// not ///////
obj = players.objects.exclude(esalary__range=(50, 500))
players.objects.filter(~Q(esalary__range=(50, 500)))

# ////////////// working with multiple tables ////////////
# /////////// union /////////////////////////
a = players.objects.filter(ename__startswith='l')
b = matches.objects.filter(
    escore__gt=5000)  # =======> (both tables should have same columns and names)
c = a.union(b)
players.objects.all().values_list("name", "email")

# /////////// aggregate /////
from django.db.models import Avg, Sum, Min, Max, Count
# BUG FIX: was .aggegate(...) — a typo that raises AttributeError.
players.objects.all().aggregate(Avg('esalary'))
players.objects.all().order_by("esalary")
players.objects.all().order_by("-esalary")[0]
from django.db.models.functions import Lower
players.objects.all().order_by(Lower("ename"))

# ///////////// model inheritance //////////
# abstract base inheritance, multitable, multiple a,b,c(a,b), multi level
def get(self, request):
    """Return per-session aggregate stats (avg/max/min of exam scores,
    attempts and elapsed days) for every user in the requester's company,
    as a JSON payload for the admin dashboard.
    """
    data = {'id': request.user.id}
    # requestingUser = User.objects.get(id=data['id'])
    # NOTE(review): the user id is hard-coded to 2 (debug leftover?);
    # the commented line above looks like the intended lookup — confirm.
    requestingUser = User.objects.get(id=2)
    userProfile = Userprofile.objects.get(userID=requestingUser)
    # Everyone in the same company, ordered for stable output.
    companyUsers = Userprofile.objects.filter(
        companyID=userProfile.companyID).order_by('firstname')
    # realUsers = User.objects.filter(id__in=[userID['userID'] for userID in list(companyUsers.values('userID'))])
    realUsers = User.objects.filter(
        id__in=companyUsers.values_list('userID', flat=True))
    sessions = Session.objects.filter(
        user__in=realUsers).order_by('sessionNumber')
    # One entry per session number (1..12) in each of these parallel lists.
    sessionElapsedDaysAllList = []
    examScoreAvgList = []
    examAttemptsAvgList = []
    sessionElapsedDaysAvgList = []
    examScoreMaxList = []
    examAttemptsMaxList = []
    sessionElapsedDaysMaxList = []
    examScoreMinList = []
    examAttemptsMinList = []
    sessionElapsedDaysMinList = []
    startDates = []
    endDates = []
    # Things get a bit ugly here, this should be improved, basically here we
    # calculate all the averages and sums and mins of the whole set of users
    # for the admin screen not yet released for public.
    for x in range(1, 13):
        # Each aggregate is computed over the exams linked to sessions with
        # this session number; result dicts keep Django's default keys
        # ('examScore__avg', 'attempts__avg', ...).
        examScoreAvgList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Avg('examScore')))
        examAttemptsAvgList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Avg('attempts')))
        examScoreMaxList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Max('examScore')))
        examAttemptsMaxList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Max('attempts')))
        examScoreMinList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Min('examScore')))
        examAttemptsMinList.append(
            Exam.objects.filter(id__in=sessions.filter(
                sessionNumber=x).values_list(
                    'examID_id', flat=True)).aggregate(Min('attempts')))
        startDates.append(
            sessions.filter(sessionNumber=x).values_list('startDate',
                                                         flat=True))
        endDates.append(
            sessions.filter(sessionNumber=x).values_list('endDate',
                                                         flat=True))
    for x in range(0, 12):
        # Elapsed days per user for session x; 0 when still open (no end
        # date).  NOTE(review): the inner range uses len(startDates[0]) for
        # every x — this assumes each session number has the same number of
        # rows as session 1; verify against the data model.
        sessionElapsedDaysAllList.append([
            (endDates[x][y] - startDates[x][y]).days
            if endDates[x][y] != None else 0
            for y in range(0, len(startDates[0]))
        ])
        sessionElapsedDaysAvgList.append(mean(
            sessionElapsedDaysAllList[x]))
        sessionElapsedDaysMaxList.append(max(sessionElapsedDaysAllList[x]))
        sessionElapsedDaysMinList.append(min(sessionElapsedDaysAllList[x]))
    # Each list below is indexed by session number - 1 (12 entries).
    response = {
        'avgs': {
            'scores':
            [theList['examScore__avg'] for theList in examScoreAvgList],
            'attempts':
            [theList['attempts__avg'] for theList in examAttemptsAvgList],
            'days': sessionElapsedDaysAvgList
        },
        'maxes': {
            'scores':
            [theList['examScore__max'] for theList in examScoreMaxList],
            'attempts':
            [theList['attempts__max'] for theList in examAttemptsMaxList],
            'days': sessionElapsedDaysMaxList
        },
        'mins': {
            'scores':
            [theList['examScore__min'] for theList in examScoreMinList],
            'attempts':
            [theList['attempts__min'] for theList in examAttemptsMinList],
            'days': sessionElapsedDaysMinList
        }
    }
    return JsonResponse(response, safe=False)
def get_report_summary(drops, total_log_count):
    """Build chart and table summaries for a mapping of drop-type -> queryset.

    *drops* maps each drop model's RELATED_NAME to a queryset of its rows;
    *total_log_count* is the number of runs, used for drop-chance percentages.
    """
    summary = {
        'table': {},
        'chart': [],
    }
    # Chart data: list of {'drop': <string>, 'count': <int>}
    # Table data: dict (by drop type) of lists of items which drop, with stats. 'count' is only required stat.
    for drop_type, qs in drops.items():
        if drop_type == models.ItemDrop.RELATED_NAME:
            # Chart excludes currency
            chart_data = list(
                qs.exclude(item__category=GameItem.CATEGORY_CURRENCY, ).values(
                    name=F('item__name'), ).annotate(
                        count=Count('pk'), ).filter(
                            count__gt=0).order_by('-count'))
            # Per-item quantity stats plus drop chance / average per run.
            table_data = list(
                qs.values(
                    name=F('item__name'),
                    icon=F('item__icon'),
                ).annotate(
                    count=Count('pk'),
                    min=Min('quantity'),
                    max=Max('quantity'),
                    avg=Avg('quantity'),
                    drop_chance=Cast(Count('pk'), FloatField()) /
                    total_log_count * 100,
                    avg_per_run=Cast(Sum('quantity'), FloatField()) /
                    total_log_count,
                ).filter(count__gt=0).order_by('item__category', '-count'))
        elif drop_type == models.MonsterDrop.RELATED_NAME:
            # Monster drops in chart are counted by stars
            chart_data = list(
                qs.values(name=Concat(Cast('grade', CharField()),
                                      Value('⭐ Monster'))).annotate(
                    count=Count('pk')).filter(
                        count__gt=0).order_by('-count'))
            # Element codes are translated to display values afterwards.
            table_data = replace_value_with_choice(
                list(
                    qs.values(
                        name=F('monster__name'),
                        slug=F('monster__bestiary_slug'),
                        icon=F('monster__image_filename'),
                        element=F('monster__element'),
                        can_awaken=F('monster__can_awaken'),
                        is_awakened=F('monster__is_awakened'),
                        stars=F('monster__base_stars'),
                    ).annotate(
                        count=Count('pk'),
                        drop_chance=Cast(Count('pk'), FloatField()) /
                        total_log_count * 100,
                    )), {'element': Monster.ELEMENT_CHOICES})
        else:
            # Chart can is name, count only
            # Derive a display name from the related name, e.g.
            # "rune_drops" -> "Rune Drop" (rstrip('s') drops the plural).
            item_name = ' '.join(
                [s.capitalize() for s in drop_type.split('_')]).rstrip('s')
            count = qs.aggregate(count=Count('pk'))['count']
            if count > 0:
                chart_data = [{
                    'name': item_name,
                    'count': count,
                }]
            else:
                chart_data = []
            # Table data based on item type
            if drop_type == models.MonsterPieceDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values(
                            name=F('monster__name'),
                            icon=F('monster__image_filename'),
                            element=F('monster__element'),
                            count=Count('pk'),
                            min=Min('quantity'),
                            max=Max('quantity'),
                            avg=Avg('quantity'),
                        )), {'element': Monster.ELEMENT_CHOICES})
            elif drop_type == models.RuneDrop.RELATED_NAME:
                # Runes get three separate breakdowns: set, slot, quality.
                table_data = {
                    'sets':
                    replace_value_with_choice(
                        list(
                            qs.values('type').annotate(
                                count=Count('pk')).order_by('type')),
                        {'type': Rune.TYPE_CHOICES}),
                    'slots':
                    list(
                        qs.values('slot').annotate(
                            count=Count('pk')).order_by('slot')),
                    'quality':
                    replace_value_with_choice(
                        list(
                            qs.values('quality').annotate(
                                count=Count('pk')).order_by('quality')),
                        {'quality': Rune.QUALITY_CHOICES}),
                }
            elif drop_type == models.RuneCraftDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values('type', 'rune', 'quality').annotate(
                            count=Count('pk'),
                        ).order_by('type', 'rune', 'quality')),
                    {
                        'type': Rune.TYPE_CHOICES,
                        'quality': Rune.QUALITY_CHOICES
                    })
            elif drop_type == models.DungeonSecretDungeonDrop.RELATED_NAME:
                table_data = replace_value_with_choice(
                    list(
                        qs.values(
                            name=F(
                                'level__dungeon__secretdungeon__monster__name'
                            ),
                            element=
                            F('level__dungeon__secretdungeon__monster__element'
                              ),
                            icon=
                            F('level__dungeon__secretdungeon__monster__image_filename'
                              ),
                        ).annotate(count=Count('pk'), )),
                    {'element': Monster.ELEMENT_CHOICES})
            else:
                # Unknown drop types fail loudly rather than silently.
                raise NotImplementedError(
                    f"No summary table generation for {drop_type}")
        summary['chart'] += chart_data
        if table_data:
            summary['table'][drop_type] = table_data
    return summary
def getContext(city,country,state,result,request,query,types,bedno,bathno,highlight):
    """Assemble the template context for a property search that HAS results.

    *result* is a non-empty Property queryset; per-bedroom counts/averages,
    state/city averages and sale/offer stats are layered on top of it.
    NOTE(review): the bare `except:` blocks below mostly guard aggregate
    calls that return {'value__avg': None} rather than raising — confirm
    what failures they were meant to catch.
    """
    values=[int(property.value) for property in result]
    mid=median(values)
    count=result.count()
    # Used only for its .exists() check below.
    true=Property.objects.filter(city__icontains=city)
    # 1-bedroom count and average value; '0' sentinels on failure.
    try:
        onebedroomcount=result.filter(Q(bedroom=1)).count()
        onebedroom=result.filter(bedroom=1).aggregate(Avg('value'))
    except:
        onebedroomcount='0'
        onebedroom='0'
    request.session['onebedroomcount']=onebedroomcount
    # 2-bedroom stats.
    try:
        twobedroomcount=result.filter(Q(bedroom=2)).count()
        twobedroom=result.filter(Q(bedroom=2)).aggregate(Avg('value'))
        #pdb.set_trace()
    except:
        twobedroomcount='0'
        twobedroom='0'
    request.session['twobedroomcount']=twobedroomcount
    # 3-or-more-bedroom stats.
    try:
        morebedroomcount=result.filter(Q(bedroom__gte=3)).count()
        morebedroom=result.filter(Q(bedroom__gte=3)).aggregate(Avg('value'))
    except:
        morebedroomcount='0'
        morebedroom='0'
    request.session['morebedroomcount']=morebedroomcount
    # NOTE(review): both branches of this if/else are identical, so the
    # true.exists() test currently has no effect — likely an unfinished
    # fallback for cities with no match.
    if true.exists():
        averages=Property.objects.values('state').annotate(Avg('value')).filter(Country__icontains=country)
        cityaverage=Property.objects.values('city').annotate(Avg('value')).filter(state__icontains=state)
    else:
        averages=Property.objects.values('state').annotate(Avg('value')).filter(Country__icontains=country)
        cityaverage=Property.objects.values('city').annotate(Avg('value')).filter(state__icontains=state)
    listing=getlisting(result,3,request)
    # Mean property value computed by hand.  NOTE(review): raises
    # ZeroDivisionError if result is empty — callers must guarantee
    # a non-empty queryset.
    total=0
    counter=0
    for property in result:
        total+=property.value
        counter+=1
    average_price= math.ceil(total/counter)
    values=[int(property.value) for property in result]
    midlist=median(values)
    # Sale statistics for the state; '-' placeholders when .get() fails
    # (e.g. no matching Sale rows).
    try:
        pptysold=Sale.objects.values('state').annotate(Count('buyer')).get(pptystate__icontains=state)
        avgdeposit=Sale.objects.values('pptystate').annotate(Avg('down_payt')).get(pptystate__icontains=state)
        mediandeposit=Sale.objects.filter(pptystate__icontains=state)
        values=[int(property.down_payt) for property in mediandeposit]
        mediansale=median(values)
    except:
        pptysold='-'
        avgdeposit='-'
        mediansale='-'
    try:
        noofoffer=Offer.objects.values('state').annotate(Count('offerprice')).get(state__icontains=state)
    except :
        noofoffer='-'
    #pdb.set_trace()
    context={
        'result':listing,
        'count':count,
        'state':query,
        'usestate':state,
        'city':city,
        'median':mid,
        'country':country,
        'averages':averages,
        'cityaverage':cityaverage,
        'topform':topForm(),
        'filterform':filterForm(),
        'onebedroom':onebedroom,
        'twobedroom':twobedroom,
        'morebedroom':morebedroom,
        'onebedroomcount':onebedroomcount,
        'twobedroomcount':twobedroomcount,
        'morebedroomcount':morebedroomcount,
        'averageprice':average_price,
        'medianprice':midlist,
        'avgdeposit':avgdeposit,
        'mediansale':mediansale,
        'noofoffer':noofoffer,
        'pptysold':pptysold,
        'hiddentype':types,
        'hiddenbedno':bedno,
        'hiddenbathno':bathno,
        'highlight':highlight,
        'today':date.today(),
    }
    return context
def get_no_result_context(query='',request='',types='',bedno='',bathno='',result='', highlight='home'):
    """Build the search context for a free-text *query* over Property fields.

    Mirrors getContext()'s stats (per-bedroom counts/averages, state/city
    averages, sale and offer stats).  Falls back to a "featured properties"
    context when the query matches nothing (``latest()`` raises) or any of
    the stats gathering fails.
    """
    # One reusable Q for "any field matches the query"; also reused for the
    # per-bedroom stats below instead of re-spelling the four-way OR.
    match = (Q(state__icontains=query) | Q(city__icontains=query) |
             Q(Country__icontains=query) | Q(street__icontains=query))
    result = Property.objects.filter(match)
    result = topFormFilter(types, bedno, bathno, result)
    try:
        # latest() raises DoesNotExist when nothing matched, jumping to the
        # featured-properties fallback below.
        newest = result.latest('uploadDate')
        city = newest.city
        state = newest.state
        country = newest.Country
        listing = getlisting(result, 3, request)
        averages = Property.objects.values('state').annotate(
            Avg('value')).filter(Country__icontains=country)
        cityaverage = Property.objects.values('city').annotate(
            Avg('value')).filter(state__icontains=state)
        # BUG FIX: was `count=result.count` (bound method, no call) —
        # now an int, consistent with getContext().
        count = result.count()
        # 1-bedroom stats; '0' sentinels on failure.  BUG FIX: the except
        # blocks now also set the value variables (they previously set only
        # the *count*, leaving onebedroom/twobedroom/morebedroom unbound and
        # crashing with NameError when the context dict was built).
        try:
            onebedroomcount = Property.objects.filter(match, bedroom=1).count()
            onebedroom = Property.objects.filter(
                match, bedroom=1).aggregate(Avg('value'))
        except Exception:
            onebedroomcount = '0'
            onebedroom = '0'
        request.session['onebedroomcount'] = onebedroomcount
        # 2-bedroom stats.
        try:
            twobedroomcount = Property.objects.filter(match, bedroom=2).count()
            twobedroom = Property.objects.filter(
                match, bedroom=2).aggregate(Avg('value'))
        except Exception:
            twobedroomcount = '0'
            twobedroom = '0'
        request.session['twobedroomcount'] = twobedroomcount
        # 3-or-more-bedroom stats.  BUG FIX: this assignment previously read
        # `morebedroom=onebedroom=...`, silently overwriting the 1-bedroom
        # average with the 3+ bedroom one.
        try:
            morebedroomcount = Property.objects.filter(
                match, bedroom__gte=3).count()
            morebedroom = Property.objects.filter(
                match, bedroom__gte=3).aggregate(Avg('value'))
        except Exception:
            morebedroomcount = '0'
            morebedroom = '0'
        request.session['morebedroomcount'] = morebedroomcount
        # Mean and median property value (non-empty: latest() succeeded).
        total = 0
        counter = 0
        for property in result:
            total += property.value
            counter += 1
        average_price = math.ceil(total / counter)
        values = [int(property.value) for property in result]
        midlist = median(values)
        # Sale statistics for the state; '-' placeholders when .get() fails.
        try:
            pptysold = Sale.objects.values('state').annotate(
                Count('buyer')).get(pptystate__icontains=state)
            avgdeposit = Sale.objects.values('pptystate').annotate(
                Avg('down_payt')).get(pptystate__icontains=state)
            mediandeposit = Sale.objects.filter(pptystate__icontains=state)
            values = [int(property.down_payt) for property in mediandeposit]
            mediansale = median(values)
        except Exception:
            pptysold = '-'
            avgdeposit = '-'
            mediansale = '-'
        try:
            noofoffer = Offer.objects.values('state').annotate(
                Count('offerprice')).get(state__icontains=state)
        except Exception:
            noofoffer = '-'
        values = [int(property.value) for property in result]
        mid = median(values)
        context = {
            'result': listing,
            'count': count,
            'state': query,
            'usestate': state,
            'city': city,
            'median': mid,
            'country': country,
            'averages': averages,
            'cityaverage': cityaverage,
            'topform': topForm(),
            'filterform': filterForm(),
            'onebedroom': onebedroom,
            'twobedroom': twobedroom,
            'morebedroom': morebedroom,
            'onebedroomcount': onebedroomcount,
            'twobedroomcount': twobedroomcount,
            'morebedroomcount': morebedroomcount,
            'averageprice': average_price,
            'medianprice': midlist,
            'avgdeposit': avgdeposit,
            'mediansale': mediansale,
            'noofoffer': noofoffer,
            'pptysold': pptysold,
            'hiddentype': types,
            'hiddenbedno': bedno,
            'hiddenbathno': bathno,
            'highlight': highlight,
        }
        request.session['count'] = result.count()
        request.session['query'] = query
        return context
    except Exception:
        # Nothing matched (or stats failed): show featured properties.
        if not result.exists():
            result = Property.objects.filter(feature=True)
        listing = getlisting(result, 3, request)
        context = {'result': listing, 'topform': topForm(), 'feature': True,
                   'highlight': highlight}
        return context
def propertyDetail(request, property_id):
    """Render the detail page for one Property, with state-level stats,
    similar/sold listings, offer history and a simple financing estimate.
    """
    result=get_object_or_404(Property, pk=property_id)
    stateresult=Property.objects.filter(state__icontains=result.state)
    # NOTE(review): .count is not called here — `count` is the bound method,
    # not an int.  Django templates auto-call callables so it may still
    # render, but getContext() uses result.count(); confirm intent.
    count=stateresult.count
    values=[int(property.value) for property in stateresult]
    stateaverage=Property.objects.values('state').annotate(Avg('value')).filter(state__icontains=result.state)
    mid=median(values)
    # Sold properties similar to this one: state -> country -> anywhere.
    # NOTE(review): filter() itself doesn't raise; the fallbacks likely
    # trigger via AttributeError on `result.country` (the model field
    # elsewhere is spelled `Country`) — verify against the model.
    try:
        similarpptysold=Property.objects.filter(Q(sale__state__icontains='True'),state__icontains=result.state)
    except :
        try:
            similarpptysold=Property.objects.filter(Q(sale__state__icontains='True'),country__icontains=result.country)
        except :
            try:
                similarpptysold=Property.objects.filter(sale__state__icontains='True')
            except :
                similarpptysold='-'
    # Unsold similar properties, same fallback idea.
    try:
        similarppty=Property.objects.filter(state__icontains=result.state).exclude(sale__state__icontains='True')
    except :
        try:
            similarppty=Property.objects.filter(country__icontains=result.country).exclude(sale__state__icontains='True')
        except :
            similarppty='-'
    # Sale stats for the property's state; '-' when no Sale row matches.
    try:
        pptysold=Sale.objects.values('state').annotate(Count('buyer')).get(pptystate__icontains=result.state)
        avgdeposit=Sale.objects.values('state').annotate(Avg('down_payt')).get(pptystate__icontains=result.state)
    except:
        pptysold='-'
        avgdeposit='-'
    # Average offer made on this specific property.
    try:
        avgoffer=Offer.objects.values('property').annotate(Avg('offerprice')).get(property=result)
    except :
        avgoffer='no offer yet'
    # Nearby homes: city, then state, then country.
    try:
        similarhomes=Property.objects.filter(city__icontains=result.city)
    except :
        try:
            similarhomes=Property.objects.filter(city__icontains=result.state)
        except :
            similarhomes=Property.objects.filter(city__icontains=result.country)
    try:
        history=History.objects.filter(property=result)
    except :
        history='-'
    # Price band (+/-20%) and a simple financing sketch: 40% down payment,
    # remainder split over 60 installments.
    max_range=int(result.value)*1.2
    min_range=int(result.value)*0.8
    initialdownpayt=int(result.value)*0.4
    principal=(int(result.value)-initialdownpayt)/60
    request.session['state']=result.state
    request.session['city']=result.city
    request.session['country']=result.Country
    detailform=detailcontactform()
    # Carry-over counters from a previous search, if any.
    try:
        resultcount=request.session['count']
    except KeyError:
        resultcount=0
    try:
        resultquery=request.session['query']
    except KeyError:
        resultquery=result.street
    context={
        'result':result,
        'detailform':detailform,
        'median':mid,
        'stateaverage':stateaverage,
        'count':count,
        'avgoffer':avgoffer,
        'pptysold':pptysold,
        'avgdeposit':avgdeposit,
        'similarpptysold':similarpptysold,
        'similarhomes':similarhomes,
        'history':history,
        'minrange':min_range,
        'maxrange':max_range,
        'payt':initialdownpayt,
        'principal':principal,
        'nearbycount':resultcount,
        'nearbyquery':resultquery,
        'topform':topForm(),
        'detailtopform':detailContactFormTop(),
    }
    return render(request,'property/detail.html',context)
def facebook_ods_checkin(self):
    """Pushes model information to ODS

    Runs each minute.  Builds a list of metric datapoints (worker liveness,
    per-BTS activity, usage-event counts, timeseries averages and camped
    subscriber counts) and POSTs them to the Facebook ODS metrics endpoint.
    """
    app_id = settings.FACEBOOK['APP_ID']
    app_secret = settings.FACEBOOK['APP_SECRET']
    # App access token is "<app_id>|<app_secret>" per Facebook convention.
    ods_url = 'https://graph.facebook.com/ods_metrics?access_token=%s|%s' % \
        (app_id, app_secret)
    one_minute_ago = django.utils.timezone.now() - datetime.timedelta(
        minutes=1)
    datapoints = []
    # Heartbeat for the worker process itself.
    datapoints.append({
        'entity': 'etagecom.cloud.worker',
        'key': 'is_active',
        'value': 1
    })
    # iterator() streams rows instead of caching the whole queryset.
    for bts in BTS.objects.iterator():
        if bts.last_active:
            network = bts.network
            ent_name = 'etagecom.%s.%s.%s.%s' % (
                network.environment, network.id, network.name, bts.uuid)
            datapoint = {
                'entity': ent_name,
                'key': 'is_active',
                'value': int(bts.status == 'active')
            }
            datapoints.append(datapoint)
            # Only report detailed stats for towers active in the last run.
            if bts.last_active >= one_minute_ago:
                # usage events for this bts since it was last active, group and sum by kind
                # (empty .order_by() clears default ordering so GROUP BY works)
                for event_group in UsageEvent.objects.filter(bts=bts).filter(
                        date_synced__gte=bts.last_active
                ).values('kind').annotate(count_kind=Count('id')).order_by():
                    datapoints.append({
                        'entity': ent_name,
                        'key': event_group['kind'],
                        'value': event_group['count_kind']
                    })
                # stats for this bts since it was last active, group and average by key
                for timeseries_stat in TimeseriesStat.objects.filter(
                        bts=bts).filter(
                        date__gte=bts.last_active).values('key').annotate(
                        average_value=Avg('value')).order_by():
                    datapoints.append({
                        'entity': ent_name,
                        'key': timeseries_stat['key'],
                        'value': timeseries_stat['average_value']
                    })
                # subscribers that have camped within the T3212 window on this BTS are considered active
                t3212_mins = int(
                    ConfigurationKey.objects.get(network=bts.network,
                                                 key="GSM.Timer.T3212").value)
                t3212_window_start = bts.last_active - datetime.timedelta(
                    minutes=t3212_mins)
                camped_subscribers = Subscriber.objects.filter(bts=bts).filter(
                    last_camped__gte=t3212_window_start)
                datapoints.append({
                    'entity': ent_name,
                    'key': 'camped_subscribers',
                    'value': camped_subscribers.count()
                })
    # Single batched POST with all datapoints.
    requests.post(ods_url, data={'datapoints': json.dumps(datapoints)})
def review(request, quest_id, reviewed_id):
    """Reviews a user for a quest, where reviewed_id is the id of the user
    being reviewed.

    POST saves the review, recomputes the reviewee's average rating and
    flags the quest as reviewed; GET renders the review form (or redirects
    home if a review already exists).
    """
    logger.debug("User being reviewed is %s", reviewed_id)
    logger.debug("User who is reviewing is %s", request.user.id)
    # If the user tries to review himself
    if int(reviewed_id) == int(request.user.id):
        return redirect('home')
    if request.method == "POST":
        """ update the shipper review from the offerer
        calculate the final rating """
        # If the user tries to review himself
        # NOTE(review): redundant — the same guard already returned above.
        if int(reviewed_id) == int(request.user.id):
            return redirect('home')
        ratings = request.POST.getlist('rating')
        try:
            questdetails = Quests.objects.get(id=quest_id)
        except Quests.DoesNotExist:
            raise Http404
            # NOTE(review): unreachable — `raise` above exits first.
            return render(request, '404.html')
        try:
            reviewed = QuestrUserProfile.objects.get(id=reviewed_id)
        except QuestrUserProfile.DoesNotExist:
            raise Http404
            # NOTE(review): unreachable dead code.
            return render(request, '404.html')
        review_form = ReviewForm(data=request.POST)
        if review_form.is_valid():
            review = review_form.save(commit=False)
            review.quest_id = questdetails.id
            review.reviewed_id = reviewed.id
            # Four rating axes posted as a parallel list.
            review.rating_1 = float(ratings[0])
            review.rating_2 = float(ratings[1])
            review.rating_3 = float(ratings[2])
            review.rating_4 = float(ratings[3])
            review.save()
            logger.debug(
                "Review done by {0} on quest with id {1} which was complted by shipper {2}"
                .format(request.user, quest_id, reviewed.get_full_name()))
            # Recompute the reviewee's overall rating from all their reviews.
            final_rating = Review.objects.filter(
                reviewed=reviewed.id).aggregate(Avg('final_rating'))
            final_rating = round(final_rating['final_rating__avg'], 1)
            QuestrUserProfile.objects.filter(id=reviewed_id).update(
                rating=final_rating)
            # if the guy being reviewed is the questr
            # changing the types to integer for it's an integer comparison, IDs are integers
            if int(questdetails.questrs_id) == int(reviewed_id):
                logger.debug("shipper is reviewed")
                Quests.objects.filter(id=quest_id).update(
                    is_questr_reviewed='t')
            # if the guy being reviewed is the shipper
            # changing the types to integer for it's an integer comparison, IDs are integers
            if int(questdetails.shipper) == int(reviewed_id):
                logger.debug("questr is reviewed")
                Quests.objects.filter(id=quest_id).update(
                    is_shipper_reviewed='t')
            return redirect('home')
    # GET path: load the quest and reviewee for the form template.
    try:
        current_quest = Quests.objects.get(id=quest_id)
    except Quests.DoesNotExist:
        raise Http404
        # NOTE(review): unreachable dead code.
        return render('404.html', locals())
    try:
        shipper = QuestrUserProfile.objects.get(id=reviewed_id)
        full_name = shipper.get_full_name()
    except QuestrUserProfile.DoesNotExist:
        raise Http404
        # NOTE(review): unreachable dead code.
        return render('404.html', locals())
    # Only show the form when no review exists yet; otherwise fall through
    # to the redirect below.
    try:
        is_reviewed = Review.objects.get(quest=current_quest,
                                         reviewed=shipper)
        logger.debug(is_reviewed)
    except Review.DoesNotExist:
        is_reviewed = False
        return render(request, 'questrReview.html', locals())
    logger.debug("redirecting to home")
    return redirect('home')
def get_queryset(self):
    """Build the photographer queryset from the request's query params.

    Supports free-text name search, style/time filters, availability-date
    filtering (excluding photographers already reserved on that date), and
    several sort orders (last-online time, price, review count).
    """

    def review_score_queryset(qs, direction):
        # Count reviews per photographer (via the reviewed job) and
        # annotate each row with its count so we can order by it.
        # This logic was previously duplicated for review_des / review_asc.
        counts = {}
        for job_id in ReviewInfo.objects.values_list('reviewJob', flat=True):
            pid = JobInfo.objects.filter(job_id=job_id).values_list(
                'job_photographer_id', flat=True)
            counts[pid[0]] = counts.get(pid[0], 0) + 1
        return qs.annotate(
            score=Case(
                *[When(profile__user__id=k, then=Value(v))
                  for k, v in counts.items()],
                default=None,
                output_field=IntegerField(null=True),
            )).order_by(direction)

    # Filter by name (username / first / last, case-insensitive).
    user = self.request.query_params.get('user')
    if user is not None:
        nameset = Photographer.objects.filter(
            Q(profile__user__username__icontains=user)
            | Q(profile__user__first_name__icontains=user)
            | Q(profile__user__last_name__icontains=user))
    else:
        nameset = Photographer.objects.all()

    # Filter by style / available time when given.
    style = self.request.query_params.get('style')
    time = self.request.query_params.get('time')
    metafil = {'photographer_style__style_name': style,
               'photographer_avail_time__avail_time': time}
    filters = {k: v for k, v in metafil.items() if v is not None}
    paraset = nameset.filter(**filters)

    # Filter by date: keep photographers available on that weekday, then
    # drop those already reserved for a shoot on the exact date.
    date = self.request.query_params.get('date')
    if date is not None:
        day, month, year = (int(x) for x in date.split('_'))
        sel_date = (datetime.date(year, month, day)).strftime('%A')
        dateset = paraset.filter(photographer_avail_time__avail_date=sel_date)
        compdate = "-".join(date.split("_")[::-1])
        reserved = JobInfo.objects.filter(
            job_reservation__photoshoot_date=compdate).values_list(
                'job_photographer_id', flat=True)
        # BUGFIX: the old code tested ``dateset.filter(...) is not None``
        # per id — a queryset is never None, so the check was always true
        # and cost one extra query per reserved photographer. Excluding ids
        # that are not in ``dateset`` is a no-op anyway.
        queryset = dateset.filter(~Q(profile__user__id__in=list(reserved)))
    else:
        queryset = paraset

    # Sort.
    sort = self.request.query_params.get('sort')
    if sort == "time_des":
        return queryset.order_by("-photographer_last_online_time")
    elif sort == "time_asc":
        return queryset.order_by("photographer_last_online_time")
    elif sort == "price_des":
        return queryset.annotate(price=Avg(
            'photographer_avail_time__photographer_price')).order_by('-price')
    elif sort == "price_asc":
        return queryset.annotate(price=Avg(
            'photographer_avail_time__photographer_price')).order_by('price')
    elif sort == "review_des":
        return review_score_queryset(queryset, '-score')
    elif sort == "review_asc":
        return review_score_queryset(queryset, 'score')
    return queryset
def grade_summary_report(qs, grade_choices):
    """Summarise drops (items, monsters, runes) per grade for a log queryset.

    Returns a list of ``{'grade': name, 'drops': [...]}`` dicts where each
    drop entry carries count/min/max/avg stats plus drop chance and average
    per run relative to the total number of logs in ``qs``.
    """
    report_data = []
    drops = get_drop_querysets(qs)
    total_log_count = qs.count()
    if not total_log_count:
        # No logs at all: avoid division by zero in the stats below.
        return report_data

    # List of all drops. Currently only care about monsters, items and runes.
    all_items = GameItem.objects.filter(pk__in=drops['items'].values_list(
        'item', flat=True)) if 'items' in drops else []
    all_monsters = Monster.objects.filter(pk__in=drops['monsters'].values_list(
        'monster', flat=True)) if 'monsters' in drops else []
    # Guard: the old code called ``.values_list`` on ``[]`` when no runes
    # dropped, which would raise AttributeError.
    rune_stars = drops['runes'].values_list(
        'stars', flat=True) if 'runes' in drops else []

    for grade_id, grade_name in grade_choices:
        grade_qs = qs.filter(grade=grade_id)
        grade_report = {
            'grade': grade_name,
            'drops': [],
        }

        for item in all_items:
            result = drops['items'].filter(
                log__in=grade_qs,
                item=item,
            ).aggregate(
                count=Count('pk'),
                min=Min('quantity'),
                max=Max('quantity'),
                avg=Avg('quantity'),
                drop_chance=Cast(Count('pk'), FloatField()) / total_log_count * 100,
                avg_per_run=Cast(Sum('quantity'), FloatField()) / total_log_count,
            )
            grade_report['drops'].append({
                'type': 'item',
                'name': item.name,
                'icon': item.icon,
                **result,
            })

        for monster in all_monsters:
            result = drops['monsters'].filter(
                log__in=grade_qs, monster=monster).aggregate(
                    count=Count('pk'),
                    drop_chance=Cast(Count('pk'), FloatField()) / total_log_count * 100,
                    avg_per_run=Cast(Count('pk'), FloatField()) / total_log_count,
                )
            grade_report['drops'].append({
                'type': 'monster',
                'name': monster.name,
                'icon': monster.image_filename,
                'stars': monster.base_stars,
                **result,
            })

        # BUGFIX: this rune loop previously ran AFTER ``report_data.append``
        # and outside the grade loop, so rune stats were appended only to
        # the last grade's report using the last grade's queryset. It
        # belongs inside the per-grade loop.
        # NOTE(review): ``rune_stars`` may repeat a star value per rune row
        # — presumably a ``.distinct()`` was intended; confirm with callers.
        for stars in rune_stars:
            result = drops['runes'].filter(
                log__in=grade_qs,
                stars=stars,
            ).aggregate(
                count=Count('pk'),
                drop_chance=Cast(Count('pk'), FloatField()) / total_log_count * 100,
                avg_per_run=Cast(Count('pk'), FloatField()) / total_log_count,
            )
            grade_report['drops'].append({
                'type': 'rune',
                'name': f'{stars}⭐ Rune',
                **result,
            })

        report_data.append(grade_report)

    return report_data
def get_queryset(self):
    """Return published movies annotated with the requesting client's own
    rating count (``rating_user``) and the average star rating
    (``middle_star``)."""
    client_ip = get_client_ip(self.request)
    queryset = Movie.objects.filter(draft=False)
    queryset = queryset.annotate(
        rating_user=Count("ratings", filter=Q(ratings__ip=client_ip)))
    return queryset.annotate(middle_star=Avg("ratings__star"))
def data_oper(req):
    """Demo view exercising Django ORM features: lazy querysets, forward and
    reverse object lookups, double-underscore filters, and
    aggregate/annotate queries. Results are printed to stdout; the response
    is a plain "ok" string."""
    # 1. Django's lazy evaluation: building the queryset does not hit the DB.
    # BUGFIX: was ``models.Book.object`` (no such manager attribute).
    obj_set = models.Book.objects.filter(id=2)
    # 1.1 Cache behaviour demonstration:
    # for obj in obj_set:
    #     print(obj.title)
    # obj_set.update(title="yyy")  # without this line the two loops only hit
    #                              # the cache; with it both DB and cache are used
    # for obj in obj_set:
    #     print(obj.title)
    # 1.2 Use an iterator to avoid loading everything into the cache:
    # if obj_set.iterator():
    #     pass
    # 1.3 Slicing a queryset evaluates it against the DB:
    # obj = obj_set[0]
    # ##########################################################################
    # 2. Object-style lookups
    # 2.1 Forward lookup
    obj1 = models.Book.objects.filter(title="Go")[0]
    # City of this book's publisher.
    print(obj1.publisher.city)
    ret1 = models.Book.objects.first()
    print(ret1.title)
    print(ret1.price)
    print(ret1.publisher)
    # One-to-many: ``.publisher`` is a single object, not a queryset.
    print(ret1.publisher.name)
    # 2.2 Reverse lookup
    # BUGFIX: ``filter()`` returns a queryset which has no ``book_set``
    # accessor, and ``all`` was referenced without calling it; take the
    # first matching publisher instead.
    obj2 = models.Publish.objects.filter(name="人民出版社").first()
    print(obj2.book_set.all().values("title").distinct())  # distinct() deduplicates
    ret2 = models.Publish.objects.last()
    print(ret2.name)
    print(ret2.city)
    # The reverse accessor ``book_set`` yields a queryset of related Books.
    print(ret2.book_set.all())
    # ##########################################################################
    # 3. Double-underscore (__) single-table lookups
    models.Book.objects.filter(id__lt=10, id__gt=1)   # 1 < id < 10
    models.Book.objects.filter(id__in=[11, 22, 33])   # id in (11, 22, 33)
    models.Book.objects.exclude(id__in=[11, 22, 33])  # not in
    models.Book.objects.filter(name__contains="ven")   # case-sensitive
    models.Book.objects.filter(name__icontains="ven")  # case-insensitive
    models.Book.objects.filter(id__range=[1, 2])       # BETWEEN ... AND ...
    # BUGFIX: was ``title__startwith`` (invalid lookup name).
    models.Book.objects.filter(
        title__startswith="P")  # startswith, istartswith, endswith, iendswith
    # ##########################################################################
    # 4. Double-underscore (__) multi-table lookups
    # 4.1 Forward lookup (as filter condition)
    ret3 = models.Book.objects.filter(title='Python').values('id')
    print(ret3)  # [{'id': 1}]
    # 4.1.1 Forward lookup, one-to-many
    ret4 = models.Book.objects.filter(title='Python').values('publisher__city')
    print(ret4)  # [{'publisher__city': '北京'}]
    # 4.1.2 Forward lookup, many-to-many
    ret5 = models.Book.objects.filter(title='Python').values('author__name')
    print(ret5)
    ret6 = models.Book.objects.filter(author__name="alex").values('title')
    print(ret6)
    # Note: in forward lookups such as publisher__city / author__name, the
    # prefix is the related field declared on the Book model. One-to-many
    # and many-to-many are used the same way here.
    # 4.2 Reverse lookup (as filter condition)
    # 4.2.1 Reverse, one-to-many:
    ret8 = models.Publisher.objects.filter(book__title='Python').values('name')
    print(ret8)  # [{'name': '人大出版社'}] — ``book`` is the related model name
    ret9 = models.Publisher.objects.filter(
        book__title='Python').values('book__authors')
    print(ret9)  # [{'book__authors': 1}, {'book__authors': 2}]
    # 4.2.2 Reverse, many-to-many:
    ret10 = models.Author.objects.filter(book__title='Python').values('name')
    print(ret10)  # [{'name': 'alex'}, {'name': 'alvin'}]
    # Note: in reverse lookups, ``book`` in book__title refers to the Book
    # model; one-to-many and many-to-many are used the same way.
    # ##########################################################################
    # 5. Aggregate and annotate queries
    # 5.1 aggregate(*args, **kwargs): computes summary values over the whole
    #     queryset and returns a dict; each argument becomes one entry.
    # Average price of all books on sale:
    Book.objects.all().aggregate(Avg('price'))  # {'price__avg': 34.35}
    Book.objects.aggregate(
        average_price=Avg('price'))  # {'average_price': 34.35}
    # Max and min price across all books:
    Book.objects.aggregate(
        Avg('price'), Max('price'), Min('price')
    )  # {'price__avg': 34.35, 'price__max': Decimal('81.20'), 'price__min': Decimal('12.99')}
    # Total price of books written by alex:
    Book.objects.filter(author__name='alex').aggregate(Sum('price'))
    # 5.2 annotate(*args, **kwargs): computes a summary per object in the
    #     queryset — i.e. grouped aggregation.
    # Total book price per author (grouped by author__name):
    Book.objects.values("author__name").annotate(Sum('price'))
    # Cheapest book per publisher:
    Book.objects.values("publisher__name").annotate(Min('price'))
    return HttpResponse("前端打印ok")
def addon_aggregates():
    """Return site-wide averages over valid add-ons: the mean rating and the
    mean review count."""
    valid_addons = Addon.objects.valid()
    return valid_addons.aggregate(
        rating=Avg('average_rating'),
        reviews=Avg('total_reviews'),
    )
def store_air_quality_historic_data():
    """
    Stores max, min and avg air-quality values for yesterday, one
    HistoricAirQualityData row per active station. Per-station failures are
    logged and skipped.
    """
    logger.info('BEGIN -- running task: store_air_quality_historic_data')
    yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
    for station in AirQualityStation.objects.active():
        try:
            # @CHECKME maybe data should be filtered date > yesterday 00:06:00
            data = AirQualityData.objects.filter(
                station=station,
                datetime__year=yesterday.year,
                datetime__month=yesterday.month,
                datetime__day=yesterday.day,
            )
            # One DB round trip for all aggregates instead of twelve
            # separate ``aggregate()`` calls.
            stats = data.aggregate(
                Max('air_quality_index_max'),
                Min('air_quality_index_min'),
                Avg('air_quality_index'),
                # pm1
                Max('pm1_max'), Min('pm1_min'), Avg('pm1'),
                # pm2.5
                Max('pm25_max'), Min('pm25_min'), Avg('pm25'),
                # pm10 (the old comment here wrongly said "temperature")
                Max('pm10_max'), Min('pm10_min'), Avg('pm10'),
            )
            history = HistoricAirQualityData(
                station=station,
                date=yesterday.date(),
                air_quality_index_max=stats['air_quality_index_max__max'],
                air_quality_index_min=stats['air_quality_index_min__min'],
                # BUGFIX: previously read 'air_quality_index_min__avg' — a
                # key that Avg('air_quality_index') never produces, so the
                # save always failed with a KeyError swallowed by the
                # except below.
                air_quality_index_mean=stats['air_quality_index__avg'],
                pm1_max=stats['pm1_max__max'],
                pm1_min=stats['pm1_min__min'],
                pm1_mean=stats['pm1__avg'],
                pm25_max=stats['pm25_max__max'],
                pm25_min=stats['pm25_min__min'],
                pm25_mean=stats['pm25__avg'],
                pm10_max=stats['pm10_max__max'],
                pm10_min=stats['pm10_min__min'],
                pm10_mean=stats['pm10__avg'],
            )
            history.save()
            logger.info('station %s history save successfull' % (station.name))
        except Exception as e:
            logger.warn('station %s history save failed: %s' % (station.name, str(e)))  # noqa
    logger.info('END -- running task: store_air_quality_historic_data')
def profile(request, profile_id):
    """Render a user's profile page: leaderboard rank, points, coins, level
    progress bars and aggregate learning statistics."""
    user = get_object_or_404(User, id=profile_id)

    # Rank = 1-based position of this user in the points leaderboard.
    rank = 1
    for entry in TotalPoints.objects.order_by('-points'):
        if entry.user == user:
            break
        rank += 1

    user_points = TotalPoints.objects.get(user=user).points
    user_level = Level.objects.get(user=user)
    user_coins = TotalCoins.objects.get(user=user).coins

    # Topic-by-topic progress.
    all_topics_progress = get_all_topics_progress(user)

    # Only challenges the user actually spent time on count toward the
    # grade and time averages.
    timed_attempts = UserAttemptedChallenge.objects.filter(
        user=user, time_taken__gt=timedelta(seconds=0))

    grade_stats = timed_attempts.aggregate(Avg('grade'))
    average_grade = int(grade_stats['grade__avg']) if grade_stats['grade__avg'] else 0

    # Average solve time, rendered without the microsecond part.
    time_stats = timed_attempts.aggregate(Avg('time_taken'))
    average_time = str(time_stats['time_taken__avg']).split('.')[0]

    # Totals across all activity types.
    user_attempted_challenges = UserAttemptedChallenge.objects.filter(
        user=user).count()
    user_attempted_questions = UserAttemptedQuestion.objects.filter(
        user=user).count()
    user_watched_videos = UserWatchedVideo.objects.filter(user=user).count()
    user_read_notes = UserReadNotes.objects.filter(user=user).count()

    pre_level = user_level.level - 1
    next_level = user_level.level + 1
    progress = user_level.progress
    tries_left = 5 - user_level.tries

    # Progress bars. Steps 1-4 fill ``rel`` (25% each), steps 5-9 fill
    # ``stay`` (20% each), and steps 10-15 fill ``pro`` with the exact
    # legacy percentages below. Anything outside 1..15 leaves all bars at 0,
    # matching the original branch ladder this replaces.
    rel_progress = 0
    stay_progress = 0
    pro_progress = 0
    if 1 <= progress <= 15:
        rel_progress = min(progress, 4) * 25
        stay_progress = min(max(progress - 4, 0), 5) * 20
        pro_progress = {10: 16.5, 11: 33.3, 12: 50,
                        13: 66.6, 14: 83.3, 15: 100}.get(progress, 0)

    context = {
        'user': user,
        'rank': rank,
        'user_points': user_points,
        'user_level': user_level,
        'user_coins': user_coins,
        'all_topics_progress': all_topics_progress,
        'average_grade': average_grade,
        'user_attempted_challenges': user_attempted_challenges,
        'user_attempted_questions': user_attempted_questions,
        'user_watched_videos': user_watched_videos,
        'user_read_notes': user_read_notes,
        'pre_level': pre_level,
        'next_level': next_level,
        'tries_left': tries_left,
        'rel_progress': rel_progress,
        'stay_progress': stay_progress,
        'pro_progress': pro_progress,
        'average_time': average_time,
    }
    return render(request, 'pages/profile.html', context)
def dailyparam_json(request, url_id, abbr):
    """Return a JSON payload with the last seven daily averages of the
    parameter ``abbr`` for the station identified by ``url_id``.

    Day ``i``'s average covers records whose timestamps fall in the 24-hour
    window ending ``i`` days ago. The response carries both named per-day
    keys (today_avg .. six_days_ago_avg) and the same values as the
    ``daily_avg`` list, newest first.
    """
    # days[i] is the moment i days ago; window i is (days[i + 1], days[i]).
    # A single ``now`` anchors all windows (the original called now() once
    # per boundary, leaving microsecond-sized gaps between windows).
    now = datetime.datetime.now()
    days = [now + datetime.timedelta(days=-i) for i in range(8)]
    abbr_id = Parameter.objects.get(abbr=abbr).id

    def window_avg(start, end):
        # Average value of this station's records for the parameter within
        # the given time window (None when no records exist).
        return Record.objects. \
            filter(parameter=abbr_id). \
            filter(station__url_id=url_id). \
            filter(timestamp__range=(start, end)). \
            aggregate(average=Avg('value'))['average']

    # Replaces seven copy-pasted query blocks with one loop.
    daily_avg = [window_avg(days[i + 1], days[i]) for i in range(7)]

    data = dict(url_id=url_id,
                abbr=abbr,
                today_avg=daily_avg[0],
                yesterday_avg=daily_avg[1],
                two_days_ago_avg=daily_avg[2],
                three_days_ago_avg=daily_avg[3],
                four_days_ago_avg=daily_avg[4],
                five_days_ago_avg=daily_avg[5],
                six_days_ago_avg=daily_avg[6],
                daily_avg=daily_avg)
    return HttpResponse(json.dumps(data))
def timeseries_helper(request):
    """Build the timeseries response for a postcode between two
    "<MonthName> <Year>" dates.

    ``request`` is a dict-like object with 'postcode' and optional
    'from_date'/'to_date' keys (missing dates default to today). Returns
    either ``{'result': <error message>}`` or the populated
    ``response_data`` structure.
    """
    logger.info('%s comes to "/timeseries" endpoint', str(request))
    if 'postcode' not in request or request['postcode'] == '':
        logger.error('No postcode has found on request!')
        return {'result': 'No postcode has found on request!'}
    req_postcode = request['postcode']

    def parse_month_date(raw, label):
        # Parse a "<MonthName> <Year>" string into a date on the first of
        # that month. Returns (date, None) on success, (None, error_dict)
        # on failure. Replaces two near-identical inline parsers; the
        # to_date copy previously reported errors as "from_date".
        parts = str(raw).split(' ')
        if str(parts[0]).lower() not in months:
            logger.error('Month of the %s object has not been found!', label)
            return None, {
                'result':
                'Month of the %s object has not been found, use proper format!'
                % label
            }
        month = int(months[str(parts[0]).lower()])
        try:
            # Narrowed from a bare ``except:`` — only parse failures belong here.
            year = int(parts[1])
        except (IndexError, ValueError):
            logger.error('Cannot parse Year of %s object into a number!',
                         label)
            return None, {
                'result':
                'Cannot parse Year of %s object into a number!' % label
            }
        if year <= 0:
            logger.error(
                'Year of the %s object is less than or equal to 0!', label)
            return None, {
                'result':
                'Year of the %s object is less than or equal to 0!' % label
            }
        return datetime.date(year, month, 1), None

    if 'from_date' not in request or request['from_date'] == '':
        logger.info(
            'No from_date has found on request, taking today as from_date!')
        now = datetime.datetime.now()
        from_date = datetime.date(now.year, now.month, now.day)
    else:
        from_date, error = parse_month_date(request['from_date'], 'from_date')
        if error is not None:
            return error

    if 'to_date' not in request or request['to_date'] == '':
        logger.info(
            'No to_date has found on request, taking today as to_date!')
        now = datetime.datetime.now()
        to_date = datetime.date(now.year, now.month, now.day)
    else:
        to_date, error = parse_month_date(request['to_date'], 'to_date')
        if error is not None:
            return error

    if from_date > to_date:
        logger.warning('from_date object is closer today than to_date object!')
        from_date, to_date = to_date, from_date

    house_data = HouseData.objects.values('property_type', 'date_of_transfer')\
        .annotate(average_price=Avg('price'))\
        .filter(postcode=req_postcode,
                date_of_transfer__range=(from_date, to_date))\
        .order_by('date_of_transfer')
    logger.info('%s records has been found.', str(len(house_data)))

    # NOTE(review): ``response_data`` is not defined in this function —
    # presumably a module-level list of five {'data': []} entries indexed
    # by property type; confirm it is reset between calls.
    type_index = {'D': 0, 'S': 1, 'T': 2, 'F': 3, 'O': 4}
    for daily_data in house_data:
        daily_data_array = [
            daily_data['date_of_transfer'],
            math.floor(daily_data['average_price'])
        ]
        idx = type_index.get(daily_data['property_type'])
        if idx is not None:
            response_data[idx]['data'].append(daily_data_array)
    return response_data
def avg_client_pay(self):
    """Average ``client_change`` across every recorded sale."""
    # ``aggregate`` already spans the whole table; no ``.all()`` needed.
    return sale.objects.aggregate(Avg('client_change'))
def get_avg_rating(self):
    """Average review rating for this comic, or the string "No Ratings"
    when the comic has no reviews yet."""
    reviews = Review.objects.filter(comic__id=self.id)
    if not reviews:
        return "No Ratings"
    return reviews.aggregate(average=Avg("rating"))["average"]
def get_average_rating(self, obj):
    """Mean of the object's ratings rounded to the nearest half step, or 0
    when there are no ratings."""
    avg = obj.ratings.aggregate(Avg('rating')).get('rating__avg')
    # Doubling, rounding, then halving snaps the mean to 0.5 increments.
    return 0 if avg is None else round(avg * 2) / 2
def set_rating(book):
    """Recompute and persist ``book.rating`` as the average user rate."""
    aggregated = UserBookRelation.objects.filter(book=book).aggregate(
        rating=Avg('rate'))
    book.rating = aggregated.get('rating')
    book.save()
def annotations_lead_time(self):
    """Average ``lead_time`` over this project's non-ground-truth
    annotations."""
    project_annotations = Annotation.objects.filter(
        Q(task__project=self.id) & Q(ground_truth=False))
    result = project_annotations.aggregate(avg_lead_time=Avg('lead_time'))
    return result['avg_lead_time']
def recipes(request):
    """Recipe search endpoint.

    Validates the search form, persists an authenticated user's search
    preferences and location, builds a filter/sort query (text, user,
    video, category, equipment, cuisine, ingredients), and returns a
    paginated JSON list with a "next" page URL.
    """
    form = SearchForm(request.GET)
    if not form.is_valid():
        return JsonResponse(dict(form.errors.items()), status=406)

    limit = RECIPES_PER_PAGE
    query = Q()
    v_next = None

    # Persist the authenticated user's search preferences and location.
    if request.user.is_authenticated():
        preferences = Preferences.objects.get(pk=request.user.id)
        preferences.sort_by = form.cleaned_data[
            'sort_by'] if form.cleaned_data['sort_by'] else 1
        preferences.has_video = form.cleaned_data['has_video']
        preferences.category.clear()
        preferences.cuisine.clear()
        preferences.equipment.clear()
        preferences.ingredient.clear()
        preferences.save()
        categories = Category.objects.filter(
            pk__in=form.cleaned_data['category'])
        preferences.category.set(categories)
        cuisines = Cuisine.objects.filter(pk__in=form.cleaned_data['cuisine'])
        preferences.cuisine.set(cuisines)
        equipments = Equipment.objects.filter(
            pk__in=form.cleaned_data['equipment'])
        preferences.equipment.set(equipments)
        ingredients = Ingredient.objects.filter(
            pk__in=form.cleaned_data['ingredient'])
        preferences.ingredient.set(ingredients)
        preferences.save()

        # Update location.
        if form.cleaned_data['location_lat'] and form.cleaned_data[
                'location_lon']:
            request.user.location_lat = form.cleaned_data['location_lat']
            request.user.location_lon = form.cleaned_data['location_lon']
            request.user.save()

    # Filter.
    if form.cleaned_data['search']:
        lista = form.cleaned_data['search'].split()
        query &= reduce(lambda x, y: x | y,
                        [Q(title__icontains=word) for word in lista])
    if form.cleaned_data['user_id']:
        query &= Q(cook__id=form.cleaned_data['user_id'])
    # BUGFIX: this condition used to be applied twice; once is enough.
    if form.cleaned_data['has_video']:
        query &= ~Q(video_link="")
    if form.cleaned_data['category']:
        query &= Q(category_set__id__in=form.cleaned_data['category'])
    if form.cleaned_data['equipment']:
        query &= Q(equipment_set__id__in=form.cleaned_data['equipment'])
    if form.cleaned_data['cuisine']:
        query &= Q(cuisine_set__id__in=form.cleaned_data['cuisine'])

    # Order.
    if form.cleaned_data['sort_by'] == Sort.difficulty:
        sort = 'difficulty'
    elif form.cleaned_data['sort_by'] == Sort.calories:
        sort = '-calories'
    elif form.cleaned_data['sort_by'] == Sort.tastiness:
        sort = '-tastiness'
    elif form.cleaned_data['sort_by'] == Sort.time:
        sort = 'time'
    else:
        sort = '-views'

    # Execute query. (A dead ``recipes = Recipe`` assignment that was
    # immediately overwritten has been removed.)
    if query:
        recipes = Recipe.objects.filter(query)
    else:
        recipes = Recipe.objects.filter()

    # Remove recipes that have ingredients which are not included in the list.
    if form.cleaned_data['ingredient']:
        lista = form.cleaned_data['ingredient']
        # Get the names of the selected ingredients.
        temp_names = Ingredient.objects.filter(
            id__in=lista).values_list('name')
        ingredient_names = [str(i[0]) for i in temp_names]
        # Include other ingredients with similar names.
        ingredient_query = reduce(
            lambda x, y: x | y,
            [Q(name__icontains=name) for name in ingredient_names])
        temp_ids = Ingredient.objects.filter(ingredient_query).values_list(
            'id')
        ingredients_id = [str(i[0]) for i in temp_ids]
        recipes = recipes.annotate(
            found_ingredients=Count(
                Case(
                    When(ingredients__ingredient_id__in=ingredients_id,
                         then=1),
                    output_field=IntegerField()
                )
            ),
            missing_ingredients=Count(
                Case(
                    When(~Q(ingredients__ingredient_id__in=ingredients_id),
                         then=1),
                    output_field=IntegerField()
                )
            )
        ).exclude(found_ingredients=0)
        sort = "missing_ingredients"

    new_recipes = recipes \
        .annotate(difficulty=Coalesce(Avg('rating__difficulty'), 10),
                  tastiness=Coalesce(Avg('rating__tastiness'), -1),) \
        .order_by(sort)

    # TODO: More elaborate query for homepage
    skip = form.cleaned_data['skip']
    data = new_recipes[skip:skip + limit]

    # Build the next-page URL by replacing the skip parameter in the
    # current query string.
    if (len(new_recipes[skip + limit:])):
        path = request.path
        v_next = request.get_full_path()
        v_next = v_next.replace(path, '')
        v_next = re.sub(r'(\&)?skip=\d+', '', v_next)
        v_next += '&skip=%d' % (skip + limit)
        v_next = v_next.replace('?', '')
        v_next = '%s?%s' % (path, v_next)

    result = {
        "data": list(map(lambda x: x.to_json(), data)),
        "next": v_next,
    }
    return JsonResponse(result, safe=False)
def _submission_status_code(selected_option):
    """Map the submitted option string to a NominationSubmitted status code:
    'approved' -> 2, 'dismissed' -> 3, anything else -> 4."""
    option = selected_option.lower()
    if option == 'approved':
        return 2
    if option == 'dismissed':
        return 3
    return 4


def approve(request, submission_id):
    """Director approval view for a nomination submission.

    PUT (via the '_method' form field) updates the director's comment and
    the submission status. POST creates the comment, sets the status, and
    sends result/completion emails. GET renders show/edit/new depending on
    publication state and whether a director comment exists.
    """
    method = request.POST.get('_method', '').lower()
    if method == 'put':
        dc = DirectorComments.objects.get(
            nomination_submitted_id=submission_id, user=request.user)
        dc.comment = request.POST['comment']
        dc.save()
        nomination_submitted = NominationSubmitted.objects.get(
            id=submission_id)
        # Status mapping was duplicated between the PUT and POST branches;
        # both now share _submission_status_code.
        nomination_submitted.status = _submission_status_code(
            request.POST['selected_option'])
        nomination_submitted.updated_at = timezone.now()
        nomination_submitted.save()
        return redirect('nominate_app:approve',
                        submission_id=nomination_submitted.id)

    if request.method == "POST":
        dc = DirectorComments(nomination_submitted_id=submission_id,
                              user=request.user,
                              comment=request.POST['comment'])
        dc.save()
        nomination_submitted = NominationSubmitted.objects.get(
            id=submission_id)
        nomination_submitted.status = _submission_status_code(
            request.POST['selected_option'])
        nomination_submitted.updated_at = timezone.now()
        nomination_submitted.save()

        # To manager and jury, telling them results will be declared soon.
        subject_all = 'Results coming soon !!!'
        to_manager_and_tech_jury = list(chain(
            User.objects.filter(groups__name='Manager'),
            User.objects.filter(groups__name='Technical Jury Member')))
        for recipient in to_manager_and_tech_jury:
            message_value_html_template = render_to_string(
                'nominate_app/emails/director_final_submission.html',
                {'director_name': request.user.first_name,
                 'name': recipient.username,
                 'template': nomination_submitted.nomination.award_template})
            plain_message_value = strip_tags(message_value_html_template)
            send_mail(subject=subject_all,
                      from_email='*****@*****.**',
                      recipient_list=[str(recipient.email)],
                      html_message=message_value_html_template,
                      message=plain_message_value,
                      fail_silently=False)

        # To admins, telling them the process is complete.
        subject_completion = "completion of the cycle"
        to_admin = User.objects.filter(groups__name='Admin')
        for admin in to_admin:
            message_value_html_template = render_to_string(
                'nominate_app/emails/admins.html',
                {'admin_name': admin.username,
                 'template': nomination_submitted.nomination.award_template,
                 'link': str(os.environ['SERVER_NAME'] + reverse(
                     'nominate_app:approve', args=(submission_id,)))})
            plain_message_value = strip_tags(message_value_html_template)
            send_mail(subject=subject_completion,
                      from_email='*****@*****.**',
                      recipient_list=[str(admin.email)],
                      html_message=message_value_html_template,
                      message=plain_message_value,
                      fail_silently=False)
        return redirect('nominate_app:approval')

    # GET: show, edit, or new depending on state.
    nomination_submitted = NominationSubmitted.objects.get(id=submission_id)
    ratings = NominationRating.objects.filter(
        submission_id=nomination_submitted.id)
    avg_rating = NominationRating.objects.filter(
        submission_id=submission_id).aggregate(Avg('rating'))['rating__avg']
    created = DirectorComments.objects.filter(
        nomination_submitted=nomination_submitted, user=request.user).exists()
    if created:
        comment = nomination_submitted.director_comment.first().comment
        status = nomination_submitted.get_status(nomination_submitted.status)
        if nomination_submitted.is_published:
            return render(
                request, 'nominate_app/approvals/show.html', {
                    'selected_nomination': nomination_submitted,
                    'avg_rating': avg_rating,
                    'ratings': ratings,
                    'status': status,
                    'comment': comment
                })
        return render(
            request, 'nominate_app/approvals/edit.html', {
                'selected_nomination': nomination_submitted,
                'avg_rating': avg_rating,
                'ratings': ratings,
                'status': status,
                'comment': comment
            })
    return render(
        request, 'nominate_app/approvals/new.html', {
            'selected_nomination': nomination_submitted,
            'avg_rating': avg_rating,
            'ratings': ratings
        })