def combine_index(field_a, field_b):
    """Build a combined full-text search vector from two text values.

    ``field_a`` is indexed with weight "A" (highest relevance) and
    ``field_b`` with weight "B", both using the project's configured
    search configuration.
    """
    primary = SearchVector(Value(field_a), config=settings.SEARCH_CONFIG, weight="A")
    secondary = SearchVector(Value(field_b), config=settings.SEARCH_CONFIG, weight="B")
    return primary + secondary
def __init__(self, expression, text, replacement=Value(''), **extra):
    """Initialize a replace-style function: occurrences of *text* inside
    *expression* are substituted with *replacement* (empty string by default)."""
    args = (expression, text, replacement)
    super().__init__(*args, **extra)
def test_cast_from_value(self):
    """Casting a string Value('0') to IntegerField yields the integer 0."""
    qs = Author.objects.annotate(
        cast_integer=Cast(Value('0'), models.IntegerField()),
    )
    self.assertEqual(qs.get().cast_integer, 0)
def as_mysql(self, compiler, connection, **extra_context):
    """Compile for MySQL, working on a copy so *self* is never mutated."""
    clone = self.copy()
    exprs = clone.source_expressions
    # MySQL needs an explicit precision argument; default to the maximum.
    if len(exprs) < 2:
        exprs.append(Value(100))
    return clone.as_sql(compiler, connection, **extra_context)
def get_substr(self):
    """Return a Substr over the first source expression, starting at
    position 1 and running for the length given by the second expression."""
    expression = self.source_expressions[0]
    length = self.source_expressions[1]
    return Substr(expression, Value(1), length)
def test_update_TimeField_using_Value(self):
    """A TimeField can be bulk-updated through a typed Value expression."""
    one_oclock = datetime.time(1)
    Time.objects.create()
    Time.objects.update(time=Value(one_oclock, output_field=TimeField()))
    self.assertEqual(Time.objects.get().time, one_oclock)
def __init__(self, expression, string, **extra):
    """Accept either an expression or a plain value for *string*; plain
    values are wrapped in Value() so the parent always gets an expression."""
    string = string if hasattr(string, 'resolve_expression') else Value(string)
    super().__init__(expression, string, **extra)
def get_annotation(self, cte):
    """Expose the CTE's assessment_count, defaulting missing rows to 0."""
    count_col = cte.col.assessment_count
    return Coalesce(count_col, Value(0), output_field=IntegerField())
def build_coach_condition(self, role_visibility):
    """Single-element condition list: visibility must equal the COACH role."""
    condition = BooleanComparison(role_visibility, "=", Value(roles.COACH))
    return [condition]
def get_db_prep_save(self, value, connection):
    """Prepare *value* for storage.

    On MySQL the prepared bytes are wrapped in the server-side COMPRESS()
    function; every other backend receives the prepared value unchanged.
    """
    # Zero-argument super() — the explicit super(CompressedBinaryField, self)
    # form is a Python 2 leftover and inconsistent with the rest of the file.
    prepped_value = super().get_db_prep_save(value, connection)
    if connection.vendor == 'mysql':
        return Func(Value(prepped_value), function='COMPRESS')
    return prepped_value
def get_prep_lhs(self):
    """Return the LHS as an expression, wrapping raw values in Value()."""
    lhs = self.lhs
    if hasattr(lhs, "resolve_expression"):
        return lhs
    return Value(lhs)
def get(self, request, format=None):
    """List songs, filtered/annotated from query parameters.

    Supported params: albumId, name, genres (CSV), yearSince, yearTo,
    mark + mark_filter (lte/gte/exact/gt/lt), favourite, offset, sortMode.
    Returns at most 20 serialized songs starting at *offset*.
    """
    songs = Song.objects.all()
    # albumId replaces the base queryset with that album's songs.
    if 'albumId' in request.query_params:
        songs = Album.objects.get(
            id=request.query_params.get('albumId')).songs.all()
    # NOTE(review): a name filter also resets to all songs, discarding any
    # earlier albumId narrowing — confirm this is intended.
    if 'name' in request.query_params:
        songs = Song.objects.filter(
            Q(title__contains=request.query_params.get('name')) |
            Q(performer__contains=request.query_params.get('name')))
    if 'genres' in request.query_params:
        songs = songs.filter(
            genre__in=request.query_params.get('genres').split(','))
    if 'yearSince' in request.query_params:
        songs = songs.filter(
            year__gte=request.query_params.get('yearSince'))
    if 'yearTo' in request.query_params:
        songs = songs.filter(year__lte=request.query_params.get('yearTo'))
    # Correlated subquery: average mark per song. The dummy annotation +
    # values('dummy') forces a single-group aggregate in the subquery.
    marks_subquery = SongMark.objects.filter(song_id=OuterRef('id'))
    marks_subquery = marks_subquery.annotate(
        dummy=Value(1)).values('dummy').annotate(
        marks_avg=Avg('mark')).values_list('marks_avg')
    songs = songs.annotate(marks_avg=marks_subquery)
    # Same dummy-group trick for the per-song comment count.
    comments_subquery = SongComment.objects.filter(song_id=OuterRef('id'))
    comments_subquery = comments_subquery.annotate(
        dummy=Value(1)).values('dummy').annotate(
        count=Count('*')).values_list('count')
    songs = songs.annotate(comments_count=comments_subquery)
    # Filter on the average-mark annotation with a selectable comparison.
    if 'mark' in request.query_params:
        mark_filter = request.query_params.get('mark_filter')
        if mark_filter == 'lte':
            songs = songs.filter(
                marks_avg__lte=request.query_params.get('mark'))
        elif mark_filter == 'gte':
            songs = songs.filter(
                marks_avg__gte=request.query_params.get('mark'))
        elif mark_filter == 'exact':
            songs = songs.filter(
                marks_avg=request.query_params.get('mark'))
        elif mark_filter == 'gt':
            songs = songs.filter(
                marks_avg__gt=request.query_params.get('mark'))
        elif mark_filter == 'lt':
            songs = songs.filter(
                marks_avg__lt=request.query_params.get('mark'))
    # Favourite annotation (and filter) only exist for authenticated users.
    if request.user.id is not None:
        favourite_subquery = FavouriteSong.objects.filter(
            author_id=request.user.id, song_id=OuterRef('id')).values('id')
        songs = songs.annotate(favourite=favourite_subquery)
        if 'favourite' in request.query_params:
            if request.query_params.get('favourite') and request.query_params.get('favourite') != 'false':
                songs = songs.filter(favourite__isnull=False)
    if 'offset' in request.query_params:
        offset = int(request.query_params.get('offset'))
    else:
        offset = 0
    # sortMode is passed straight to order_by (e.g. 'year' or '-year').
    if 'sortMode' in request.query_params:
        songs = songs.order_by(request.query_params.get('sortMode'))
    # Fixed page size of 20.
    songs = songs[offset:offset + 20]
    serializer = SongSerializer(songs, many=True)
    return Response(serializer.data)
def as_sqlite(self, compiler, connection, **extra_context):
    """SpatiaLite's ST_Translate requires a z offset; supply 0 on a copy
    when the caller omitted it, leaving *self* untouched."""
    clone = self.copy()
    exprs = clone.source_expressions
    if len(exprs) < 4:
        exprs.append(Value(0))
    return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)
def __init__(self, config):
    """Store the search configuration, wrapping plain values in Value()."""
    super().__init__()
    self.config = config if hasattr(config, 'resolve_expression') else Value(config)
def phase01a(request, previewMode=False):
    """Phase 01a view.

    GET: serve up to 4 images plus all previously finalized questions.
    POST: spell-check submitted questions via the TextGears API, persist
    them with their answers, apply merge/move bookkeeping, and return 201.
    """
    # assignmentID for front-end submit javascript
    assignmentId = request.GET.get('assignmentId')
    if request.method == 'POST':
        postList = pushPostList(request)
        # Get the Q and Ans for the current question, they should be at
        # least one Q&A for all of the set.
        questions = request.POST.getlist('data_q[]')
        answers = request.POST.getlist('data_a[]')
        # retrieve the json data for updating skip count for the previous
        # questions (currently unused below)
        validation_list = request.POST.getlist('data[]')
        # Spell-correct each question through the TextGears API, taking the
        # first suggested replacement for every flagged word.
        # NOTE(review): API key is hardcoded in the URL — move to settings.
        correct_qs = []
        for q in questions:
            text = q.replace(' ', '+')
            url = f'https://api.textgears.com/check.php?text={text}&key=SFCKdx4GHmSC1j6H'
            response = requests.get(url)
            wordsC = response.json()
            for err in wordsC['errors']:
                bad = err['bad']
                good = err['better']
                if good:
                    q = q.replace(bad, good[0])
            correct_qs.append(q)
        # Query list for the old data in the table (currently unused below).
        old_Qs = list(
            Question.objects.filter(isFinal=True).values_list('text', 'id'))
        questions = Question.objects.bulk_create([
            Question(text=que, isFinal=False,
                     imageID=list(ImageModel.objects.filter(id__in=postList)),
                     hit_id=assignmentId) for que in correct_qs
        ])
        new_Qs = [
            (que.text, que.id) for que in questions
        ]  # list(map(attrgetter('text', 'id'), questions)) — unsure which is faster
        # Placeholder for an NLP back-end call that should decide which new
        # questions are accepted, merged, or moved; currently every new
        # question is accepted and both mappings are empty.
        acceptedList, id_merge, id_move = [que.id for que in questions], {}, {}
        id_merge = {int(k): v for k, v in id_merge.items()}
        id_move = {int(k): v for k, v in id_move.items()}
        Question.objects.filter(id__in=acceptedList).update(isFinal=True)
        # Store id_merge under mergeParent in the database
        id_merge_sql = Case(
            *[When(id=new, then=Value(old)) for new, old in id_merge.items()])
        Question.objects.filter(id__in=id_merge).update(
            mergeParent=id_merge_sql)
        # Answers attach to the merge target when their question was merged.
        answers = Answer.objects.bulk_create([
            Answer(question_id=id_merge.get(que.id, que.id),
                   text=ans,
                   hit_id=assignmentId,
                   imgset=-1)
            for que, ans in zip(questions, answers)
        ])
        # Move bookkeeping must be all-or-nothing.
        with transaction.atomic():
            id_move_sql = Case(*[
                When(question_id=bad, then=Value(good))
                for bad, good in id_move.items()
            ])
            Answer.objects.filter(question_id__in=id_move).update(
                question_id=id_move_sql)
            id_move_sql = Case(*[
                When(id=bad, then=Value(good))
                for bad, good in id_move.items()
            ])
            Question.objects.filter(id__in=id_move).update(
                isFinal=False, mergeParent=id_move_sql)
            Question.objects.filter(id__in=id_move.values()).update(
                isFinal=True)
        return HttpResponse(status=201)
    # Get rounds played in total and by the current player.
    rounds, roundsnum = popGetList(
        ImageModel.objects.filter(img__startswith=KEYRING).values_list(
            'id', flat=True))
    if len(rounds.post) >= ImageModel.objects.filter(
            img__startswith=KEYRING).count():
        # push all to waiting page
        return over(request, 'phase01a', assignmentId)
    # Sending up to 4 images at a time; pad the list with None placeholders.
    data = [i.img.url for i in ImageModel.objects.filter(id__in=roundsnum)]
    data.extend([None] * (3 - len(data)))
    if all([d is None for d in data]):
        return over(request, 'phase01a', assignmentId)
    # Get all the instructions (fall back to a dummy entry when empty).
    instructions = Phase01_instruction.get_queryset(Phase01_instruction) or [
        'none'
    ]
    # Get all of the previously finalized questions for the front-end.
    previous_questions = list(
        Question.objects.filter(isFinal=True).values_list('text', flat=True))
    return render(
        request, 'phase01a.html', {
            'url': data,
            'imgnum': roundsnum,
            'questions': previous_questions,
            'assignmentId': assignmentId,
            'previewMode': previewMode,
            'instructions': instructions,
            'NUMROUNDS': NUMROUNDS[phase01a.__name__],
            'object': OBJECT_NAME_PLURAL
        })
def build_kind_condition(self, kind_id, value, comparison="="):
    """Single-element condition list comparing kind_id against a literal."""
    clause = BooleanComparison(kind_id, comparison, Value(value))
    return [clause]
def as_sqlite(self, compiler, connection):
    """Compile for SpatiaLite, which requires an explicit z offset for
    ST_Translate; a default of 0 is appended when the caller omitted it.

    Works on a copy: the original appended to ``self.source_expressions``
    directly, permanently mutating the expression as a side effect of
    compilation.
    """
    clone = self.copy()
    if len(clone.source_expressions) < 4:
        # Always provide the z parameter for ST_Translate
        clone.source_expressions.append(Value(0))
    return super(Translate, clone).as_sqlite(compiler, connection)
def __init__(self, **fields):
    """Flatten keyword pairs into alternating key/value positional
    arguments: name becomes a Value(), the expression follows it."""
    flattened = []
    for name, expression in fields.items():
        flattened.append(Value(name))
        flattened.append(expression)
    super().__init__(*flattened)
def test_update_UUIDField_using_Value(self):
    """A UUIDField can be bulk-updated through a typed Value expression."""
    fixed_uuid = uuid.UUID('12345678901234567890123456789012')
    UUID.objects.create()
    UUID.objects.update(uuid=Value(fixed_uuid, output_field=UUIDField()))
    self.assertEqual(UUID.objects.get().uuid, fixed_uuid)
def save(self, *args, **kwargs):
    """Save the project, rebuilding its full-text search vector first.

    Collects every searchable attribute value (recursing into fieldsets)
    into Value() expressions, assigns them to ``vector_column`` as a
    SearchVector, then saves. After the first save, a zero-padded
    ``pino_number`` derived from the primary key is filled in with a
    second save.
    """
    def add_fieldset_field_for_attribute(search_fields, attr, fieldset):
        # Collect values stored under *attr*'s identifier inside a fieldset,
        # following the fieldset_attribute_target chain to its source.
        key = attr.identifier
        while attr.fieldset_attribute_target.count():
            attr = attr.fieldset_attribute_target.get().attribute_source
        if not fieldset:
            fieldset = self.attribute_data.get(attr.identifier)
        if not fieldset:
            return
        for field in fieldset:
            value = field.get(key)
            if not value:
                continue
            # Nested fieldsets recurse; plain values become search terms.
            if type(
                    value
            ) is list and attr.value_type == Attribute.TYPE_FIELDSET:
                sources = FieldSetAttribute.objects.filter(
                    attribute_source__identifier=key)
                for source in sources:
                    add_fieldset_field_for_attribute(
                        search_fields, source.attribute_target, value)
            else:
                search_fields.add(
                    Value(check_get_name(value),
                          output_field=models.TextField()))

    def add_search_field_for_attribute(search_fields, attr):
        # Route an attribute to the right collection strategy: static model
        # property, plain attribute_data value, or fieldset recursion.
        if attr.static_property:
            value = getattr(self, attr.static_property, None)
            if value:
                search_fields.add(
                    Value(check_get_name(value),
                          output_field=models.TextField()))
        elif not attr.fieldset_attribute_target.count():
            value = self.attribute_data.get(attr.identifier)
            if value and attr.value_type != Attribute.TYPE_FIELDSET:
                search_fields.add(
                    Value(check_get_name(value),
                          output_field=models.TextField()))
        else:
            add_fieldset_field_for_attribute(search_fields, attr, None)
        return

    def check_get_name(value):
        # Strings that parse as a v4 UUID are treated as personnel ids and
        # resolved to the person's name; everything else passes through.
        if type(value) != str:
            return value
        from uuid import UUID
        try:
            _ = UUID(value, version=4)
        except ValueError:
            return value
        return get_in_personnel_data(value, 'name', False)

    # TODO: check if required
    # set_ad_data_in_attribute_data(self.attribute_data)
    search_fields = set()
    for attr in Attribute.objects.filter(searchable=True):
        add_search_field_for_attribute(search_fields, attr)
    # Subtype and owning user are always searchable.
    search_fields.add(Value(self.subtype, output_field=models.TextField()))
    search_fields.add(Value(self.user, output_field=models.TextField()))
    self.vector_column = SearchVector(*list(search_fields))
    super(Project, self).save(*args, **kwargs)
    # pino_number needs the pk, so it can only be assigned after the first
    # save; the recursive save() call persists it (runs at most once).
    if not self.pino_number:
        self.pino_number = str(self.pk).zfill(7)
        self.save()
def apply_kml_annotations(qs):
    """Annotate a meeting queryset with KML-ready display fields, built
    entirely in SQL:

    - ``address``: comma-separated postal address, skipping blank parts.
    - ``description``: weekday name, formatted start time, the address,
      and any extra location info in parentheses.
    - ``coordinates``: "longitude,latitude,0" as KML expects.

    Each Case(...) emits a separator (', ', space, or parenthesis) only
    when the component before it is non-blank AND at least one later
    component is non-blank, so no dangling separators appear.
    """
    return qs.annotate(
        address=Concat(
            'meetinginfo__location_text',
            # ', ' after location_text when any later component follows.
            Case(When((~Q(meetinginfo__location_text=None) &
                       ~Q(meetinginfo__location_text='')) &
                      ((~Q(meetinginfo__location_street=None) &
                        ~Q(meetinginfo__location_street='')) |
                       (~Q(meetinginfo__location_city_subsection=None) &
                        ~Q(meetinginfo__location_city_subsection='')) |
                       (~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_street',
            # ', ' after the street when any later component follows.
            Case(When((~Q(meetinginfo__location_street=None) &
                       ~Q(meetinginfo__location_street='')) &
                      ((~Q(meetinginfo__location_city_subsection=None) &
                        ~Q(meetinginfo__location_city_subsection='')) |
                       (~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            # City subsection when present; otherwise municipality below.
            Case(When(~Q(meetinginfo__location_city_subsection=None) &
                      ~Q(meetinginfo__location_city_subsection=''),
                      then=F('meetinginfo__location_city_subsection')),
                 default=Value(''), output_field=models.CharField()),
            Case(When((~Q(meetinginfo__location_city_subsection=None) &
                       ~Q(meetinginfo__location_city_subsection='')) &
                      ((~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            # Fall back to municipality when the subsection is blank.
            Case(When(Q(meetinginfo__location_city_subsection=None) |
                      Q(meetinginfo__location_city_subsection=''),
                      then=F('meetinginfo__location_municipality')),
                 default=Value(''), output_field=models.CharField()),
            Case(When(((Q(meetinginfo__location_city_subsection=None) |
                        Q(meetinginfo__location_city_subsection='')) &
                       ~Q(meetinginfo__location_municipality=None) &
                       ~Q(meetinginfo__location_municipality='')) &
                      ((~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_province',
            Case(When((~Q(meetinginfo__location_province=None) &
                       ~Q(meetinginfo__location_province='')) &
                      ((~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_postal_code_1',
            Case(When((~Q(meetinginfo__location_postal_code_1=None) &
                       ~Q(meetinginfo__location_postal_code_1='')) &
                      (~Q(meetinginfo__location_nation=None) &
                       ~Q(meetinginfo__location_nation='')),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_nation'),
        description=Concat(
            # Weekday name prefix (1 = Sunday ... 7 = Saturday).
            Case(When(weekday=1, then=Value('Sunday, ')),
                 When(weekday=2, then=Value('Monday, ')),
                 When(weekday=3, then=Value('Tuesday, ')),
                 When(weekday=4, then=Value('Wednesday, ')),
                 When(weekday=5, then=Value('Thursday, ')),
                 When(weekday=6, then=Value('Friday, ')),
                 When(weekday=7, then=Value('Saturday, ')),
                 default=Value(''), output_field=models.CharField()),
            # Start time formatted via the database's to_char.
            Func(F('start_time'), Value('fmHH:MI AM'), function='to_char',
                 output_field=models.CharField()),
            Value(', '),
            # Same address-building scheme as the `address` annotation.
            'meetinginfo__location_street',
            Case(When((~Q(meetinginfo__location_street=None) &
                       ~Q(meetinginfo__location_street='')) &
                      ((~Q(meetinginfo__location_city_subsection=None) &
                        ~Q(meetinginfo__location_city_subsection='')) |
                       (~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            Case(When(~Q(meetinginfo__location_city_subsection=None) &
                      ~Q(meetinginfo__location_city_subsection=''),
                      then=F('meetinginfo__location_city_subsection')),
                 default=Value(''), output_field=models.CharField()),
            Case(When((~Q(meetinginfo__location_city_subsection=None) &
                       ~Q(meetinginfo__location_city_subsection='')) &
                      ((~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            Case(When(Q(meetinginfo__location_city_subsection=None) |
                      Q(meetinginfo__location_city_subsection=''),
                      then=F('meetinginfo__location_municipality')),
                 default=Value(''), output_field=models.CharField()),
            Case(When(((Q(meetinginfo__location_city_subsection=None) |
                        Q(meetinginfo__location_city_subsection='')) &
                       ~Q(meetinginfo__location_municipality=None) &
                       ~Q(meetinginfo__location_municipality='')) &
                      ((~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_province',
            Case(When((~Q(meetinginfo__location_province=None) &
                       ~Q(meetinginfo__location_province='')) &
                      ((~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_postal_code_1',
            Case(When((~Q(meetinginfo__location_postal_code_1=None) &
                       ~Q(meetinginfo__location_postal_code_1='')) &
                      (~Q(meetinginfo__location_nation=None) &
                       ~Q(meetinginfo__location_nation='')),
                      then=Value(', ')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_nation',
            # Trailing ' (info)' section, only when location_info is set.
            Case(When(~Q(meetinginfo__location_info=None) &
                      ~Q(meetinginfo__location_info=''),
                      then=Value(' ')),
                 default=Value(''), output_field=models.CharField()),
            Case(When((~Q(meetinginfo__location_info=None) &
                       ~Q(meetinginfo__location_info='')) &
                      ((~Q(meetinginfo__location_street=None) &
                        ~Q(meetinginfo__location_street='')) |
                       (~Q(meetinginfo__location_city_subsection=None) &
                        ~Q(meetinginfo__location_city_subsection='')) |
                       (~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value('(')),
                 default=Value(''), output_field=models.CharField()),
            'meetinginfo__location_info',
            Case(When((~Q(meetinginfo__location_info=None) &
                       ~Q(meetinginfo__location_info='')) &
                      ((~Q(meetinginfo__location_street=None) &
                        ~Q(meetinginfo__location_street='')) |
                       (~Q(meetinginfo__location_city_subsection=None) &
                        ~Q(meetinginfo__location_city_subsection='')) |
                       (~Q(meetinginfo__location_province=None) &
                        ~Q(meetinginfo__location_province='')) |
                       (~Q(meetinginfo__location_postal_code_1=None) &
                        ~Q(meetinginfo__location_postal_code_1='')) |
                       (~Q(meetinginfo__location_nation=None) &
                        ~Q(meetinginfo__location_nation=''))),
                      then=Value(')')),
                 default=Value(''), output_field=models.CharField()),
            output_field=models.CharField()),
        coordinates=Concat(Cast('longitude', models.FloatField()),
                           Value(','),
                           Cast('latitude', models.FloatField()),
                           Value(',0'),
                           output_field=models.CharField()))
def do_filter_ranking(self, engine_slug, queryset, search_text):
    """Ranks the given queryset according to the relevance of the given search text.

    This backend has no real ranking: every row receives the same
    constant rank of 1.0.
    """
    constant_rank = Value(1.0, output_field=FloatField())
    return queryset.annotate(watson_rank=constant_rank)
def __init__(self, expression, string, **extra):
    """Compare *expression* against *string*, wrapping plain values in
    Value(); trigram comparisons always produce a float similarity score,
    so the output field is forced to FloatField().
    """
    if not hasattr(string, 'resolve_expression'):
        string = Value(string)
    # Zero-argument super() — super(TrigramBase, self) is a Python 2
    # leftover, inconsistent with the other __init__ methods in this file.
    super().__init__(expression, string, output_field=FloatField(), **extra)
def __init__(self, *expressions, **extra):
    """Initialize the parent, then re-wrap each parsed expression in
    Coalesce so SQL NULLs become the JSON string '""'."""
    super().__init__(*expressions, **extra)
    null_safe = []
    for expression in self._parse_expressions(*expressions):
        null_safe.append(Coalesce(expression, Cast(Value('""'), JSONField())))
    self.source_expressions = null_safe
def __init__(self, expression, length, fill_text=Value(' '), **extra):
    """Pad *expression* to *length* characters using *fill_text*
    (a single space by default).

    Raises ValueError when a literal negative length is supplied;
    expression lengths are validated by the database instead.
    """
    length_is_literal = not hasattr(length, 'resolve_expression')
    if length_is_literal and length is not None and length < 0:
        raise ValueError("'length' must be greater or equal to 0.")
    super().__init__(expression, length, fill_text, **extra)
def _as_sql_substr(self, compiler, connection):
    """Compile as a SUBSTR call, padding a missing length argument with
    the maximum 32-bit signed integer (i.e. "to the end of the string").

    Works on a copy: the original appended directly to the list returned
    by get_source_expressions(), permanently mutating this expression as
    a side effect of SQL compilation.
    """
    clone = self.copy()
    expressions = clone.get_source_expressions()
    if len(expressions) < 3:
        expressions.append(Value(2**31 - 1))
        clone.set_source_expressions(expressions)
    return clone.as_sql(compiler, connection)
def get_substr(self):
    """Substring of the first source expression starting at the negated
    second expression (a from-the-end offset)."""
    expression = self.source_expressions[0]
    negated_start = self.source_expressions[1] * Value(-1)
    return Substr(expression, negated_start)
def name(self):
    """Database expression joining first and last name with a space."""
    return Concat(
        F("first_name"),
        Value(" "),
        F("last_name"),
        output_field=models.TextField(),
    )
def get_ordering_value(self, qs: QuerySet, value: Any) -> Tuple[QuerySet, OrderingFieldType]:
    """Return the queryset unchanged plus a JSON key-lookup ordering
    expression (field -> value)."""
    ordering = CombinedExpression(F(self.field_name), "->", Value(value))
    return qs, ordering
def merge(self, agency, user):
    """Merge the other agency into this agency"""
    # Repoint simple reverse foreign-key relations at this agency.
    replace_relations = [
        'foiarequest_set',
        'foiamachinerequest_set',
        'reviewagencytask_set',
        'flaggedtask_set',
        'newagencytask_set',
        'staleagencytask_set',
    ]
    for relation in replace_relations:
        getattr(agency, relation).update(agency=self)
    # Self-referential FKs: rows whose *forward* field pointed at the old
    # agency (reached via the *backward* accessor) now point here.
    replace_self_relations = [
        ('appeal_agency', 'appeal_for'),
        ('payable_to', 'receivable'),
        ('parent', 'children'),
    ]
    for forward, backward in replace_self_relations:
        getattr(agency, backward).update(**{forward: self})
    # Many-to-many: copy memberships onto this agency, then detach them
    # from the old one.
    replace_m2m = [
        'composers',
        'multirequests',
        'types',
        'foiasavedsearch_set',
    ]
    for relation in replace_m2m:
        getattr(self, relation).add(*getattr(agency, relation).all())
        getattr(agency, relation).clear()
    # move emails/phone numbers/addresses over,
    # with types set to 'none', if doesn't already exist
    # on new agency (with any types)
    agency.agencyemail_set.exclude(email__in=self.emails.all()).update(
        request_type='none', email_type='none', agency=self)
    agency.agencyphone_set.exclude(phone__in=self.phones.all()).update(
        request_type='none', agency=self)
    agency.agencyaddress_set.exclude(
        address__in=self.addresses.all(), ).update(request_type='none',
                                                   agency=self)
    # just update user on comms
    # we don't want to create a user for the bad agency if one doesn't exist
    try:
        old_user = agency.profile.user
        new_user = self.get_user()
        old_user.sent_communications.update(from_user=new_user)
        old_user.received_communications.update(to_user=new_user)
    except Profile.DoesNotExist:
        pass
    # mark the old agency as rejected and leave a note that it was merged
    agency.status = 'rejected'
    agency.notes = (Concat(
        F('notes'),
        Value(u'\n\nThis agency was merged into agency '
              u'"{}" (#{}) by {} on {}'.format(
                  self.name,
                  self.pk,
                  user.username,
                  timezone.now(),
              ))))
    agency.save()
    # Leave a matching audit note on the surviving agency.
    self.notes = (Concat(
        F('notes'),
        Value(u'\n\nAgency "{}" (#{}) was merged into this agency '
              u'by {} on {}'.format(
                  agency.name,
                  agency.pk,
                  user.username,
                  timezone.now(),
              ))))
    self.save()