def convert(self, value):
    """Deserialize *value* into a timezone-aware ``datetime``.

    Strings are parsed with ``DATETIME_REGEX``; anything else is assumed to
    be datetime-like (exposes year/month/day/hour/minute/second attributes)
    and is rebuilt, which also drops microseconds.

    NOTE(review): uses ``basestring``, so this block is Python 2-only code.

    :raises ApiFieldError: when a string does not match the expected format.
    """
    if isinstance(value, basestring):
        match = DATETIME_REGEX.search(value)

        if match:
            data = match.groupdict()
            return make_aware(
                datetime_safe.datetime(
                    int(data["year"]),
                    int(data["month"]),
                    int(data["day"]),
                    int(data["hour"]),
                    int(data["minute"]),
                    int(data["second"]),
                )
            )
        else:
            raise ApiFieldError(
                "Datetime provided to '%s' field doesn't appear to be a "
                "valid datetime string: '%s'" % (self._field_name, value)
            )

    # Non-string input: rebuild field-by-field to normalize subclasses and
    # shed microseconds before making it aware.
    return make_aware(
        datetime_safe.datetime(value.year, value.month, value.day, value.hour, value.minute, value.second)
    )
def test_archive_with_two_year_separated_blogposts(self):
    """The archive context groups posts year -> month -> day, newest year first."""
    example_blogpost1 = create_dummy_blogpost_posted_at_datetime(
        "example", datetime(1990, 10, 20, 14, 20, 00)
    )
    example_blogpost2 = create_dummy_blogpost_posted_at_datetime(
        "example2", datetime(2004, 8, 10, 12, 00, 10)
    )
    resp = self.client.get(reverse("blog:archive"))
    archive_dic = resp.context["archive"]
    # Nested (year, (month, (day, [posts]))) tuples, most recent year first.
    expected_archive_contents = \
        ((2004,
          (8,
           (10, [example_blogpost2])
           )
          ),
         (1990,
          (10,
           (20, [example_blogpost1]),
           ),
          ),
         )
    self.assertEqual(archive_dic, self.expand_tuples_to_ordered_dic(
        expected_archive_contents))
    # Clean up so other tests see an empty archive.
    example_blogpost1.delete()
    example_blogpost2.delete()
def test_compare_datetimes(self):
    """datetime_safe types must compare equal to their stdlib counterparts."""
    for args in (self.more_recent, self.really_old):
        self.assertEqual(original_datetime(*args), datetime(*args))
        self.assertEqual(original_date(*args), date(*args))
    safe_args = self.just_safe
    self.assertEqual(original_date(*safe_args).strftime('%Y-%m-%d'),
                     date(*safe_args).strftime('%Y-%m-%d'))
    self.assertEqual(original_datetime(*safe_args).strftime('%Y-%m-%d'),
                     datetime(*safe_args).strftime('%Y-%m-%d'))
def test__to_python(self):
    """Stored strings round-trip back into the expected native Python values."""
    cases = [
        ('abc', 'abc'),
        ('1', 1),
        ('2653', 2653),
        ('25.5', 25.5),
        ('[1, 2, 3]', [1, 2, 3]),
        ('{"a": 1, "b": 2, "c": 3}', {'a': 1, 'c': 3, 'b': 2}),
        ('2009-05-09T16:14:00', datetime(2009, 5, 9, 16, 14)),
        ('2009-05-09T00:00:00', datetime(2009, 5, 9, 0, 0)),
        (None, None),
    ]
    for raw, expected in cases:
        self.assertEqual(self.sb._to_python(raw), expected)
def test__to_python(self):
    """Stored strings round-trip back into the expected native Python values."""
    cases = [
        ("abc", "abc"),
        ("1", 1),
        ("2653", 2653),
        ("25.5", 25.5),
        ("[1, 2, 3]", [1, 2, 3]),
        ("(1, 2, 3)", (1, 2, 3)),
        ('{"a": 1, "b": 2, "c": 3}', {"a": 1, "c": 3, "b": 2}),
        ("2009-05-09T16:14:00", datetime(2009, 5, 9, 16, 14)),
        ("2009-05-09T00:00:00", datetime(2009, 5, 9, 0, 0)),
        (None, None),
    ]
    for raw, expected in cases:
        self.assertEqual(self.sb._to_python(raw), expected)
def test_last_login_not_changed(self):
    """Logging in *as* another user must not update that user's last_login."""
    self.destination_user.last_login = datetime(2000, 1, 1)
    self.destination_user.save()
    self.client.login(username='******', password='******')
    response = self.client.post(
        reverse('login_as_user', args=[self.destination_user.id])
    )
    # Re-fetch to assert on the value actually persisted in the database.
    self.destination_user = User.objects.get(pk=self.destination_user.pk)
    self.assertEqual(self.destination_user.last_login, datetime(2000, 1, 1))
    # Check the update_last_login function has been reconnected to the user_logged_in signal
    connections = [str(ref[1]) for ref in auth.user_logged_in.receivers
                   if 'update_last_login' in str(ref[1])]
    self.assertTrue(connections)
def test__from_python(self):
    """Native Python values must serialize to the expected storage strings."""
    cases = (
        ('abc', u'abc'),
        (1, u'1'),
        (2653, u'2653'),
        (25.5, u'25.5'),
        ([1, 2, 3], u'[1, 2, 3]'),
        ((1, 2, 3), u'(1, 2, 3)'),
        ({'a': 1, 'c': 3, 'b': 2}, u"{'a': 1, 'c': 3, 'b': 2}"),
        (datetime(2009, 5, 9, 16, 14), u'2009-05-09T16:14:00'),
        (datetime(2009, 5, 9, 0, 0), u'2009-05-09T00:00:00'),
        (datetime(1899, 5, 18, 0, 0), u'1899-05-18T00:00:00'),
        # Sorry, we shed the microseconds.
        (datetime(2009, 5, 18, 1, 16, 30, 250), u'2009-05-18T01:16:30'),
    )
    for value, expected in cases:
        self.assertEqual(self.sb._from_python(value), expected)
def test_safe_strftime(self):
    """datetime_safe must format dates before 1900 where stdlib strftime fails."""
    # just_unsafe / just_safe straddle the 1900-01-01 boundary.
    self.assertEqual(date(*self.just_unsafe[:3]).strftime('%Y-%m-%d (weekday %w)'), '1899-12-31 (weekday 0)')
    self.assertEqual(date(*self.just_safe).strftime('%Y-%m-%d (weekday %w)'), '1900-01-01 (weekday 1)')
    self.assertEqual(datetime(*self.just_unsafe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'), '1899-12-31 23:59:59 (weekday 0)')
    self.assertEqual(datetime(*self.just_safe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'), '1900-01-01 00:00:00 (weekday 1)')
    # %y will error before this date
    self.assertEqual(date(*self.just_safe).strftime('%y'), '00')
    self.assertEqual(datetime(*self.just_safe).strftime('%y'), '00')
    # A well-before-1900 date must still format, including the weekday name.
    self.assertEqual(date(1850, 8, 2).strftime("%Y/%m/%d was a %A"), '1850/08/02 was a Friday')
def test__from_python(self):
    """Native Python values must serialize to the expected storage strings."""
    cases = (
        ("abc", u"abc"),
        (1, u"1"),
        (2653, u"2653"),
        (25.5, u"25.5"),
        ([1, 2, 3], u"[1, 2, 3]"),
        ((1, 2, 3), u"(1, 2, 3)"),
        ({"a": 1, "c": 3, "b": 2}, u"{'a': 1, 'c': 3, 'b': 2}"),
        (datetime(2009, 5, 9, 16, 14), u"2009-05-09T16:14:00"),
        (datetime(2009, 5, 9, 0, 0), u"2009-05-09T00:00:00"),
        (datetime(1899, 5, 18, 0, 0), u"1899-05-18T00:00:00"),
        # Sorry, we shed the microseconds.
        (datetime(2009, 5, 18, 1, 16, 30, 250), u"2009-05-18T01:16:30"),
    )
    for value, expected in cases:
        self.assertEqual(self.sb._from_python(value), expected)
def test_valid_info_saving(self):
    """ Tests saving of valid data """
    new_data = {key: self.user[key] + self.rand_str() for key in self.user}
    # randrange's upper bound is exclusive and day/month numbering starts
    # at 1, so use (1, 13) for months and (1, 29) for days.  The original
    # randrange(25) could yield day 0 (datetime raises ValueError) and
    # randrange(1, 12) could never produce December.
    new_data['date_of_birth'] = datetime(
        random.randrange(1970, 2000),
        random.randrange(1, 13),
        random.randrange(1, 29)
    ).strftime('%Y-%m-%d')
    new_data['email'] = '%s@%s.%s' % (
        self.rand_str(), self.rand_str(5), self.rand_str(3))
    # TODO test photo field
    s_response = self.client.post('/contacts/edit', new_data).content
    s_response = json.loads(s_response.decode('utf-8'))
    self.assertEqual(s_response['status'], 'success', "Wrong server resp.")
    # Reload the edit page and check every submitted value is rendered back.
    new_s_data = BeautifulSoup(
        self.client.get('/contacts/edit').content, 'html.parser')
    for key in new_data:
        form_item = new_s_data.find('input', {'name': key})
        form_value = form_item.get('value') if form_item \
            else new_s_data.find('textarea', {'name': key}).text
        self.assertEqual(
            new_data[key], form_value.strip(),
            'Value of %s is wrong' % (key, )
        )
def get_specific_date(day, month, year):
    """Return the dining list for the given calendar day.

    Future dates fall back to the latest available list; otherwise the
    list for that date is fetched (created on demand).
    """
    requested = datetime(int(year), int(month), int(day))
    if requested.date() > datetime.now().date():
        return DiningList.get_latest()
    return DiningList.objects.get_or_create(relevant_date=requested)[0]
def getPledgesMonthlyCount(self, monthdate=None):
    """Count non-cancelled pledge transactions for this project in a month.

    :param monthdate: any datetime inside the month to count; defaults to now().
    :return: number of matching ``DonationTransaction`` rows.
    """
    from bitfund.pledger.models import DonationTransaction, DONATION_TRANSACTION_TYPES_CHOICES, DONATION_TRANSACTION_STATUSES_CHOICES

    if monthdate is None:
        monthdate = now()

    month_start = datetime(monthdate.year, monthdate.month, 1, tzinfo=monthdate.tzinfo)
    # Roll over to January of the next year in December — the original
    # ``monthdate.month + 1`` passed the invalid month 13 to datetime()
    # and raised ValueError every December.
    if monthdate.month == 12:
        next_month_start = datetime(monthdate.year + 1, 1, 1, tzinfo=monthdate.tzinfo)
    else:
        next_month_start = datetime(monthdate.year, monthdate.month + 1, 1, tzinfo=monthdate.tzinfo)

    return (DonationTransaction.objects
            .filter(accepting_project=self.project)
            .filter(transaction_type=DONATION_TRANSACTION_TYPES_CHOICES.pledge)
            .filter(transaction_datetime__gte=month_start)
            .filter(transaction_datetime__lt=next_month_start)
            .exclude(transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled)
            .count()
            )
def convert(self, value):
    """Parse a datetime string into an aware datetime; pass other values through.

    Raises ApiFieldError for strings that do not look like datetimes.
    """
    if value is None:
        return None
    if not isinstance(value, six.string_types):
        # Already a datetime-like object: hand it back untouched.
        return value

    match = DATETIME_REGEX.search(value)
    if not match:
        raise ApiFieldError(
            "Datetime provided to '%s' field doesn't appear to be a valid datetime string: '%s'"
            % (self.instance_name, value)
        )

    data = match.groupdict()
    return make_aware(
        datetime_safe.datetime(
            int(data["year"]),
            int(data["month"]),
            int(data["day"]),
            int(data["hour"]),
            int(data["minute"]),
            int(data["second"]),
        )
    )
def _to_python(self, value):
    """
    Converts values from Whoosh to native Python values.

    A port of the same method in pysolr, as they deal with data
    the same way.
    """
    if value == 'true':
        return True
    elif value == 'false':
        return False

    if value and isinstance(value, six.string_types):
        possible_datetime = DATETIME_REGEX.search(value)

        if possible_datetime:
            date_values = possible_datetime.groupdict()

            for dk, dv in date_values.items():
                date_values[dk] = int(dv)

            return datetime(date_values['year'], date_values['month'],
                            date_values['day'], date_values['hour'],
                            date_values['minute'], date_values['second'])

        try:
            # Attempt to use json to load the values.
            converted_value = json.loads(value)

            # Try to handle most built-in types.
            if isinstance(converted_value,
                          (list, tuple, set, dict, six.integer_types, float, complex)):
                return converted_value
        except (TypeError, ValueError):
            # json.loads signals malformed input with ValueError (or
            # TypeError for unexpected input types); the original bare
            # ``except:`` also hid genuine bugs like KeyboardInterrupt.
            pass

    return value
def testAttendaceList(self):
    """Posting an attendance list stores the lector price and per-attendee presence flags."""
    lesson = Lesson.objects.get(pk=1)
    try:
        # delete existing attendance list to simulate that one is created,
        # when accessing it
        lesson.attendancelist.delete()
    except AttendanceList.DoesNotExist:  # @UndefinedVariable
        pass
    response = self.client.get(lesson.get_attendance_list_url())
    eq_(200, response.status_code)
    data = {'classroom': '1', 'start': '2008-01-01', 'end': '2008-01-01 01:00',
            'lector': '1', 'course_members': ['1', '2']}
    response = self.client.post(lesson.get_attendance_list_url(), data)
    self.assertRedirects(response, lesson.get_attendance_list_url())
    # Re-fetch to pick up the attendance list created by the POST.
    lesson = Lesson.objects.get(pk=1)
    # Stored price must match the canonical lector_price() for a one-hour lesson.
    eq_(lector_price(Lector.objects.get(pk=1), Course.objects.get(pk=1),
                     datetime(2008, 1, 1), datetime(2008, 1, 1, 1)),
        lesson.attendancelist.lector_price)
    eq_([True, True, False],
        [a.present for a in lesson.attendancelist.lessonattendee_set.all()])
    # Re-posting with a different member set flips the presence flags.
    data['course_members'] = ['1', '3']
    response = self.client.post(lesson.get_attendance_list_url(), data)
    eq_([True, False, True],
        [a.present for a in lesson.attendancelist.lessonattendee_set.all()])
def date_formatter(value, tooltip, small):
    """
    Format a date to an human readable string.

    :param value: Date to format.
    :param bool tooltip: if `True`, format date to a tooltip label.
    :param bool small: if `True`, create a shorter string.
    :return: formatted string, or *value* unchanged when it is not date-like.
    """
    try:
        # Normalize to a plain datetime; also drops microseconds.
        value = datetime(value.year, value.month, value.day, value.hour,
                         value.minute, value.second)
    except (AttributeError, ValueError):
        # todo : Check why not raise template.TemplateSyntaxError() ?
        return value

    if getattr(value, 'tzinfo', None):
        # Compare in the value's own timezone when it is aware.
        now = datetime.now(LocalTimezone(value))
    else:
        now = datetime.now()
    # Truncate sub-second precision so the comparison below is stable.
    now = now - timedelta(microseconds=now.microsecond)

    if value > now:
        return __DATE_FMT_FUTUR
    else:
        delta = now - value
        # Natural time for today, absolute date after.
        # Reverse if in tooltip
        if (delta.days == 0) != tooltip:
            return naturaltime(value)
        else:
            return date(value, __ABS_DATE_FMT_SMALL if small else __ABS_DATE_FMT_NORMAL)
def date_formatter(value, tooltip, small):
    """Format *value* into a human-readable (French) date string.

    :param value: date/datetime-like object to format.
    :param tooltip: if True, invert the natural/absolute choice for tooltips.
    :param small: if True, use the short absolute format.
    :return: formatted string, or *value* unchanged when it is not date-like.
    """
    try:
        # Normalize to a plain datetime; also drops microseconds.
        value = datetime(value.year, value.month, value.day,
                         value.hour, value.minute, value.second)
    except (AttributeError, ValueError):
        # Merged the two identical except branches from the original.
        return value

    if getattr(value, 'tzinfo', None):
        # Compare in the value's own timezone when it is aware.
        now = datetime.now(LocalTimezone(value))
    else:
        now = datetime.now()
    # timedelta(0, 0, x) meant microseconds — spell it out.
    now = now - timedelta(microseconds=now.microsecond)

    if value > now:
        return "Dans le futur"

    delta = now - value
    # Natural time for today, absolute date after.
    # Reverse if in tooltip
    if (delta.days == 0) != tooltip:
        return naturaltime(value)
    elif small:
        return date(value, 'd/m/y à H\hi')
    else:
        return date(value, 'l d F Y à H\hi')
def test_serializer_json_with_an_event(self):
    """
    Create a ``CalendarEvent`` and test if serialize function returns
    the correct json.

    The :class:`~django_bootstrap_calendar.models.CalendarEvent` is
    created with 5 hours duration.
    """
    # Month written as 3, not 03: leading-zero integer literals are a
    # SyntaxError on Python 3 (same value on Python 2).
    timestamp = datetime_safe.datetime(2014, 3, 16, 12, 30, tzinfo=timezone.UTC())
    event = CalendarEvent.objects.create(
        title="Some Event Test",
        start=timestamp,
        end=timestamp + timedelta(seconds=60 * 60 * 5),
        url='http://example.com',
        css_class='event-info'
    )
    event_queryset = CalendarEvent.objects.filter(id=event.id)
    # Timestamps below are epoch milliseconds for the start/end above.
    expected_json = '''{ "result": [ { "end": "1394991000000", "title": "Some Event Test", "url": "http://example.com", "id": 1, "start": "1394973000000", "class": "event-info" } ], "success": 1 }'''
    self.assertJSONEqual(event_serializer(event_queryset), expected_json)
def setUp(self):
    """Create the merchandise item, opponent and home game used by the tests."""
    super().setUp()
    self.merch1 = Merchandise.objects.create(
        description="Test Object 1",
        name="Test Object 1",
        item_number=1,
        price=10.00,
        item_picture="somepicture.jpg",
        on_hand=10,
    )
    opponent = Opponents.objects.create(name='Test Name', home_town='Test Town')
    self.event1 = Schedule.objects.create(
        home_or_away='H',
        date_time=datetime(2016, 2, 1),
        opponent=opponent,
    )
def test_StrictDateTimeField_update_via_queryset_invalid_then_get():
    """
    So for whatever reason, by the time this gets to the FieldCleaningDescriptor
    the 'blep' has been converted into True ... fun.
    """
    instance = DateTimeFieldModel.objects.create(field=datetime.today())
    manager = instance.__class__.objects
    # A queryset update bypasses the descriptor; the string is coerced on read.
    manager.filter(pk=instance.pk).update(field='2000-01-01')
    assert manager.get(pk=instance.pk).field == datetime(2000, 1, 1, 0, 0)
def dia_hora_reserva_fi(self):
    """Combine the reservation day with its end time into a single datetime."""
    day = self.dia_reserva
    end = self.hora_fi
    return datetime(day.year, day.month, day.day,
                    end.hour, end.minute, end.second)
def getGoalsCount(self):
    """Count this project's public goals that are active now and end this month.

    :return: number of matching ``ProjectGoal`` rows.
    """
    from bitfund.project.models import ProjectGoal

    # Capture the clock once so every filter compares against the same instant
    # (the original called now() five times).
    current = now()
    # First day of the next month; roll the year over in December instead of
    # passing the invalid month 13 to datetime() (ValueError in the original).
    if current.month == 12:
        next_month_start = datetime(current.year + 1, 1, 1, tzinfo=current.tzinfo)
    else:
        next_month_start = datetime(current.year, current.month + 1, 1, tzinfo=current.tzinfo)

    return (ProjectGoal.objects.filter(project=self.id)
            .filter(is_public=True)
            .filter(date_ending__gt=current)
            .filter(date_ending__lt=next_month_start)
            .filter(date_starting__lt=current)
            .count()
            )
def test_StrictDateTimeField_descriptor_doesnt_disappear():
    """
    don't clobber the descriptor
    """
    today = datetime.today()
    value = DateTimeFieldModel(field=today)
    assert value.field == today
    # A date string must be coerced to a datetime by the descriptor.
    value.field = '2015-04-16'
    assert value.field == datetime(2015, 4, 16, 0, 0)
    # A rejected assignment must leave the previous value intact.
    with pytest.raises(ValidationError):
        value.field = 'v'*256
    assert value.field == datetime(2015, 4, 16, 0, 0)
    value.field = today
    assert value.field == today
    # Wrong types and unparsable strings raise distinct errors.
    with pytest.raises(TypeError):
        value.field = -1
    with pytest.raises(ValidationError):
        value.field = '-1'
def test_serialize_datetime_safe(self):
    """datetime_safe objects must serialize as plain datetime expressions."""
    cases = (
        (datetime_safe.date(2014, 3, 31),
         ("datetime.date(2014, 3, 31)", {"import datetime"})),
        (datetime_safe.time(10, 25),
         ("datetime.time(10, 25)", {"import datetime"})),
        (datetime_safe.datetime(2014, 3, 31, 16, 4, 31),
         ("datetime.datetime(2014, 3, 31, 16, 4, 31)", {"import datetime"})),
    )
    for value, expected in cases:
        self.assertSerializedResultEqual(value, expected)
def test_ram_storage(self):
    """A stored mock model round-trips all of its fields through the backend."""
    results = self.sqs.filter(id='core.mockmodel.1')

    # Sanity check.
    self.assertEqual(results.count(), 1)

    # Check the individual fields.
    result = results[0]
    expected = {
        'id': 'core.mockmodel.1',
        'text': 'This is some example text.',
        'name': 'Mister Pants',
        'is_active': True,
        'post_count': 25,
        'average_rating': 3.6,
        'pub_date': datetime(2009, 11, 21, 0, 0),
        'created': datetime(2009, 11, 21, 21, 31, 0),
        'tags': ['staff', 'outdoor', 'activist', 'scientist'],
        'sites': [u'3', u'5', u'1'],
        'empty_list': [],
    }
    for attr, value in expected.items():
        self.assertEqual(getattr(result, attr), value)
def getMonthlyTotalByType(self, transaction_type, monthdate=None):
    """Sum non-cancelled transactions of *transaction_type* for this need in a month.

    :param transaction_type: DONATION_TRANSACTION_TYPES_CHOICES value to sum.
    :param monthdate: any datetime inside the month; defaults to now().
    :return: Decimal total quantized to cents (0 when there are no rows).
    """
    from bitfund.pledger.models import DonationTransaction, DONATION_TRANSACTION_STATUSES_CHOICES

    if monthdate is None:
        monthdate = now()

    month_start = datetime(monthdate.year, monthdate.month, 1, tzinfo=monthdate.tzinfo)
    # Roll over to January of the next year in December — the original
    # ``monthdate.month + 1`` raised ValueError (month 13) every December.
    if monthdate.month == 12:
        next_month_start = datetime(monthdate.year + 1, 1, 1, tzinfo=monthdate.tzinfo)
    else:
        next_month_start = datetime(monthdate.year, monthdate.month + 1, 1, tzinfo=monthdate.tzinfo)

    donation_transactions_sum = (DonationTransaction.objects
                                 .filter(accepting_project=self.project)
                                 .filter(accepting_need=self)
                                 .filter(transaction_type=transaction_type)
                                 .filter(transaction_datetime__gte=month_start)
                                 .filter(transaction_datetime__lt=next_month_start)
                                 .exclude(transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled)
                                 .aggregate(Sum('transaction_amount'))['transaction_amount__sum']
                                 ) or 0

    return Decimal(donation_transactions_sum).quantize(Decimal('0.01'))
def test_archive_multiples_per_month_and_day(self):
    """Archive groups posts year -> month -> day, newest first at every level;
    posts sharing a day are listed newest first."""
    example_blogpost1 = create_dummy_blogpost_posted_at_datetime(
        "example", datetime(1990, 10, 20, 14, 20, 00)
    )
    example_blogpost2 = create_dummy_blogpost_posted_at_datetime(
        "example2", datetime(2004, 8, 10, 12, 00, 10)
    )
    example_blogpost3 = create_dummy_blogpost_posted_at_datetime(
        "example3", datetime(2004, 3, 18, 12, 00, 10)
    )
    example_blogpost4 = create_dummy_blogpost_posted_at_datetime(
        "example4", datetime(2004, 3, 18, 13, 00, 10)
    )
    example_blogpost5 = create_dummy_blogpost_posted_at_datetime(
        "example5", datetime(2004, 3, 21, 12, 00, 10)
    )
    resp = self.client.get(reverse("blog:archive"))
    archive_dic = resp.context["archive"]
    # Nested (year, (month, (day, [posts]))) tuples; note posts 4 and 3
    # share 2004-03-18 and appear newest first within that day.
    expected_archive_contents = \
        ((2004,
          (8,
           (10, [example_blogpost2])
           ),
          (3,
           (21, [example_blogpost5]),
           (18, [example_blogpost4, example_blogpost3]),
           )
          ),
         (1990,
          (10,
           (20, [example_blogpost1]),
           ),
          ),
         )
    self.assertEqual(archive_dic, self.expand_tuples_to_ordered_dic(
        expected_archive_contents))
    # Clean up so other tests see an empty archive.
    for blogpost in (
            example_blogpost1, example_blogpost2, example_blogpost3,
            example_blogpost4, example_blogpost5):
        blogpost.delete()
def getTotalMonthlyBudget(self, monthdate=None):
    """Return the summed amount of public needs active in *monthdate*'s month.

    Open-ended needs (no end date) always count; time-limited needs count
    only when they span the entire month.

    :param monthdate: any datetime inside the month; defaults to now().
    :return: Decimal sum of the matching amounts.
    """
    if monthdate is None:
        monthdate = now()

    month_start = datetime(monthdate.year, monthdate.month, 1, tzinfo=monthdate.tzinfo)
    # Roll over to January of the next year in December — the original
    # ``monthdate.month + 1`` raised ValueError (month 13) every December.
    if monthdate.month == 12:
        next_month_start = datetime(monthdate.year + 1, 1, 1, tzinfo=monthdate.tzinfo)
    else:
        next_month_start = datetime(monthdate.year, monthdate.month + 1, 1, tzinfo=monthdate.tzinfo)

    # Needs with no end date run indefinitely.
    lasting = Decimal((ProjectNeed.objects
                       .filter(project_id=self.id)
                       .filter(is_public=True)
                       .filter(date_ending=None)
                       .aggregate(Sum('amount'))['amount__sum']
                       ) or 0)
    # Time-limited needs must start on/before the month and end after it.
    limited = Decimal((ProjectNeed.objects
                       .filter(project_id=self.id)
                       .filter(is_public=True)
                       .filter(date_starting__lte=month_start)
                       .filter(date_ending__gt=next_month_start)
                       .aggregate(Sum('amount'))['amount__sum']
                       ) or 0)

    return limited + lasting
def init(request):
    """Seed the database with one of everything for manual testing.

    Creates a club + category, a class group, a user, an application with a
    session, three permissions, and a rank, then wires them together.
    """
    club_category = ClubCategory(name="Techno")
    club_category.save()
    club = Club(name="HIFI", category=club_category)
    club.save()
    class_group = ClassGroup(label="L3")
    class_group.save()
    user = User(
        first_name="Rémi",
        last_name="Jarasson",
        username="******",
        # Day/month written without leading zeros — 01-style literals are a
        # SyntaxError on Python 3 (same value on Python 2).
        birthdate=datetime(1991, 1, 17),
        email="*****@*****.**",
    )
    user.save()
    app = Application(name="Pouet", slug="pouet", api_key="1234", club=club)
    app.save()
    session = Session(
        token="2345",
        time_start=datetime.now(),
        time_end=datetime(2011, 12, 25),
        user=user,
        application=app
    )
    session.save()
    p1 = Permission(name="permission.get")
    p2 = Permission(name="permission.new")
    p3 = Permission(name="global.permission.delete")
    p1.save()
    p2.save()
    p3.save()
    app.permissions.add(p1)
    app.permissions.add(p3)
    rank = Rank(name="Président")
    rank.save()
    rank.permissions.add(p1)
    rank.permissions.add(p3)
    rank.save()
    user.groups.create(rank=rank, club=club)
    return HttpResponse("Initialization.")
def test_safe_strftime(self):
    """datetime_safe must format dates back to year 1000 and reject %y before it."""
    # just_unsafe / just_safe straddle the 1000-01-01 boundary.
    self.assertEqual(date(*self.just_unsafe[:3]).strftime('%Y-%m-%d (weekday %w)'), '0999-12-31 (weekday 2)')
    self.assertEqual(date(*self.just_safe).strftime('%Y-%m-%d (weekday %w)'), '1000-01-01 (weekday 3)')
    self.assertEqual(
        datetime(*self.just_unsafe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'),
        '0999-12-31 23:59:59 (weekday 2)'
    )
    self.assertEqual(
        datetime(*self.just_safe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'),
        '1000-01-01 00:00:00 (weekday 3)'
    )
    self.assertEqual(time(*self.just_time).strftime('%H:%M:%S AM'), '11:30:59 AM')
    # %y will error before this date
    self.assertEqual(date(*self.percent_y_safe).strftime('%y'), '00')
    self.assertEqual(datetime(*self.percent_y_safe).strftime('%y'), '00')
    with self.assertRaisesMessage(TypeError, 'strftime of dates before 1000 does not handle %y'):
        datetime(*self.just_unsafe).strftime('%y')
    # A pre-1900 date must still format, including the weekday name.
    self.assertEqual(date(1850, 8, 2).strftime("%Y/%m/%d was a %A"), '1850/08/02 was a Friday')
def test_error_returned_when_updating_user_with_invalid_email(
        api_client, data_fixture):
    """PATCHing a username that is not a valid e-mail must return a 400 validation error."""
    user, token = data_fixture.create_user_and_token(
        email="*****@*****.**",
        password="******",
        first_name="Test1",
        is_staff=True,
        date_joined=datetime(2021, 4, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    )
    url = reverse("api:premium:admin:users:edit", kwargs={"user_id": user.id})
    # We have to provide a str as otherwise the test api client will "helpfully" try
    # to serialize the dict using the endpoints serializer, which will fail before
    # actually running the endpoint.
    response = api_client.patch(
        url,
        json.dumps({"username": "******"}),
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_400_BAD_REQUEST
    assert response.json()["error"] == "ERROR_REQUEST_BODY_VALIDATION"
def _from_python(self, value):
    """
    Converts Python values to a string for Whoosh.

    Code courtesy of pysolr.
    """
    if hasattr(value, 'strftime'):
        # A date (no time component) gains midnight so everything is a datetime.
        if not hasattr(value, 'hour'):
            value = datetime(value.year, value.month, value.day, 0, 0, 0)
    elif isinstance(value, bool):
        value = 'true' if value else 'false'
    elif isinstance(value, (list, tuple)):
        value = u','.join([force_text(v) for v in value])
    elif isinstance(value, (six.integer_types, float)):
        pass  # Leave numbers alone.
    else:
        value = force_text(value)
    return value
def _to_python(self, value):
    """
    Converts values from Whoosh to native Python values.

    A port of the same method in pysolr, as they deal with data
    the same way.
    """
    if value == 'true':
        return True
    elif value == 'false':
        return False

    if value and isinstance(value, six.string_types):
        possible_datetime = DATETIME_REGEX.search(value)

        if possible_datetime:
            date_values = possible_datetime.groupdict()

            for dk, dv in date_values.items():
                date_values[dk] = int(dv)

            return datetime(date_values['year'], date_values['month'],
                            date_values['day'], date_values['hour'],
                            date_values['minute'], date_values['second'])

        try:
            # Attempt to use json to load the values.
            converted_value = json.loads(value)

            # Try to handle most built-in types.
            if isinstance(
                    converted_value,
                    (list, tuple, set, dict, six.integer_types, float, complex)):
                return converted_value
        except (TypeError, ValueError):
            # json.loads signals malformed input with ValueError (or
            # TypeError for unexpected input types); the original bare
            # ``except:`` also hid genuine bugs like KeyboardInterrupt.
            pass

    return value
def _from_python(self, value):
    """
    Converts Python values to a string for Whoosh.

    Code courtesy of pysolr.
    """
    if hasattr(value, "strftime"):
        # A date (no time component) gains midnight so everything is a datetime.
        if not hasattr(value, "hour"):
            value = datetime(value.year, value.month, value.day, 0, 0, 0)
    elif isinstance(value, bool):
        value = "true" if value else "false"
    elif isinstance(value, (list, tuple)):
        value = ",".join([force_str(v) for v in value])
    elif isinstance(value, (int, float)):
        pass  # Leave numbers alone.
    else:
        value = force_str(value)
    return value
def test_items_creation(self):
    """Bulk-created cost items attach to the category; income stays empty."""
    Category.objects.create(name='tom', color='ffffff')
    DateEntry.objects.create(date=datetime(2019, 12, 11))
    items = (('Kostka do gry', d(5.20)), ('Cyna', d(21.33)), ('Pudełko', d(34)))
    entrys = []
    for name, price in items:
        entrys.append(
            CostItem(category=Category.objects.get(),
                     date=DateEntry.objects.get(),
                     name=name,
                     # BUG fix: was hard-coded Decimal(5.20), ignoring the
                     # per-item price unpacked from ``items``.
                     value=price))
    CostItem.objects.bulk_create(entrys)
    cat = Category.objects.get()
    # (Removed leftover debug print() calls.)
    assert cat.cost_items.count() == 3
    assert cat.income_items.count() == 0
def test_admin_can_sort_users(api_client, data_fixture):
    """Searching the admin user list returns only the matching user, serialized fully."""
    # Staff user whose token authorizes the admin endpoint.
    _, token = data_fixture.create_user_and_token(
        email="*****@*****.**",
        password="******",
        first_name="Test1",
        is_staff=True,
    )
    searched_for_user = data_fixture.create_user(
        email="*****@*****.**",
        password="******",
        first_name="Test1",
        date_joined=datetime(2021, 4, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    )
    url = reverse("api:premium:admin_user:users")
    response = api_client.get(
        f"{url}?page=1&search=specific_user",
        format="json",
        HTTP_AUTHORIZATION=f"JWT {token}",
    )
    assert response.status_code == HTTP_200_OK
    # Exactly one hit, with the full serialized user payload.
    assert response.json() == {
        "count": 1,
        "next": None,
        "previous": None,
        "results": [{
            "date_joined": "2021-04-01T01:00:00Z",
            "name": searched_for_user.first_name,
            "username": searched_for_user.email,
            "groups": [],
            "id": searched_for_user.id,
            "is_staff": False,
            "is_active": True,
            "last_login": None,
        }],
    }
def berechne_ostern(jahr):
    """Return an aware datetime for Easter Sunday of the year *jahr*.

    Computed with the Gaussian Easter formula
    (see http://www.ptb.de/de/org/4/44/441/oste.htm);
    per the original author, correct at least until 2031.
    """
    K = jahr // 100
    M = 15 + ((3 * K + 3) // 4) - ((8 * K + 13) // 25)
    S = 2 - ((3 * K + 3) // 4)
    A = jahr % 19
    D = (19 * A + M) % 30
    R = (D + (A // 11)) // 29
    OG = 21 + D - R
    SZ = 7 - (jahr + (jahr // 4) + S) % 7
    OE = 7 - ((OG - SZ) % 7)
    # Easter as a day-of-March offset, i.e. 32 corresponds to April 1st.
    tmp = OG + OE
    m = 0
    if tmp > 31:
        # Bump the month and reduce tmp to the day within that month.
        m = tmp // 31
        # NOTE(review): this branch is unreachable when tmp > 31 — verify intent.
        if tmp == 31:
            m = 0
        tmp = tmp - 31
    return timezone.make_aware(datetime(year=jahr, month=3 + m, day=tmp))
def convert(self, value):
    """Parse a datetime string into a datetime_safe.datetime; pass other values through.

    Raises SearchFieldError for strings that do not look like datetimes.
    """
    if value is None:
        return None
    if not isinstance(value, six.string_types):
        # Already a datetime-like object: hand it back untouched.
        return value

    match = DATETIME_REGEX.search(value)
    if not match:
        raise SearchFieldError(
            "Datetime provided to '%s' field doesn't appear to be a valid datetime string: '%s'"
            % (self.instance_name, value))

    data = match.groupdict()
    return datetime_safe.datetime(
        int(data["year"]),
        int(data["month"]),
        int(data["day"]),
        int(data["hour"]),
        int(data["minute"]),
        int(data["second"]),
    )
def test_task_add(self):
    """
    POST /api/task/
    data
    {
        'title': 'Example',
        'list': 1,
        'notes': 'This is an example',
        'priority': 1,
        'remind_me_on': '2020-09-09 01:01:01',
        'activity_type': 1,
        'status': 1,
        'tags': [1]
    }
    response 200
    {'message': 'New task created', 'data': {'task_id': 2}}
    """
    print("\nTest Task add")
    url = reverse('task-add-list')
    # remind_me_on must be timezone-aware to satisfy the model field.
    data = {
        'title': 'Example',
        'list': 1,
        'notes': 'This is an example',
        'priority': 1,
        'remind_me_on': make_aware(datetime(2020, 9, 15, 13, 0, 0)),
        'activity_type': 1,
        'status': 1,
        'tags': [1],
    }
    response = self.client.post(
        url, data, HTTP_AUTHORIZATION=f'Token {self.token}', format='json'
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(
        response.json(),
        {'message': 'New task created', 'data': {'task_id': 2}}
    )
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.utils.datetime_safe import datetime from pytz import UTC POSIX_ZERO = datetime(1970, 1, 1, tzinfo=UTC)
def test_year_and_week_to_monday(self):
    """Year 2017, week 51 must resolve to Monday 2017-12-18."""
    expected_monday = datetime(2017, 12, 18)
    self.assertEqual(expected_monday, year_and_week_to_monday(2017, 51))
# allow insecure transports for OAUTHLIB in DEBUG mode if DEBUG: os.environ.setdefault("OAUTHLIB_INSECURE_TRANSPORT", "y") # Get the promo PROMO_CODE_KEY = os.environb.get(b"PROMO_CODE_KEY", b"prout") PROMO_CODE_TAG = os.environ.get("PROMO_CODE_TAG", "Code promo matériel") CERTIFIED_GROUP_SUBTYPES = os.environ.get( "CERTIFIED_GROUP_SUBTYPES", "certifié,thématique certifié").split(",") CERTIFIED_2022_GROUP_SUBTYPES = os.environ.get( "CERTIFIED_GROUP_SUBTYPES", "certifié 2022,thématique certifié 2022").split(",") if os.environ.get("PROMO_CODE_DELAY") is not None: year, month, day = ( int(value) for value in os.environ.get("PROMO_CODE_DELAY").split("-")) PROMO_CODE_DELAY = make_aware(datetime(year, month, day)) else: PROMO_CODE_DELAY = None CERTIFIABLE_GROUP_TYPES = ["L", "B"] # groupes locaux # groupes thématiques CERTIFIABLE_GROUP_SUBTYPES = ["comité d'appui"] # HTML settings USER_ALLOWED_TAGS = [ "a", "abbr", "acronym", "b", "br", "blockquote", "code", "em",
def sort_schichten_in_templates(self):
    """Build a per-day calendar that slots this month's shifts into shift templates.

    Returns a tuple ``(splitted_templates, table_array)`` where
    *splitted_templates* is the list of (possibly midnight-split) template
    time ranges sorted by start time, and *table_array* maps each day of the
    current month to ``{template_index: [matching shifts]}``.
    """
    splitted_templates = []
    templates = get_schicht_templates(asn=self.request.user.assistenznehmer, order_by='beginn')
    # TODO: handle sub-templates and shifted templates
    for template in templates:
        if template.beginn < template.ende:
            splitted_templates.append(
                {
                    'beginn': template.beginn,
                    'ende': template.ende
                }
            )
        else:
            # Template crosses midnight: split it into two ranges,
            # evening-to-midnight and midnight-to-morning.
            splitted_templates.append(
                {
                    'beginn': template.beginn,
                    'ende': time(0)
                }
            )
            splitted_templates.append(
                {
                    'beginn': time(0),
                    'ende': template.ende
                }
            )
    splitted_templates = sorted(splitted_templates, key=lambda j: j['beginn'])
    start = self.act_date
    # Convert the shift collection into a calendar by adding empty days.
    end = shift_month(self.act_date, step=1)
    monatsletzter = (end - timedelta(days=1)).day  # last day number of the month
    schichten = get_sliced_schichten_by_asn(
        start=self.act_date,
        end=end,
        asn=self.request.user.assistenznehmer
    )
    table_array = {}
    for i in range(1, monatsletzter + 1):
        datakey = datetime(year=self.act_date.year, month=self.act_date.month, day=i)
        template_counter = 0
        table_array[datakey] = {}
        for template in splitted_templates:
            temp_beginn = timezone.make_aware(datetime.combine(datakey, template['beginn']))
            if template['ende'] == time(0):
                # Midnight end means the range runs into the next day.
                temp_ende = timezone.make_aware(
                    datetime.combine(
                        datakey + timedelta(days=1),
                        template['ende']
                    )
                )
            else:
                temp_ende = timezone.make_aware(datetime.combine(datakey, template['ende']))
            table_array[datakey][template_counter] = []
            schicht_counter = 0
            for schicht in schichten:
                if schicht['beginn'] == temp_beginn and schicht['ende'] == temp_ende:
                    # When several assistants "apply" for the same shift,
                    # multiple shifts can sit in the same template slot.
                    table_array[datakey][template_counter].append(schicht)
                    # NOTE(review): removing from the list being iterated —
                    # verify no matching shift is skipped by the iterator.
                    schichten.remove(schicht)
                    schicht_counter += 1
            if schicht_counter == 0:
                table_array[datakey][template_counter] = []
            template_counter += 1
    # TODO(review): leftover debug output below — remove once verified.
    print(schichten)
    print('---hurz-----')
    return splitted_templates, table_array
CACHE_TIMEOUT = 10 * 60        # default cache TTL: 10 minutes (seconds)
CACHE_TIMEOUT_LONG = 60 * 60   # long-lived cache TTL: 1 hour (seconds)

DOUBLE_RENDER = False
DOUBLE_RENDER_EXCLUDE_URLS = None

APP_DATA_CLASSES = {}

# Box
BOX_INFO = 'ella.core.box.BOX_INFO'
MEDIA_KEY = 'ella.core.box.MEDIA_KEY'

# Publishing configuration
CATEGORY_LISTINGS_PAGINATE_BY = 20
CATEGORY_NO_HOME_LISTINGS = False
# Far-future sentinel used when publish_from is empty ("never published").
PUBLISH_FROM_WHEN_EMPTY = utc_localize(datetime(3000, 1, 1))

# Dotted paths to the related-content finder callables.
RELATED_FINDERS = {
    'default': (
        'ella.core.related_finders.directly_related',
        'ella.core.related_finders.related_by_category',
    ),
    'directly': ('ella.core.related_finders.directly_related', )
}
LISTING_HANDLERS = {
    'default': 'ella.core.managers.ModelListingHandler',
}

# Category settings
CATEGORY_TEMPLATES = (('category.html', gettext('default (category.html)')), )
def comparteixCalendari(request, clau):
    """Serve a professor's schedule as an iCalendar feed.

    ``clau`` is the per-professor sharing key stored on
    DadesAddicionalsProfessor; an unresolvable key yields a 404. The feed
    contains the professor's class sessions (imparticions) and the outings
    (sortides) they are involved in.

    Fixes vs. previous version: the bare ``except:`` (which also swallowed
    SystemExit/KeyboardInterrupt) is narrowed to ``except Exception``, and
    the description now uses "Departament " with a trailing space, matching
    the organizer field.
    """
    cal = Calendar()
    cal.add('method', 'PUBLISH')  # IE/Outlook needs this
    try:
        dades_adicionals_professor = DadesAddicionalsProfessor.objects.get(
            clauDeCalendari=clau)
        professor = dades_adicionals_professor.professor
    except Exception:
        # Any lookup failure (unknown key, duplicate key, ...) maps to 404.
        return HttpResponseNotFound("")
    else:
        #-- imparticions (class sessions)
        imparticions = list(
            Impartir.objects.filter(horari__professor=professor).
            select_related("reserva").select_related(
                "reserva__aula").select_related("horari").select_related(
                    "horari__hora").select_related("horari__assignatura"))
        for instance in imparticions:
            event = Event()
            assignatura = instance.horari.assignatura.nom_assignatura
            # Room / group are optional; fall back to the empty string.
            aula = instance.reserva.aula.nom_aula if hasattr(
                instance, "reserva") and instance.reserva is not None else ""
            grup = instance.horari.grup.descripcio_grup if hasattr(
                instance.horari,
                "grup") and instance.horari.grup is not None else ""
            summary = u"{assignatura} {aula} {grup}".format(
                assignatura=assignatura,
                aula=aula,
                grup=grup,
            )
            d = instance.dia_impartir
            h = instance.horari.hora
            # Combine the session date with the timetable slot's start/end.
            event.add(
                'dtstart',
                localtime(
                    datetime(d.year, d.month, d.day, h.hora_inici.hour,
                             h.hora_inici.minute, h.hora_inici.second)))
            event.add(
                'dtend',
                localtime(
                    datetime(d.year, d.month, d.day, h.hora_fi.hour,
                             h.hora_fi.minute, h.hora_fi.second)))
            event.add('summary', summary)
            event.add('uid', 'djau-ical-impartir-{0}'.format(instance.id))
            event['location'] = vText(aula)
            cal.add_component(event)

        #-- sortides (outings the professor proposes, accompanies or leads)
        q_professor = Q(professor_que_proposa=professor)
        q_professor |= Q(altres_professors_acompanyants=professor)
        q_professor |= Q(professors_responsables=professor)
        sortides = list(
            Sortida.objects.filter(q_professor).filter(
                calendari_desde__isnull=False).exclude(estat__in=[
                    'E',
                    'P',
                ]).distinct())
        for instance in sortides:
            event = Event()
            summary = u"{ambit}: {titol}".format(
                ambit=instance.ambit, titol=instance.titol_de_la_sortida)
            event.add('dtstart', localtime(instance.calendari_desde))
            event.add('dtend', localtime(instance.calendari_finsa))
            event.add('summary', summary)
            organitzador = u"\nOrganitza: "
            organitzador += u"{0}".format(
                u"Departament " + instance.departament_que_organitza.nom
                if instance.departament_que_organitza_id else u"")
            organitzador += " " + instance.comentari_organitza
            event.add(
                'organizer',
                vText(u"{0} {1}".format(
                    u"Departament " + instance.departament_que_organitza.nom
                    if instance.departament_que_organitza_id else u"",
                    instance.comentari_organitza)))
            event.add('description',
                      instance.programa_de_la_sortida + organitzador)
            event.add('uid', 'djau-ical-sortida-{0}'.format(instance.id))
            event['location'] = vText(instance.ciutat)
            cal.add_component(event)
        return HttpResponse(cal.to_ical())
def _prepare_user_public_template_data(request, user):
    """Build the context dict for a user's public profile page.

    Collects donation totals (this calendar month and all-time, excluding
    cancelled/rejected transactions), maintained-project counts, and — when
    the user opted in and supports enough projects — the list of supported
    projects.
    """
    profile = Profile.objects.get(user_id=user.id)
    template_data = {}
    # Sum of this calendar month's pledges (from the 1st, in the server's
    # current timezone), excluding cancelled and rejected transactions.
    template_data['giving_monthly'] = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).filter(transaction_datetime__gte=datetime(
            now().year, now().month, 1, tzinfo=now().tzinfo)).exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected).aggregate(Sum('transaction_amount'))
        ['transaction_amount__sum']) or 0
    # All-time donated total, same exclusions; `or 0` covers the aggregate
    # returning None when there are no rows.
    template_data['gave_totally'] = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.rejected
        ).aggregate(Sum('transaction_amount'))['transaction_amount__sum']) or 0
    template_data['maintained_projects_count'] = Project.objects.filter(
        maintainer_id=user.id).count()
    template_data['maintained_unpublished_projects_count'] = (
        Project.objects.filter(maintainer_id=user.id).filter(
            is_public=False).count())
    template_data['maintained_public_projects_list'] = (Project.objects.filter(
        maintainer_id=user.id).filter(is_public=True))
    # Distinct projects this user has donated to (any time).
    supported_projects_count = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                  rejected).values('accepting_project_key').distinct().count())
    template_data['is_supported_projects_list_public'] = False
    # The supported-projects list is only exposed when the user opted in AND
    # supports at least the configured minimum number of projects.
    if profile.projects_list_is_public and supported_projects_count >= MINIMAL_SUPPORTED_PROJECTS_COUNT_FOR_PUBLIC:
        template_data['is_supported_projects_list_public'] = True
        supported_projects_keys_list = (DonationTransaction.objects.filter(
            pledger_user_id=user.id).exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected).values('accepting_project_key',
                                     'accepting_project_title').distinct())
        template_data['supported_projects_list'] = []
        for supported_project in supported_projects_keys_list:
            project = Project.objects.filter(
                key=supported_project['accepting_project_key'])
            # False acts as a "not available" sentinel for key/title.
            project_key = False
            project_title = False
            if project.count() == 1:
                project = project[0]
                project_title = project.title
                # Only link (expose the key) for public projects.
                if project.is_public:
                    project_key = project.key
            else:
                # Project no longer exists (or duplicates); fall back to the
                # title recorded on the transaction.
                project_title = supported_project['accepting_project_title']
            template_data['supported_projects_list'].append({
                'title': project_title,
                'key': project_key,
            })
    return template_data
def _prepare_user_pledges_monthly_history_data(request, user):
    """Build a month-by-month history of the user's pledges.

    Walks every calendar month from the user's first to last non-cancelled,
    non-rejected transaction and, for each month, records the total pledged
    plus per-project breakdowns of monthly and one-time pledges.
    Returns a list of per-month dicts (empty if the user has no
    transactions).
    """
    pledges_monthly_history = []
    total_transactions = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                  rejected).count())
    if total_transactions == 0:
        return pledges_monthly_history
    # Newest transaction timestamp — upper bound of the month walk.
    month_upper_bound = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.rejected
        ).order_by('-transaction_datetime')[0]).transaction_datetime
    # Oldest transaction timestamp — where the walk starts.
    month_lower_bound = (DonationTransaction.objects.filter(
        pledger_user_id=user.id).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.rejected
        ).order_by('transaction_datetime')[0]).transaction_datetime
    index_year = int(month_lower_bound.year)
    index_month = int(month_lower_bound.month)
    while True:
        # [current_month, next_month) is the half-open window for this
        # iteration, in the server's current timezone.
        current_month = datetime(year=index_year,
                                 month=index_month,
                                 day=1,
                                 tzinfo=now().tzinfo)
        if index_month == 12:
            next_month = datetime(year=index_year + 1,
                                  month=1,
                                  day=1,
                                  tzinfo=now().tzinfo)
        else:
            next_month = datetime(year=index_year,
                                  month=index_month + 1,
                                  day=1,
                                  tzinfo=now().tzinfo)
        if current_month > month_upper_bound:
            break
        # Advance the index now; the rest of the body still uses the
        # current_month/next_month captured above.
        index_month = index_month + 1
        if index_month > 12:
            index_year = index_year + 1
            index_month = 1
        current_months_transactions_monthly = (
            DonationTransaction.objects.filter(pledger_user_id=user.id).filter(
                transaction_datetime__gte=current_month).filter(
                    transaction_datetime__lt=next_month).filter(
                        pledger_donation_type=DONATION_TYPES_CHOICES.monthly).
            exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected))
        current_months_transactions_onetime = (
            DonationTransaction.objects.filter(pledger_user_id=user.id).filter(
                transaction_datetime__gte=current_month).filter(
                    transaction_datetime__lt=next_month).filter(
                        pledger_donation_type=DONATION_TYPES_CHOICES.onetime).
            exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected))
        # Month total = monthly pledges + one-time pledges; `or 0` covers
        # empty aggregates.
        current_months_transactions_total = 0
        current_months_transactions_total = current_months_transactions_total + (
            (current_months_transactions_monthly.aggregate(
                Sum('transaction_amount'))['transaction_amount__sum']) or 0)
        current_months_transactions_total = current_months_transactions_total + (
            (current_months_transactions_onetime.aggregate(
                Sum('transaction_amount'))['transaction_amount__sum']) or 0)
        current_months_monthly_pledged_projects_list = (
            current_months_transactions_monthly.values(
                'accepting_project_key', 'accepting_project_title').distinct())
        monthly_pledged_projects = []
        for monthly_pledged_project in current_months_monthly_pledged_projects_list:
            project_total_pledge = (current_months_transactions_monthly.filter(
                accepting_project_key=monthly_pledged_project[
                    'accepting_project_key']).aggregate(
                        Sum('transaction_amount'))['transaction_amount__sum'])
            active_project = Project.objects.filter(
                key=monthly_pledged_project['accepting_project_key'])
            # False acts as a "not available" sentinel for key/title.
            project_key = False
            project_title = False
            if active_project.count() == 1:
                active_project = active_project[0]
                project_title = active_project.title
                # Only expose the key (i.e. link) for public projects.
                if active_project.is_public:
                    project_key = active_project.key
            else:
                # Project is gone; use the title recorded on the transaction.
                project_title = monthly_pledged_project[
                    'accepting_project_title']
            monthly_pledged_projects.append({
                'project_key': project_key,
                'project_title': project_title,
                'project_total_pledge': project_total_pledge,
            })
        current_months_onetime_pledged_projects_list = (
            current_months_transactions_onetime.values(
                'accepting_project_key', 'accepting_project_title').distinct())
        onetime_pledged_projects = []
        for onetime_pledged_project in current_months_onetime_pledged_projects_list:
            project_total_pledge = (current_months_transactions_onetime.filter(
                accepting_project_key=onetime_pledged_project[
                    'accepting_project_key']).aggregate(
                        Sum('transaction_amount'))['transaction_amount__sum'])
            active_project = Project.objects.filter(
                key=onetime_pledged_project['accepting_project_key'])
            project_key = False
            project_title = False
            if active_project.count() == 1:
                active_project = active_project[0]
                project_title = active_project.title
                if active_project.is_public:
                    project_key = active_project.key
            else:
                project_title = onetime_pledged_project[
                    'accepting_project_title']
            onetime_pledged_projects.append({
                'project_key': project_key,
                'project_title': project_title,
                'project_total_pledge': project_total_pledge,
            })
        monthly_data = {
            'date': current_month,
            'monthly_total': current_months_transactions_total,
            'monthly_pledged_projects': monthly_pledged_projects,
            'onetime_pledged_projects': onetime_pledged_projects,
        }
        pledges_monthly_history.append(monthly_data)
    return pledges_monthly_history
def get_monatserster(datum): return timezone.make_aware( datetime(year=datum.year, month=datum.month, day=1))
def add_interest_timestamp_where_interest_processed(apps, schema_editor): ImportBatch = apps.get_model('logs', 'ImportBatch') ImportBatch.objects.filter(interest_processed=True, interest_timestamp__isnull=True).\ update(interest_timestamp=datetime(2018, 1, 1, 0, 0, 0, tzinfo=utc))
def compare_dates(a, b): a = a["date"] b = b["date"] day_diff = day_diff_without_year(datetime(a.year, a.month, a.day), datetime(b.year, b.month, b.day)) return day_diff if day_diff != 1 else b.year - a.year
def day_diff_without_year(date, today): return (datetime(today.year, date.month, date.day) - today).days + 1
def test_create_form_csv_includes_tags(self): buffer = StringIO() build_absolute_uri = lambda x: "http://example.com" form_instance = FormInstance.objects.create(form=self.form, form_json=[{ "schema": { "slug": "first", "properties": { "name": { "type": "string" }, "email": { "type": "string" } } } }]) first_tag = TagFactory.create(name="First Tag") second_tag = TagFactory.create(name="Second Tag, with, commas") response_1 = FormResponseFactory.create( form_instance=form_instance, submission_date=datetime(2018, 1, 1, tzinfo=pytz.utc), json={ "name": "Peter", "email": "*****@*****.**" }) response_1.tags.set([first_tag, second_tag]) response_2 = FormResponseFactory.create(form_instance=form_instance, submission_date=datetime( 2018, 1, 2, tzinfo=pytz.utc), json={ "name": "Katharina", "email": "*****@*****.**" }) create_form_csv(self.form, self.investigation.slug, build_absolute_uri, buffer) lines = buffer.getvalue().split('\n') header = lines[0].strip() expected_header = "email,meta_comments,meta_id,meta_status,meta_submission_date,meta_tags,meta_url,meta_version,name" self.assertEquals(header, expected_header) first = lines[1].strip() expected_first = '[email protected],,{},Inbox,2018-01-01 00:00:00+00:00,"First Tag, Second Tag with commas",http://example.com,0,Peter'.format( response_1.id) self.assertEquals(first, expected_first) second = lines[2].strip() expected_second = "[email protected],,{},Inbox,2018-01-02 00:00:00+00:00,,http://example.com,0,Katharina".format( response_2.id) self.assertEquals(second, expected_second)
def handle(self, *args, **options):
    """Management command: export per-product AdWords profitability data
    (orders, costs, revenue since fixed 2013 campaign dates) to
    ``profitable_adwords_new_data.csv``.

    NOTE(review): Python 2 era code — uses ``itervalues()`` and opens the
    CSV in "wb"; would need changes to run on Python 3.
    """
    # Hard-coded campaign milestones for the 2013 PLA analysis.
    first_of_may = datetime(year=2013, month=5, day=1)
    new_pla_start = datetime(year=2013, month=9, day=20)
    date_end = datetime(year=2013, month=9, day=23)
    ad_group_list = []
    data = {}
    from purchase.models import LineItem
    from analytics.models import CampaignTrack
    for row in self._get_entries():
        try:
            ad = ProductAdWords.objects.get(product__slug=row["slug"])
        except Exception:
            # Products without AdWords data are skipped entirely.
            continue
        ad_group_list.append(ad.ad_group_id)
        data[ad.ad_group_id] = row
        row['orders_since_start'] = LineItem.objects.filter(
            product=ad.product,
            created__gte=first_of_may,
        ).count()
        orders_since_new_pla = LineItem.objects.filter(
            product=ad.product,
            created__gte=new_pla_start,
        )
        for line_item in orders_since_new_pla:
            user = line_item.order.user
            # Count the order only if a PLA campaign touched the user on the
            # order's day.
            pla_count = CampaignTrack.objects.filter(
                user=user,
                created_at__year=line_item.created.year,
                created_at__month=line_item.created.month,
                created_at__day=line_item.created.day,
                name__contains="PLA",
            ).count()
            if pla_count > 0:
                # NOTE(review): assumes _get_entries() pre-populates
                # 'orders_since_new_pla' — otherwise this raises KeyError;
                # confirm against _get_entries().
                row['orders_since_new_pla'] += 1
        row['status'] = ad.status
    # Merge cost figures from the AdWords API for both date windows.
    for ad_group in get_adgroup_data(ad_group_list, first_of_may, date_end):
        data[ad_group['id']]['google_cost'] = ad_group['stats']['cost'][
            'microAmount']
        # microAmount is millionths of the currency unit.
        data[ad_group['id']]['total_cost_since_start'] = float(
            ad_group['stats']['cost']['microAmount']) / float(1000000)
    for ad_group in get_adgroup_data(ad_group_list, new_pla_start, date_end):
        data[ad_group['id']]['total_cost_since_new_pla'] = float(
            ad_group['stats']['cost']['microAmount']) / float(1000000)
    with open("profitable_adwords_new_data.csv", "wb") as csvfile:
        writer = csv.writer(csvfile,
                            delimiter=";",
                            quotechar='"',
                            quoting=csv.QUOTE_ALL)
        for row in data.itervalues():
            # Revenue assumed to be 20% of total price per order — TODO
            # confirm the margin factor.
            row['revenue_since_new_pla'] = row[
                'orders_since_new_pla'] * row['total_price'] * 0.2
            row['total_profit_since_new_pla'] = row[
                'revenue_since_new_pla'] - row['total_cost_since_new_pla']
            writer.writerow(
                (row['slug'], row['url'], row['price'], row['shipping_price'],
                 row['total_price'], row['google_cost'],
                 row['total_cost_since_start'],
                 row['total_cost_since_new_pla'], row['orders_since_start'],
                 row['orders_since_new_pla'], row['revenue_since_new_pla'],
                 row['total_profit_since_new_pla'], row['status']))
    # NOTE(review): redundant — the `with` block above already closed the
    # file; close() on a closed file is a no-op here.
    csvfile.close()
def _calculate_modules_statistics(modules, user):
    """Annotate a module/section/chapter/topic tree with the user's watch
    statistics.

    Per-topic interaction data is pulled from TopicInteraction, attached to
    each topic, then aggregated bottom-up (topics -> chapters -> sections ->
    modules). Each node also gets a ``progress`` ratio in [0, 1]. Mutates
    the objects in ``modules`` in place and returns ``modules``.
    """
    # arbitrary default value for last interaction
    default_min_time = datetime(1970, 1, 1, tzinfo=pytz.utc)
    # Defaults applied to topics the user never interacted with.
    topic_property_defaults = {
        'last_interaction': default_min_time,
        'max_watched_time': 0,
        'total_watched_time': 0,
        'interactions_count': 0,
        'topics_count': 0,
        'finished_topics_count': 0,
    }

    # this is here due to bug on Heroku which is not installing Python 3.8:
    # https://sentry.io/organizations/python-pro/issues/1471675608/?project=236278&query=is%3Aunresolved
    def sum_with_start_0(lst):
        # Equivalent of sum(lst) but explicit about the empty case.
        lst = list(lst)
        if len(lst) == 0:
            return 0
        return sum(lst)

    # How each property is rolled up from a node's children.
    aggregation_functions = {
        'last_interaction': partial(max, default=default_min_time),
        'max_watched_time': sum_with_start_0,
        'total_watched_time': sum_with_start_0,
        'interactions_count': sum_with_start_0,
        'topics_count': sum_with_start_0,
        'finished_topics_count': sum_with_start_0,
        'duration': sum_with_start_0,
    }

    def _aggregate_statistics(contents, content_children_property_name):
        # Set every aggregated property on every node from its children.
        for content, (property_, aggregation_function) in product(
                contents, aggregation_functions.items()):
            children = getattr(content, content_children_property_name)
            setattr(content, property_,
                    aggregation_function(map(attrgetter(property_),
                                             children)))

    def _flaten(iterable, children_property_name):
        # Yield every child of every element (one level of flattening).
        for i in iterable:
            for child in getattr(i, children_property_name):
                yield child

    def calculate_progression(content):
        # Watched fraction, capped at 1; zero-duration content counts as 0.
        try:
            return min(content.max_watched_time / content.duration, 1)
        except ZeroDivisionError:
            return 0

    # One row per topic the user interacted with, with aggregated counters.
    qs = TopicInteraction.objects.filter(
        user=user).values('topic_id').annotate(
            last_interaction=Max('creation'),
            interactions_count=Count('*'),
            max_watched_time=Max('max_watched_time'),
            total_watched_time=Sum('total_watched_time')).all()
    user_interacted_topics = {t['topic_id']: t for t in qs}
    all_sections = list(_flaten(modules, 'sections'))
    all_chapters = list(_flaten(all_sections, 'chapters'))
    all_topics = list(_flaten(all_chapters, 'topics'))
    # Attach interaction data (or the defaults) to every topic.
    for topic, (property_, default_value) in product(
            all_topics, topic_property_defaults.items()):
        user_interaction_data = user_interacted_topics.get(topic.id, {})
        setattr(topic, property_,
                user_interaction_data.get(property_, default_value))
    for topic in all_topics:
        topic.progress = calculate_progression(topic)
        topic.topics_count = 1
        # A topic counts as finished when nearly fully watched AND the user
        # spent at least half its duration actually watching.
        watched_to_end = topic.progress > 0.99
        spent_half_time_watching = topic.total_watched_time * 2 > topic.duration
        topic.finished_topics_count = 1 if (
            watched_to_end and spent_half_time_watching) else 0
    # Aggregate bottom-up: the order matters — chapters before sections
    # before modules.
    contents_with_children_property_name = [(all_chapters, 'topics'),
                                            (all_sections, 'chapters'),
                                            (modules, 'sections')]
    for contents, content_children_property_name in contents_with_children_property_name:
        _aggregate_statistics(contents, content_children_property_name)
    for content in chain(all_chapters, all_sections, modules):
        setattr(content, 'progress', calculate_progression(content))
    return modules
def field(self, obj): return datetime(2016, 6, 25)
def test_serialize(self):
    """
    Tests various different forms of the serializer.
    This does not care about formatting, just that the parsed result is
    correct, so we always exec() the result and check that.
    """
    # Basic values
    self.assertSerializedEqual(1)
    self.assertSerializedEqual(None)
    self.assertSerializedEqual(b"foobar")
    string, imports = MigrationWriter.serialize(b"foobar")
    self.assertEqual(string, "b'foobar'")
    self.assertSerializedEqual("föobár")
    string, imports = MigrationWriter.serialize("foobar")
    self.assertEqual(string, "'foobar'")
    self.assertSerializedEqual({1: 2})
    self.assertSerializedEqual(["a", 2, True, None])
    self.assertSerializedEqual(set([2, 3, "eighty"]))
    self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
    # Lazy translation objects must serialize too.
    self.assertSerializedEqual(_('Hello'))
    # Functions — plain lambdas are not serializable and must raise.
    with six.assertRaisesRegex(self, ValueError,
                               'Cannot serialize function: lambda'):
        self.assertSerializedEqual(lambda x: 42)
    self.assertSerializedEqual(models.SET_NULL)
    string, imports = MigrationWriter.serialize(models.SET(42))
    self.assertEqual(string, 'models.SET(42)')
    self.serialize_round_trip(models.SET(42))
    # Datetime stuff — both values and the callables themselves.
    self.assertSerializedEqual(datetime.datetime.utcnow())
    self.assertSerializedEqual(datetime.datetime.utcnow)
    self.assertSerializedEqual(datetime.datetime.today())
    self.assertSerializedEqual(datetime.datetime.today)
    self.assertSerializedEqual(datetime.date.today())
    self.assertSerializedEqual(datetime.date.today)
    # Timezone-aware datetimes are rejected by the serializer here.
    with self.assertRaises(ValueError):
        self.assertSerializedEqual(
            datetime.datetime(2012, 1, 1, 1, 1,
                              tzinfo=get_default_timezone()))
    # datetime_safe values must serialize as plain datetime/date.
    safe_date = datetime_safe.date(2014, 3, 31)
    string, imports = MigrationWriter.serialize(safe_date)
    self.assertEqual(string, repr(datetime.date(2014, 3, 31)))
    self.assertEqual(imports, {'import datetime'})
    safe_datetime = datetime_safe.datetime(2014, 3, 31, 16, 4, 31)
    string, imports = MigrationWriter.serialize(safe_datetime)
    self.assertEqual(string,
                     repr(datetime.datetime(2014, 3, 31, 16, 4, 31)))
    self.assertEqual(imports, {'import datetime'})
    # Classes
    validator = RegexValidator(message="hello")
    string, imports = MigrationWriter.serialize(validator)
    self.assertEqual(
        string, "django.core.validators.RegexValidator(message='hello')")
    self.serialize_round_trip(validator)
    validator = EmailValidator(message="hello")  # Test with a subclass.
    string, imports = MigrationWriter.serialize(validator)
    self.assertEqual(
        string, "django.core.validators.EmailValidator(message='hello')")
    self.serialize_round_trip(validator)
    # A deconstructible wrapper must serialize under its custom path.
    validator = deconstructible(
        path="custom.EmailValidator")(EmailValidator)(message="hello")
    string, imports = MigrationWriter.serialize(validator)
    self.assertEqual(string, "custom.EmailValidator(message='hello')")
    # Django fields
    self.assertSerializedFieldEqual(models.CharField(max_length=255))
    self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
    # Setting references
    self.assertSerializedEqual(
        SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL"))
    self.assertSerializedResultEqual(
        SettingsReference("someapp.model", "AUTH_USER_MODEL"), (
            "settings.AUTH_USER_MODEL",
            set(["from django.conf import settings"]),
        ))
    # Generators are serialized as the tuple of their values.
    self.assertSerializedResultEqual(((x, x * x) for x in range(3)), (
        "((0, 0), (1, 1), (2, 4))",
        set(),
    ))
class Chat(models.Model):
    """A chat/conversation record, uniquely identified per type by ``key``."""
    type = models.CharField(max_length=10, choices=ChatTypeChoices)
    # self -> '<owner>'
    # cs -> '<tag>|<owner>'
    # user -> '<user1>,<user2>', user1 < user2
    key = models.CharField(max_length=100,
                           null=True,
                           unique=True,
                           editable=False)
    # biz id
    app_id = models.CharField(max_length=16, null=True, blank=True)
    biz_id = models.CharField(max_length=160,
                              null=True,
                              unique=True,
                              blank=True)
    title = models.CharField(max_length=64,
                             null=False,
                             default="",
                             blank=True)
    tag = models.CharField(max_length=8,
                           null=False,
                           default="",
                           db_index=True,
                           blank=True)
    # starting message id
    start_msg_id = models.BigIntegerField(editable=False, default=0)
    # last message id; message ids are per-chat
    msg_id = models.BigIntegerField(editable=False, default=0)
    # timestamp of the last message across all chats; used to check for
    # updates
    # NOTE(review): naive datetime default — with USE_TZ=True Django warns
    # about naive datetimes; confirm intended.
    last_msg_ts = models.DateTimeField(editable=False,
                                       default=datetime(1970, 1, 1))
    ext = models.TextField(default="", blank=True)
    is_deleted = models.BooleanField(default=False)
    created = models.DateTimeField(auto_now_add=True, editable=False)
    # must be refreshed after members are added
    members_updated = models.DateTimeField(auto_now=True, editable=False)
    updated = models.DateTimeField(auto_now=True, editable=False)
    # owner
    owner = models.CharField(max_length=32,
                             null=True,
                             default=None,
                             db_index=True,
                             blank=True)

    def update_updated(self, fields=None):
        # Touch `updated` (plus any extra fields) without a full save.
        self.save(update_fields=['updated'] + (fields if fields else []))

    def update_members_updated(self, fields=None):
        # Touch both `updated` and `members_updated` after membership changes.
        self.save(update_fields=['updated', 'members_updated'] +
                  (fields if fields else []))

    # only for migration.
    def set_key(self):
        # Backfill `key` from type-specific data; no-op if already set.
        if self.key is None:
            if self.type == ChatType.SELF:
                self.key = self.owner
                self.save()
            elif self.type == ChatType.USER:
                # Sorted so the two-user key is order-independent.
                users = [m.user for m in self.members.all()]
                self.key = ','.join(sorted(users))
                self.save()
            elif self.type == ChatType.CS:
                self.key = '%s|%s' % (self.tag, self.owner)
                self.save()

    @property
    def chat_id(self):
        # External chat identifier: '<type>.<pk>'.
        return "%s.%d" % (self.type, self.id)

    class Meta:
        verbose_name = _("Chat")
        verbose_name_plural = _("Chats")
        unique_together = (("type", "key"), )

    def save(self, *args, **kwargs):
        # Normalize empty biz_id to NULL so the unique constraint allows
        # multiple rows without a biz_id.
        if not self.biz_id:
            self.biz_id = None
        super(Chat, self).save(*args, **kwargs)

    def __str__(self):
        return "%s#%d@%s" % (self.type, self.id, self.tag)
def test_admin_can_see_admin_users_endpoint(api_client, data_fixture): staff_user, token = data_fixture.create_user_and_token( email="*****@*****.**", password="******", first_name="Test1", is_staff=True, date_joined=datetime(2021, 4, 1, 1, 0, 0, 0, tzinfo=timezone.utc), ) group_user_is_admin_of = data_fixture.create_group() data_fixture.create_user_group( group=group_user_is_admin_of, user=staff_user, permissions=GROUP_USER_PERMISSION_ADMIN, ) group_user_is_not_admin_of = data_fixture.create_group() data_fixture.create_user_group( group=group_user_is_not_admin_of, user=staff_user, permissions=GROUP_USER_PERMISSION_MEMBER, ) response = api_client.get( reverse("api:premium:admin_user:users"), format="json", HTTP_AUTHORIZATION=f"JWT {token}", ) assert response.status_code == HTTP_200_OK assert response.json() == { "count": 1, "next": None, "previous": None, "results": [{ "date_joined": "2021-04-01T01:00:00Z", "name": staff_user.first_name, "username": staff_user.email, "groups": [ { "id": group_user_is_admin_of.id, "name": group_user_is_admin_of.name, "permissions": GROUP_USER_PERMISSION_ADMIN, }, { "id": group_user_is_not_admin_of.id, "name": group_user_is_not_admin_of.name, "permissions": GROUP_USER_PERMISSION_MEMBER, }, ], "id": staff_user.id, "is_staff": True, "is_active": True, "last_login": None, }], }
def _prepare_project_budget_history_template_data(request,
                                                  project,
                                                  monthdate=None):
    """Build the budget-history context for ``project`` for one month.

    ``monthdate`` is the first day of the month to report on; defaults to
    the current month. Aggregates one-time and monthly pledges (excluding
    goal-bound, cancelled and rejected transactions) plus redonations paid
    in and out of the project within the month window.
    """
    budget_month_history_data = {}
    # Placeholders — the visible code never recalculates these.
    budget_month_history_data['starting_balance'] = 0
    budget_month_history_data['withdrawn'] = 0
    budget_month_history_data['ending_balance'] = 0
    if monthdate is None:
        monthdate = datetime(now().year, now().month, 1, tzinfo=now().tzinfo)
    budget_month_history_data['month'] = monthdate
    # Half-open month window [this_month_start, next_month_start).
    this_month_start = monthdate
    next_month_start = add_months(monthdate, 1)
    # One-time pledges this month, excluding goal-bound transactions.
    onetime_pledges_monthly = (
        DonationTransaction.objects.filter(accepting_project_id=project.id).
        filter(
            transaction_type=DONATION_TRANSACTION_TYPES_CHOICES.pledge).filter(
                pledger_donation_type=DONATION_TYPES_CHOICES.onetime).filter(
                    transaction_datetime__gte=this_month_start).filter(
                        transaction_datetime__lt=next_month_start).
        exclude(accepting_goal__isnull=False).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.rejected))
    budget_month_history_data['onetime_pledges_monthly_total'] = (
        onetime_pledges_monthly.aggregate(
            Sum('transaction_amount'))['transaction_amount__sum']) or 0
    budget_month_history_data[
        'onetime_pledges_monthly_count'] = onetime_pledges_monthly.count()
    budget_month_history_data[
        'onetime_pledges_monthly_users_count'] = onetime_pledges_monthly.values(
            'pledger_username').distinct().count()
    # Monthly (recurring) pledges this month, same exclusions.
    monthly_pledges_monthly = (
        DonationTransaction.objects.filter(accepting_project_id=project.id).
        filter(
            transaction_type=DONATION_TRANSACTION_TYPES_CHOICES.pledge).filter(
                pledger_donation_type=DONATION_TYPES_CHOICES.monthly).filter(
                    transaction_datetime__gte=this_month_start).filter(
                        transaction_datetime__lt=next_month_start).
        exclude(accepting_goal__isnull=False).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.cancelled
        ).exclude(
            transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.rejected))
    budget_month_history_data['monthly_pledges_monthly_total'] = (
        monthly_pledges_monthly.aggregate(
            Sum('transaction_amount'))['transaction_amount__sum']) or 0
    budget_month_history_data['subscription_count'] = (
        DonationSubscription.objects.filter(project_id=project.id).filter(
            is_active=True).count())
    # Redonations received by this project within the window.
    redonations_paidin_monthly = (DonationTransaction.objects.filter(
        accepting_project_id=project.id).filter(
            transaction_type=DONATION_TRANSACTION_TYPES_CHOICES.redonation
        ).filter(transaction_datetime__gte=this_month_start).filter(
            transaction_datetime__lt=next_month_start).exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected))
    budget_month_history_data['redonations_paidin_monthly_total'] = (
        redonations_paidin_monthly.aggregate(
            Sum('transaction_amount'))['transaction_amount__sum']) or 0
    budget_month_history_data[
        'redonations_paidin_monthly_projects_count'] = redonations_paidin_monthly.values(
            'redonation_project_key').distinct().count()
    # Redonations paid out by this project within the window.
    redonations_paidout_monthly = (DonationTransaction.objects.filter(
        redonation_project_id=project.id).filter(
            transaction_type=DONATION_TRANSACTION_TYPES_CHOICES.redonation
        ).filter(transaction_datetime__gte=this_month_start).filter(
            transaction_datetime__lt=next_month_start).exclude(
                transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                cancelled).exclude(
                    transaction_status=DONATION_TRANSACTION_STATUSES_CHOICES.
                    rejected))
    budget_month_history_data['redonations_paidout_monthly_total'] = (
        redonations_paidout_monthly.aggregate(
            Sum('transaction_amount'))['transaction_amount__sum']) or 0
    budget_month_history_data[
        'redonations_paidout_monthly_projects_count'] = redonations_paidout_monthly.values(
            'accepting_project_key').distinct().count()
    return budget_month_history_data
def get_date_from_month_id(month_id, format="%b-%y"): y, m = divmod(month_id, 12) return datetime(y, m + 1, 1).strftime(format)