def test_at(self):
    """Transmission.at() returns the programme on air at the queried moment."""
    query_moment = timezone.make_aware(datetime.datetime(2015, 1, 6, 14, 30, 0))
    on_air = Transmission.at(query_moment)
    observed = [(t.programme.slug, t.start) for t in on_air]
    expected_start = timezone.make_aware(datetime.datetime(2015, 1, 6, 14, 0))
    self.assertListEqual(observed, [(u'classic-hits', expected_start)])
def search(self, serie, numero, remitente, destinatario, sucursal, fecha):
    """Search deposit slips by optional serie/number/sender/addressee/date.

    Text criteria are matched with ``icontains``; results are limited to
    state '2' slips for the given destination branch (``sucursal``) inside a
    computed date range and annotated with the outstanding balance (saldo).
    """
    # True when the caller supplied at least one search criterion.
    has_criteria = serie or numero or remitente or destinatario or fecha
    tz = timezone.get_current_timezone()
    if has_criteria:
        if fecha:
            # A date was entered: search the week ending on that date.
            # FIX: the original had the bare string literal
            # "si se ingreso fecha" here — a no-op expression statement,
            # clearly meant to be this comment.
            date = datetime.strptime(fecha, "%d/%m/%Y")
            end_date = timezone.make_aware(date, tz)
            start_date = end_date - timedelta(days=7)
        else:
            # No date given: search from a fixed epoch up to now.
            date = datetime.strptime("01/10/2015", "%d/%m/%Y")
            end_date = timezone.now()
            start_date = timezone.make_aware(date, tz)
    else:
        # No criteria at all: default to the last seven days.
        end_date = timezone.now()
        start_date = end_date - timedelta(days=7)
    busqueda = self.annotate(
        saldo=F('depositslip__total_amount') - F('amount')
    ).filter(
        depositslip__serie__icontains=serie,
        depositslip__number__icontains=numero,
        depositslip__sender__full_name__icontains=remitente,
        depositslip__addressee__full_name__icontains=destinatario,
        depositslip__state='2',
        depositslip__destination=sucursal,
        depositslip__created__range=(start_date, end_date)
    )
    return busqueda
def test_emp_man_hours(self):
    """Test for employees who clocked in before start is accurate"""
    start = timezone.make_aware(dt.datetime(2016, 6, 3, 6, 30))
    stop = timezone.make_aware(dt.datetime(2016, 6, 3, 10, 30))
    expected_emp_hours = 20.95
    # The three categories of employees whose hours count toward the window.
    employee_groups = (
        get_clocked_in(start),
        get_emp_who_left_during_shift(start, stop),
        get_emp_who_left_on_break(start, stop),
    )
    # Accumulate hours across every group, logging each contribution.
    emp_hours = 0
    for group in employee_groups:
        for employee in group:
            print("EMP= ", employee.EMP_ID_TXT)
            emp_hour = get_emp_man_hours(employee, start, stop)
            print("EMP HOUR= ", emp_hour)
            emp_hours += emp_hour
    self.assertAlmostEqual(emp_hours, expected_emp_hours)
def user_populate_history(self, user, follow_user):
    """Seed ``user``'s popular history from ``follow_user``'s visits of the last week.

    For every visit with known page info, create (or reuse) a
    PopularHistory row for ``user``, attach users/messages, then rescore.
    """
    week_ago = datetime.datetime.now() - datetime.timedelta(weeks=1)
    # FIX: make_aware() returns a new datetime; the original discarded the
    # result and filtered with the naive value.
    week_ago = timezone.make_aware(week_ago, timezone.get_current_timezone())
    # get all the visits that the new followee has
    eyehists = follow_user.eyehistory_set.filter(
        start_time__gt=week_ago).select_related()
    for e in queryset_iterator(eyehists):
        url = e.url
        url = url[:min(255, len(url))]  # clamp to the 255-char column limit
        # popularhistoryinfo stores general information about this page
        # such as description, title, domain, image, etc.
        p = PopularHistoryInfo.objects.filter(url=url)
        if p.exists():
            p = p[0]
            # create a popular history item for the user and the visit that
            # that user's followee has been to
            user_pop, _ = PopularHistory.objects.get_or_create(
                popular_history=p, user=user)
            self._add_users_and_messages(user_pop, e)
    # Next, go through all the popular history items created for this user
    # and score them
    self._calculate_scores(user)
def _test_file_time_getter_tz_handling_off(self, getter):
    """With USE_TZ off, storage time getters return naive datetimes in the system TZ."""
    # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
    # is UTC+1 and has no DST change. We can set the Django TZ to something
    # else so that UTC, Django's TIME_ZONE, and the system timezone are all
    # different.
    now_in_algiers = timezone.make_aware(datetime.now())
    with timezone.override(timezone.get_fixed_timezone(-300)):
        # At this point the system TZ is +1 and the Django TZ
        # is -5.
        self.assertFalse(self.storage.exists('test.file.tz.off'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file.tz.off', f)
        self.addCleanup(self.storage.delete, f_name)
        dt = getter(f_name)
        # dt should be naive, in system (+1) TZ
        self.assertTrue(timezone.is_naive(dt))
        # The three timezones are indeed distinct.
        naive_now = datetime.now()
        algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
        django_offset = timezone.get_current_timezone().utcoffset(naive_now)
        utc_offset = timezone.utc.utcoffset(naive_now)
        self.assertGreater(algiers_offset, utc_offset)
        self.assertLess(django_offset, utc_offset)
        # dt and naive_now should be the same effective time.
        self.assertLess(abs(dt - naive_now), timedelta(seconds=2))
        # If we convert dt to an aware object using the Algiers
        # timezone then it should be the same effective time to
        # now_in_algiers.
        _dt = timezone.make_aware(dt, now_in_algiers.tzinfo)
        self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2))
def test_link_stats_csv(self):
    """The CSV stats export lists one row per registered usage, oldest first.

    FIX: assertEquals is a deprecated alias removed in Python 3.12;
    replaced with assertEqual.
    """
    with mock.patch('django.utils.timezone.now') as mock_now:
        # register usage at a specific date
        mock_now.return_value = make_aware(datetime(2016, 3, 1, 10, 0, 0))
        self.link.register_usage(self.user)
        mock_now.return_value = make_aware(datetime(2016, 3, 1, 11, 15, 0))
        self.link.register_usage(self.user)
        stats_url = reverse('link-stats-csv', kwargs={'pk': self.link.pk})
        response = self.app.get(stats_url)
        # Sniff the dialect so the test is independent of the exact delimiter.
        lines = response.body.decode().split("\r\n")
        dialect = csv.Sniffer().sniff(response.body.decode())
        reader = csv.DictReader(lines, dialect=dialect)
        row = next(reader)
        self.assertEqual(row, {
            'User': '******',
            'Duration': '0',
            'Date': '2016-03-01 10:00:00',
            'Tool': 'Link Linkerly',
        })
        row = next(reader)
        self.assertEqual(row, {
            'User': '******',
            'Duration': '0',
            'Date': '2016-03-01 11:15:00',
            'Tool': 'Link Linkerly',
        })
def test_get_emp_who_left_on_break(self):
    """Test for employees who clocked in before start is accurate"""
    start = timezone.make_aware(dt.datetime(2016, 6, 3, 6, 30))
    stop = timezone.make_aware(dt.datetime(2016, 6, 3, 10, 30))
    expected_employees = ['010']
    neg_counter = len(expected_employees)
    # test that expected employees is what is found
    employees = get_emp_who_left_on_break(
        start=start,
        stop=stop,
    )
    for employee in employees:
        print("BREAK!!!")
        print(employee.EMP_ID_TXT)
        print(expected_employees)
        # testing that the length of expected goes to 0 and that we are not missing things
        # NOTE(review): the assertion runs before find_pop_and_return() shrinks
        # the list, so it checks the pre-pop length each iteration — confirm
        # this ordering is intentional.
        self.assertEqual(len(expected_employees), neg_counter)
        neg_counter -= 1
        found_item, expected_employees = find_pop_and_return(
            looking_for=employee.EMP_ID_TXT,
            expected_list=expected_employees,
        )
        self.assertEqual(employee.EMP_ID_TXT, found_item)
def edit_profile(request, id):
    """Display (GET) and process (POST) the profile-edit form for user ``id``.

    GET requires an authenticated user; unauthenticated visitors are sent to
    the login page.  POST updates first/last name, email and the stored
    timezone, then redirects back to the profile page.
    """
    if request.method == 'GET':
        # NOTE(review): is_authenticated is called — this is the pre-Django-1.10
        # callable form; confirm the Django version in use.
        if request.user.is_authenticated():
            # Current time rendered in the default timezone for display.
            now = timezone.make_aware(datetime.now(), timezone.get_default_timezone())
            now = formats.date_format(now, "SHORT_DATETIME_FORMAT")
            data = User.objects.get(pk=id)
            form = EditProfileForm()
            # Pre-fill the form with the user's existing values.
            dv = {'first_name': data.first_name, 'last_name': data.last_name, 'email': data.email}
            form = EditProfileForm(dv)
            return render(request, 'userprofile_edit.html',
                          {'form': form, 'timezones': pytz.common_timezones, "date": str(now)})
        else:
            return redirect('/authen/')
    elif request.method == 'POST':
        now = timezone.make_aware(datetime.now(), timezone.get_default_timezone())
        now = formats.date_format(now, "SHORT_DATETIME_FORMAT")
        data = User.objects.get(pk=id)
        if request.POST.get('updateuser'):
            user = request.user
            time = request.POST['timezone']
            form = EditProfileForm(request.POST)
            # Persist the chosen timezone on the profile and in the session.
            time_user = UserProfile.objects.filter(pk=id).update(user_timezone=time)
            request.session['django_timezone'] = request.POST['timezone']
            if form.is_valid():
                user.first_name = request.POST['first_name']
                user.last_name = request.POST['last_name']
                user.email = request.POST['email']
                user.save()
            # NOTE(review): an invalid form silently redirects without
            # re-rendering errors — confirm this is the intended UX.
            return redirect('/userprofile/')
        elif request.POST.get('canceledituser'):
            return redirect('/userprofile/')
        # NOTE(review): a POST with neither button falls through and returns
        # None, which Django reports as an error — verify this path is
        # unreachable from the template.
def test_datetime_time_timestamp_override(self):
    """Model start/end default to the min/max times of 2014-01-01 in the local zone.

    FIX: assertEquals is a deprecated alias removed in Python 3.12;
    replaced with assertEqual.
    """
    model_instance = StaticTimeStampedModel.objects.get()
    tz = pytz.timezone(settings.TIME_ZONE)
    # normalize() corrects the offset after localizing near DST boundaries.
    start_time = tz.normalize(
        make_aware(datetime(2014, 1, 1, 0, 0, 0, 0), tz)
    )
    end_time = tz.normalize(
        make_aware(datetime(2014, 1, 1, 23, 59, 59, 999999), tz)
    )
    self.assertEqual(
        model_instance.start,
        start_time,
        'Start time != datetime.min.time(): ({0} != {1})'.format(
            repr(model_instance.start), repr(start_time)
        )
    )
    self.assertEqual(
        model_instance.end,
        end_time,
        'End time != datetime.max.time(): ({0} != {1})'.format(
            repr(model_instance.end), repr(end_time)
        )
    )
def test_make_aware2(self):
    """make_aware() localizes a naive datetime via pytz and rejects aware input."""
    naive = datetime.datetime(2011, 9, 1, 12, 20, 30)
    self.assertEqual(timezone.make_aware(naive, CET), CET.localize(naive))
    already_aware = CET.localize(datetime.datetime(2011, 9, 1, 12, 20, 30))
    with self.assertRaises(ValueError):
        timezone.make_aware(already_aware, CET)
def extend_voting_period():
    """Extend voting period by EXTEND_VOTING_PERIOD if there is no majority decision.

    Looks at polls ending tomorrow; any non-extended poll whose 'Budget
    Approval' answers all fall at or below half the reviewer count is pushed
    back and its non-voters are reminded by email.
    """
    # avoid circular dependencies
    from remo.voting.models import Poll

    tomorrow = get_date(days=1)
    review_count = User.objects.filter(groups__name='Review').count()
    query_start = make_aware(datetime.combine(tomorrow, datetime.min.time()), pytz.UTC)
    query_end = make_aware(datetime.combine(tomorrow, datetime.max.time()), pytz.UTC)
    polls = Poll.objects.filter(end__range=[query_start, query_end])

    for poll in polls:
        if poll.is_extended:
            continue
        budget_poll = poll.radio_polls.get(question='Budget Approval')
        # FIX: any() replaces reduce(or_, map(...)), which raised TypeError
        # when the answer set was empty; behavior is identical otherwise.
        majority = any(answer.votes > review_count / 2
                       for answer in budget_poll.answers.all())
        if not majority:
            poll.end += timedelta(seconds=EXTEND_VOTING_PERIOD)
            poll.save()
            subject = '[Urgent] Voting extended for {0}'.format(poll.name)
            recipients = (User.objects.filter(groups=poll.valid_groups)
                          .exclude(pk__in=poll.users_voted.all())
                          .values_list('id', flat=True))
            ctx_data = {'poll': poll}
            template = 'emails/voting_vote_reminder.jinja'
            send_remo_mail.delay(subject=subject,
                                 recipients_list=recipients,
                                 email_template=template,
                                 data=ctx_data)
def combine_day_and_daymodel_time(self, day_start, daymodel_start, daymodel_delta):
    """
    Combine the day date with the daymodel start time, then calcul the stop
    from the start date and the given delta time (between daymodel start and
    stop time)

    This way we have clean datetimes, correctly calculated with the right
    day date and daymodel time.

    Return the calculated start and stop datetimes
    """
    # FIX: the original recomputed combined_date unconditionally after this
    # if/else, clobbering the `.date()` branch; the duplicate line is removed
    # so the branch actually takes effect.
    if not isinstance(day_start, datetime.date):
        combined_date = datetime.datetime.combine(day_start.date(), daymodel_start.time())
    else:
        combined_date = datetime.datetime.combine(day_start, daymodel_start.time())
    start = make_aware(combined_date, utc)
    # Adapt day hour to daylight if needed, we get the offset time for the day date,
    # substract it the time diff between daylight seasons, then substract the result
    # to the day date
    day_dst = (make_aware(datetime.datetime(day_start.year, day_start.month, day_start.day),
                          self.current_tz).utcoffset()) - self.seasons_offset
    start = start - day_dst
    return (
        start,
        start + daymodel_delta,
    )
def test_make_aware(self):
    """make_aware() attaches the given tzinfo and refuses already-aware input."""
    naive = datetime.datetime(2011, 9, 1, 13, 20, 30)
    expected = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
    self.assertEqual(timezone.make_aware(naive, EAT), expected)
    with self.assertRaises(ValueError):
        timezone.make_aware(expected, EAT)
def observation_info(self, observer):
    """Return a dict describing this body as seen by ``observer``.

    Keys: 'alt', 'az', 'up', plus best-effort 'neverup', 'rise'/'set'
    (UTC-aware datetimes) or 'alwaysup'.  Returns None when there is no
    underlying ephem object.
    """
    p_object = self.ephem_object
    if p_object:
        p_object.compute(observer)
        up = True if ephem.degrees(p_object.alt) > 0 else False
        info = {
            'alt': str(p_object.alt),
            'az': str(p_object.az),
            'up': up,
        }
        # 'neverup' is not defined for every body type; keep best-effort.
        # FIX: narrowed the bare `except:` clauses so KeyboardInterrupt /
        # SystemExit are no longer swallowed.
        try:
            info.update({
                'neverup': p_object.neverup
            })
        except Exception:
            pass
        try:
            next_rising = observer.next_rising(p_object)
            next_setting = observer.next_setting(p_object)
            info.update({
                'rise': timezone.make_aware(next_rising.datetime(), pytz.UTC) if next_rising else None,
                'set': timezone.make_aware(next_setting.datetime(), pytz.UTC) if next_setting else None,
            })
        except ephem.AlwaysUpError:
            info.update({
                'alwaysup': True
            })
        except Exception:
            # Circumpolar / computation errors: leave rise/set out.
            pass
        return info
def save(self, *args, **kwargs):
    """Apply the selected day model to the selected days of the datebook.

    Existing entries on those days are overwritten in place; remaining days
    are bulk-created.  Returns None.
    """
    daymodel = self.cleaned_data['daymodel']
    with_content = self.cleaned_data['with_content']
    # Duration between the model's start and stop times.
    daymodel_delta = daymodel.stop - daymodel.start
    # Bind datetime for each days using datebook period as the base date
    daydates = [self.daydate.replace(day=int(item)) for item in self.cleaned_data['days']]
    # Get time dst for daylight seasons (difference between the January and
    # July UTC offsets of the current zone).
    self.current_tz = get_current_timezone()
    self.winter_offset = make_aware(datetime.datetime(daymodel.start.year, 1, 1), self.current_tz).utcoffset()
    self.summer_offset = make_aware(datetime.datetime(daymodel.start.year, 7, 1), self.current_tz).utcoffset()
    self.seasons_offset = (self.summer_offset - self.winter_offset)
    # Fill existing entries
    for entry in self.datebook.dayentry_set.filter(activity_date__in=daydates).order_by('activity_date'):
        # Get the start/stop datetimes
        goto_start, goto_stop = self.combine_day_and_daymodel_time(entry.start, daymodel.start, daymodel_delta)
        # Fill object attribute using the daymodel
        entry.start = goto_start
        entry.stop = goto_stop
        entry.pause = daymodel.pause
        entry.overtime = daymodel.overtime
        if with_content:
            entry.content = daymodel.content
        entry.vacation = False  # Allways remove the vacation
        entry.save()
        # Remove the day number from remaining selected days
        i = self.cleaned_data['days'].index(str(entry.activity_date.day))
        self.cleaned_data['days'].pop(i)
    # Create remaining selected days
    new_days = []
    for day_no in self.cleaned_data['days']:
        activity_date = self.datebook.period.replace(day=int(day_no))
        goto_start, goto_stop = self.combine_day_and_daymodel_time(activity_date, daymodel.start, daymodel_delta)
        content = ""
        if with_content:
            content = daymodel.content
        new_days.append(DayEntry(
            datebook=self.datebook,
            activity_date=activity_date,
            start=goto_start,
            stop=goto_stop,
            pause=daymodel.pause,
            overtime=daymodel.overtime,
            content=content,
            vacation=False,
        ))
    # Bulk create all new days
    if new_days:
        DayEntry.objects.bulk_create(new_days)
        # Update the datebook because model save method is not triggered
        # with bulk creating
        self.datebook.modified = tz_now()
        self.datebook.save()
    return None
def test_trunc_second_func(self):
    """TruncSecond drops sub-second precision and rejects DateField sources."""
    start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
    # end_datetime is pre-truncated so it round-trips unchanged below.
    end_datetime = truncate_to(microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123)), 'second')
    if settings.USE_TZ:
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    self.create_model(end_datetime, start_datetime)
    self.assertQuerysetEqual(
        DTModel.objects.annotate(extracted=TruncSecond('start_datetime')).order_by('start_datetime'),
        [
            (start_datetime, truncate_to(start_datetime, 'second')),
            (end_datetime, truncate_to(end_datetime, 'second'))
        ],
        lambda m: (m.start_datetime, m.extracted)
    )
    self.assertQuerysetEqual(
        DTModel.objects.annotate(extracted=TruncSecond('start_time')).order_by('start_datetime'),
        [
            (start_datetime, truncate_to(start_datetime.time(), 'second')),
            (end_datetime, truncate_to(end_datetime.time(), 'second'))
        ],
        lambda m: (m.start_datetime, m.extracted)
    )
    # Without microsecond precision both stored rows equal their truncation.
    result = 1 if connection.features.supports_microsecond_precision else 2
    self.assertEqual(DTModel.objects.filter(start_datetime=TruncSecond('start_datetime')).count(), result)
    with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
        list(DTModel.objects.annotate(truncated=TruncSecond('start_date')))
    with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'start_date' to DateTimeField"):
        list(DTModel.objects.annotate(truncated=TruncSecond('start_date', output_field=DateField())))
def test_extract_weekday_func(self):
    """ExtractWeekDay maps Sunday=1..Saturday=7 for datetime and date fields."""
    start_datetime = microsecond_support(datetime(2015, 6, 15, 14, 30, 50, 321))
    end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
    if settings.USE_TZ:
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    self.create_model(end_datetime, start_datetime)
    self.assertQuerysetEqual(
        DTModel.objects.annotate(extracted=ExtractWeekDay('start_datetime')).order_by('start_datetime'),
        [
            # isoweekday() is Mon=1..Sun=7; (iso % 7) + 1 converts to Sun=1..Sat=7.
            (start_datetime, (start_datetime.isoweekday() % 7) + 1),
            (end_datetime, (end_datetime.isoweekday() % 7) + 1),
        ],
        lambda m: (m.start_datetime, m.extracted)
    )
    self.assertQuerysetEqual(
        DTModel.objects.annotate(extracted=ExtractWeekDay('start_date')).order_by('start_datetime'),
        [
            (start_datetime, (start_datetime.isoweekday() % 7) + 1),
            (end_datetime, (end_datetime.isoweekday() % 7) + 1),
        ],
        lambda m: (m.start_datetime, m.extracted)
    )
    self.assertEqual(DTModel.objects.filter(start_datetime__week_day=ExtractWeekDay('start_datetime')).count(), 2)
def filter_with_date_range(self, queryset):
    """Restrict ``queryset`` to rows whose date field falls inside the
    (possibly open-ended) configured date range."""
    field = self.date_range_field_name
    # No date field configured: nothing to filter on.
    if not field:
        return queryset
    default_tz = timezone.get_default_timezone()
    if self.start_date:
        # Strictly after midnight at the start of the first day.
        lower_bound = timezone.make_aware(
            datetime.combine(self.start_date, time(0, 0)), default_tz)
        queryset = queryset.filter(**{"%s__gt" % field: lower_bound})
    if self.end_date:
        # Strictly before the last second of the final day.
        upper_bound = timezone.make_aware(
            datetime.combine(self.end_date, time(hour=23, minute=59, second=59)),
            default_tz)
        queryset = queryset.filter(**{"%s__lt" % field: upper_bound})
    return queryset
def test_extract_week_func_boundaries(self):
    """ExtractWeek follows ISO-8601 numbering across year boundaries (1, 52, 53)."""
    end_datetime = microsecond_support(datetime(2016, 6, 15, 14, 10, 50, 123))
    if settings.USE_TZ:
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    week_52_day_2014 = microsecond_support(datetime(2014, 12, 27, 13, 0))  # Sunday
    week_1_day_2014_2015 = microsecond_support(datetime(2014, 12, 31, 13, 0))  # Wednesday
    week_53_day_2015 = microsecond_support(datetime(2015, 12, 31, 13, 0))  # Thursday
    if settings.USE_TZ:
        week_1_day_2014_2015 = timezone.make_aware(week_1_day_2014_2015, is_dst=False)
        week_52_day_2014 = timezone.make_aware(week_52_day_2014, is_dst=False)
        week_53_day_2015 = timezone.make_aware(week_53_day_2015, is_dst=False)
    days = [week_52_day_2014, week_1_day_2014_2015, week_53_day_2015]
    self.create_model(week_53_day_2015, end_datetime)
    self.create_model(week_52_day_2014, end_datetime)
    self.create_model(week_1_day_2014_2015, end_datetime)
    qs = DTModel.objects.filter(start_datetime__in=days).annotate(
        extracted=ExtractWeek('start_datetime'),
    ).order_by('start_datetime')
    self.assertQuerysetEqual(qs, [
        (week_52_day_2014, 52),
        # Dec 31 2014 belongs to ISO week 1 of 2015.
        (week_1_day_2014_2015, 1),
        (week_53_day_2015, 53),
    ], lambda m: (m.start_datetime, m.extracted))
def for_user_on_day(cls, user, date=None):
    """
    Calculates the amount of YooCoins allocated for a user on a particular
    day.  The day boundary is taken in US/Central time; ``date`` defaults to
    now and may be a naive datetime (interpreted in the default zone).
    """
    central = pytz.timezone("US/Central")
    default = timezone.get_default_timezone()
    # Get Local Datetime
    if not date:
        date = datetime.datetime.now(default)
    else:
        date = timezone.make_aware(date, default)
    # Convert to Central, Snap to Beginning of Day, Convert back
    date = date.astimezone(central)
    # FIX: renamed locals `min`/`max`, which shadowed the builtins.
    day_start = datetime.datetime(date.year, date.month, date.day, 0, 0, 0)
    day_start = timezone.make_aware(day_start, central)
    day_start = day_start.astimezone(default)
    # Strip tzinfo before comparing against stored timestamps.
    # NOTE(review): this assumes `added_at` is stored naive in the default
    # zone — confirm against the model definition.
    day_start = day_start.replace(tzinfo=None)
    day_end = day_start + datetime.timedelta(days=1)
    # Retrieve user transactions (for tickets) in the time range
    transactions = CoinSubmissionTransaction.objects.filter(
        transaction__wallet__user=user,
        transaction__added_at__gte=day_start,
        transaction__added_at__lt=day_end
    )
    transactions = transactions.aggregate(models.Sum("transaction__amount"))
    # aggregate() yields None when there are no rows; coerce to 0.
    result = transactions["transaction__amount__sum"] if transactions["transaction__amount__sum"] else 0
    return result
def post(self, request):
    """Grafana-style query endpoint: return the datapoints of each requested
    target that fall inside the requested time range."""
    body = json.loads(request.body.decode("utf-8"))
    window = body['range']
    start = make_aware(
        datetime.datetime.strptime(window['from'], DATETIME_FORMAT), pytz.utc)
    end = make_aware(
        datetime.datetime.strptime(window['to'], DATETIME_FORMAT), pytz.utc)
    results = []
    for target in body['targets']:
        key = target['target']
        stats = models.Stat.objects\
            .order_by('created')\
            .filter(key=key)\
            .filter(created__gte=start)\
            .filter(created__lte=end)
        # Time needs to be in milliseconds
        datapoints = [[stat.value, stat.created_unix * 1000] for stat in stats]
        results.append({
            'target': key,
            'datapoints': datapoints,
        })
    return JsonResponse(results, safe=False)
def setUp(self):
    """Create two clinics and services with three visits (two on the later
    date) and one survey response per visit."""
    self.survey = factories.Survey.create(role=survey_models.Survey.PATIENT_FEEDBACK)
    self.clinic1 = factories.Clinic.create(name='Clinic1')
    self.clinic2 = factories.Clinic.create(name='Clinic2')
    self.service1 = factories.Service.create(name='Service1')
    self.service2 = factories.Service.create(name='Service2')
    # Two distinct UTC visit times three days apart.
    dt1 = timezone.make_aware(timezone.datetime(2014, 7, 22), timezone.utc)
    dt2 = timezone.make_aware(timezone.datetime(2014, 7, 25), timezone.utc)
    visit1 = factories.Visit.create(
        patient=factories.Patient.create(clinic=self.clinic1),
        service=self.service1,
        visit_time=dt1)
    visit2 = factories.Visit.create(
        patient=factories.Patient.create(clinic=self.clinic2),
        service=self.service2,
        visit_time=dt2)
    visit3 = factories.Visit.create(
        patient=factories.Patient.create(clinic=self.clinic1),
        service=self.service2,
        visit_time=dt2)
    factories.SurveyQuestionResponse.create(
        visit=visit1, question__survey=self.survey)
    factories.SurveyQuestionResponse.create(
        visit=visit2, question__survey=self.survey)
    factories.SurveyQuestionResponse.create(
        visit=visit3, question__survey=self.survey)
    self.responses = survey_models.SurveyQuestionResponse.objects.all()
def datefilter(eventqs, context, start=None, end=None):
    """Filter ``eventqs`` by optional "%Y-%m-%d" start/end strings.

    Defaults to events starting today or later and ending within a week.
    The supplied strings are echoed into ``context``.  Returns the filtered
    queryset and the (mutated) context.

    Raises ValueError if a supplied string does not parse.
    """
    today = datetime.date.today()
    weekfromnow = today + datetime.timedelta(days=7)
    if start:
        context["start"] = start
        # FIX: removed `try: ... except: raise`, a no-op wrapper — a bare
        # re-raise is identical to no handler at all.
        startdate = make_aware(datetime.datetime.strptime(start, "%Y-%m-%d"))
        eventqs = eventqs.filter(datetime_start__gte=startdate)
    else:
        eventqs = eventqs.filter(datetime_start__gte=today)
    if end:
        context["end"] = end
        enddate = make_aware(datetime.datetime.strptime(end, "%Y-%m-%d"))
        eventqs = eventqs.filter(datetime_end__lte=enddate)
    else:
        eventqs = eventqs.filter(datetime_end__lte=weekfromnow)
    return eventqs, context
def check_uncheck_sampo(self):
    """Register ('check') or remove ('uncheck') a sampo-pass usage for the
    requested pass, at the requested date/time (defaults to today/midnight),
    and return the recalculated payments as JSON.
    """
    action = self.request.GET.get('action')
    time = self.request.GET.get('time')
    # FIX: the original used map(), whose Python 3 iterator can be neither
    # concatenated (date + hhmm) nor indexed (hhmm[0]); list comprehensions
    # restore the Python 2 behavior.
    if time:
        hhmm = [int(x) for x in self.request.GET['time'].split(':')]
    else:
        hhmm = [0, 0]
    date_str = self.request.GET.get('date')
    if date_str:
        date = [int(x) for x in date_str.split('.')]
        date_params = dict(
            zip(('day', 'month', 'year', 'hour', 'minute'), date + hhmm)
        )
        now = make_aware(datetime.datetime(**date_params), timezone(TIME_ZONE))
    else:
        now = make_aware(datetime.datetime.now(), timezone(TIME_ZONE)).replace(hour=hhmm[0],
                                                                               minute=hhmm[1],
                                                                               second=0,
                                                                               microsecond=0)
    if action == 'check':
        new_usage = SampoPassUsage(
            sampo_pass_id=int(self.request.GET['pid']),
            date=now
        )
        new_usage.save()
        passes, payments, _ = get_sampo_details(now)
        _json = json.dumps({
            'payments': payments
        })
        return HttpResponse(_json)
    elif action == 'uncheck':
        # TODO: if an admin deletes a record here for any day other than
        # today, the wrong record is deleted!  Better solved by passing the
        # correct date into this function.
        last_usage = SampoPassUsage.objects.filter(
            sampo_pass_id=int(self.request.GET['pid']),
            date__range=(
                now.replace(hour=0, minute=0, second=0, microsecond=0),
                now.replace(hour=23, minute=59, second=59, microsecond=999999)
            )
        ).last()
        if last_usage:
            last_usage.delete()
        passes, payments, _ = get_sampo_details(now)
        _json = json.dumps({
            'payments': payments
        })
        return HttpResponse(_json)
    else:
        return HttpResponseServerError('failed')
def _update_feed(feed):
    """Fetch ``feed``, push entries newer than last_pubdate to all callbacks,
    and stamp last_updated."""
    logging.info("Update started.")
    # Earliest last_updated for which another update would be redundant.
    old_limit = timezone.make_aware(
        datetime.now(), timezone.get_default_timezone()
    ) - timedelta(
        seconds=feed.update_interval
    )
    if feed.last_updated is not None and old_limit < feed.last_updated:
        logging.info("Update not needed.")
        #return  # Commented out for testing purposes.
    parsed_feed = feedparser.parse(feed.url)
    if parsed_feed.get('status') != 200:
        logging.warning("Feed '{url}' errored with message '{msg}'".format(
            url=feed.url, msg=str(parsed_feed['bozo_exception']).strip()))
    for entry in parsed_feed['entries']:
        # NOTE(review): dateutil may return an aware datetime here, which
        # would make both the comparison and make_aware() below fail —
        # confirm the feeds in use publish naive timestamps.
        published = dateutil.parser.parse(entry['published'])
        if feed.last_pubdate is None or published > feed.last_pubdate:
            logging.info('Update added new post: ' + entry['guid'])
            feed.last_pubdate = timezone.make_aware(published, timezone.get_default_timezone())
            for callback in _get_callbacks(feed):
                try:
                    # FIX: encode into a fresh name — the original rebound
                    # `entry`, so a second callback received double-encoded
                    # JSON and the next guard re-tested a plain string.
                    payload = jsonpickle.encode(entry, unpicklable=False)
                    urllib2.urlopen(callback.url, data=payload)
                except Exception as err:
                    logging.error("ERROR: " + str(err))
    feed.last_updated = timezone.make_aware(datetime.now(), timezone.get_default_timezone())
    feed.save()
def test_list_queryset(self):
    """The list view shows only this bank account's schedulers, most recent
    last_action first."""
    url = reverse('banktransactionschedulers:list', kwargs={
        'bankaccount_pk': self.bankaccount.pk
    })
    newer = BankTransactionSchedulerFactory(
        bankaccount=self.bankaccount,
        last_action=timezone.make_aware(datetime.datetime(2015, 7, 10)),
    )
    older = BankTransactionSchedulerFactory(
        bankaccount=self.bankaccount,
        last_action=timezone.make_aware(datetime.datetime(2015, 7, 9)),
    )
    # Scheduler of another bank account.
    BankTransactionSchedulerFactory()
    self.client.force_login(self.owner)
    response = self.client.get(url)
    self.assertQuerysetEqual(
        response.context['object_list'],
        [repr(newer), repr(older)],
    )
def admin(request, msg=None):
    """ admin landing page """
    # Lookahead window in hours; fall back to a fuzzy 48h default.
    delta = settings.LANDING_TIMEDELTA if settings.LANDING_TIMEDELTA else 48
    today = timezone.now()
    today_min = timezone.make_aware(datetime.datetime.combine(today.date(), datetime.time.min))
    end = today + datetime.timedelta(hours=delta)
    end_max = timezone.make_aware(datetime.datetime.combine(end.date(), datetime.time.max))
    # get upcoming and ongoing events
    window = (today_min, end_max)
    events = (Event.objects
              .filter(Q(datetime_start__range=window) | Q(datetime_end__range=window))
              .order_by('datetime_start')
              .filter(approved=True)
              .exclude(Q(closed=True) | Q(cancelled=True)))
    context = {}
    context['msg'] = msg
    context['events'] = events
    context['tznow'] = today
    return render(request, 'admin.html', context)
def parse_duration(begin, value, errors):
    """Return ``begin`` advanced by a duration string like "3d", "2w", "6m", "1y".

    Supported suffixes: y(ears), m(onths), d(ays), h(ours), w(eeks).
    On any parse failure an error message is appended to ``errors`` and
    None is returned.
    """
    if len(value) > 1:
        try:
            amount = int(value[:-1])
            unit = value[-1]
            if unit == 'y':
                return timezone.make_aware(datetime(begin.year + amount,
                                                    begin.month,
                                                    begin.day,
                                                    begin.hour,
                                                    begin.minute,
                                                    begin.second),
                                           timezone.get_current_timezone())
            if unit == 'm':
                # FIX: the original while-loop never terminated when
                # begin.month == 12 and over-counted years otherwise
                # (e.g. Nov + 2m gave Dec of the next year); derive the
                # target year/month arithmetically instead.
                total_months = begin.month - 1 + amount
                return timezone.make_aware(datetime(begin.year + total_months // 12,
                                                    total_months % 12 + 1,
                                                    begin.day,
                                                    begin.hour,
                                                    begin.minute,
                                                    begin.second),
                                           timezone.get_current_timezone())
                # Note: a target month with fewer days (e.g. Jan 31 + 1m)
                # still raises ValueError and falls through to the error
                # message, as before.
            if unit == 'd':
                return begin + timedelta(days=amount)
            if unit == 'h':
                return begin + timedelta(hours=amount)
            if unit == 'w':
                return begin + timedelta(weeks=amount)
        except ValueError:
            pass
    errors.append(_("Unable to parse duration: {}").format(value))
    return None
def updated_date_range(self):
    '''
    Filter by a date range for last_updated. Options should contain
    date_range_from and date_range_to, each of which are simply date
    objects.
    '''
    opts = self.options
    local_tz = tz.get_default_timezone()
    # Widen the dates to full-day UTC bounds: local midnight ... 23:59:59.000099.
    date_from = tz.make_aware(
        datetime.combine(opts['date_range_from'], time(0, 0)), local_tz
    ).astimezone(tz.utc)
    date_to = tz.make_aware(
        datetime.combine(opts['date_range_to'], time(23, 59, 59, 99)), local_tz
    ).astimezone(tz.utc)
    # RecordMetadata queries hit the fields directly; other models traverse
    # the record_metadata relation.
    prefix = '' if self.model._meta.object_name == 'RecordMetadata' else 'record_metadata__'
    filter = [
        {
            '{}record_last_updated_gmt__gte'.format(prefix): date_from,
            '{}record_last_updated_gmt__lte'.format(prefix): date_to
        },
        {
            '{}deletion_date_gmt__gte'.format(prefix): date_from,
            '{}deletion_date_gmt__lte'.format(prefix): date_to,
        }
    ]
    order_by = ['{}record_last_updated_gmt'.format(prefix)]
    return {'filter': filter, 'order_by': order_by}
def test_extract_year_exact_lookup(self):
    """
    Extract year uses a BETWEEN filter to compare the year to allow
    indexes to be used.
    """
    start_datetime = datetime(2015, 6, 15, 14, 10)
    end_datetime = datetime(2016, 6, 15, 14, 10)
    if settings.USE_TZ:
        start_datetime = timezone.make_aware(start_datetime, is_dst=False)
        end_datetime = timezone.make_aware(end_datetime, is_dst=False)
    self.create_model(start_datetime, end_datetime)
    self.create_model(end_datetime, start_datetime)
    # The generated SQL must use BETWEEN on the raw column, not EXTRACT().
    qs = DTModel.objects.filter(start_datetime__year__exact=2015)
    self.assertEqual(qs.count(), 1)
    query_string = str(qs.query).lower()
    self.assertEqual(query_string.count(' between '), 1)
    self.assertEqual(query_string.count('extract'), 0)
    # exact is implied and should be the same
    qs = DTModel.objects.filter(start_datetime__year=2015)
    self.assertEqual(qs.count(), 1)
    query_string = str(qs.query).lower()
    self.assertEqual(query_string.count(' between '), 1)
    self.assertEqual(query_string.count('extract'), 0)
    # date and datetime fields should behave the same
    qs = DTModel.objects.filter(start_date__year=2015)
    self.assertEqual(qs.count(), 1)
    query_string = str(qs.query).lower()
    self.assertEqual(query_string.count(' between '), 1)
    self.assertEqual(query_string.count('extract'), 0)
from datetime import timedelta, time, datetime

from django.core.mail import mail_admins
from django.core.management import BaseCommand
from django.utils import timezone
from django.utils.timezone import make_aware

from orders.models import Order

# Report window: [today 00:00, tomorrow 00:00) in the default timezone.
# NOTE(review): these are computed once at import time, so a long-lived
# process would keep reusing the same day — confirm this command always
# runs as a fresh process (e.g. via cron).
today = timezone.now()
tomorrow = today + timedelta(1)
today_start = make_aware(datetime.combine(today, time()))
today_end = make_aware(datetime.combine(tomorrow, time()))


class Command(BaseCommand):
    help = "Send Today's Orders Report to Admins"

    def handle(self, *args, **options):
        """Email admins one line per order confirmed inside today's window.

        Sends nothing when there are no matching orders.
        """
        orders = Order.objects.filter(confirmed_date__range=(today_start, today_end))
        if orders:
            message = ""
            for order in orders:
                message += f"{order} \n"
            subject = (f"Order Report for {today_start.strftime('%Y-%m-%d')} "
                       f"to {today_end.strftime('%Y-%m-%d')}")
            mail_admins(subject=subject, message=message, html_message=None)
def parse_newsitem(self, response):
    """Parse one news-item page into a NewsItemItem.

    Yields the item directly, or a Request for its image (with the item in
    meta) when the page carries one.  Pages without the expected title or
    date element are logged and skipped.
    """
    soup = BeautifulSoup(response.text, 'html.parser')
    item = NewsItemItem()
    if (title := response.css('h1.page-titles::text').get()) is not None:
        item['title'] = title.strip()
    else:
        log.warning("%s is not a news item" % response.url)
        return
    date_elem = soup.find('p', class_='noticia-data')
    # find() returns None when absent; None has no `parent`.
    if not hasattr(date_elem, "parent"):
        log.warning("Skipped %s" % response.url)
        return
    content_elem = date_elem.parent
    item['datetime'] = make_aware(
        datetime.strptime(date_elem.text.strip(), '%d-%m-%Y'))
    # Convert the article body HTML to markdown.
    content = str(content_elem.find('div', class_="noticia-corpo"))
    content = mdconverter.handle(content).strip()
    item['content'] = content
    item['html'] = response.text
    item['source'] = response.url
    img_elem = content_elem.find('img', class_="imagem-noticia")
    if img_elem is None:
        yield item
    else:
        yield Request(img_elem.attrs['src'], self.parse_image, meta={'item': item})

def parse_image(self, response):
    # NOTE(review): this method appears truncated in this view of the file;
    # left byte-identical.
    body = response.body
def test_publish_json_dsmr_reading(self, now_mock, queue_message_mock):
    """publish_json_dsmr_reading() maps reading fields through the configured
    [mapping] section, honours the enabled flag, and can localize timestamps."""
    now_mock.return_value = timezone.make_aware(timezone.datetime(
        2018, 1, 1), timezone=timezone.utc)
    json_settings = telegram.JSONTelegramMQTTSettings.get_solo()
    dsmr_reading = self._create_dsmrreading()

    # Mapping.
    json_settings.formatting = '''
[mapping]
# READING FIELD = JSON FIELD
id = aaa
timestamp = bbb
electricity_delivered_1 = ccc
electricity_returned_1 = ddd
electricity_delivered_2 = eee
electricity_returned_2 = fff
electricity_currently_delivered = ggg
electricity_currently_returned = hhh
phase_currently_delivered_l1 = iii
phase_currently_delivered_l2 = jjj
phase_currently_delivered_l3 = kkk
phase_currently_returned_l1 = lll
phase_currently_returned_l2 = mmm
phase_currently_returned_l3 = nnn
extra_device_timestamp = ooo
extra_device_delivered = ppp
'''
    json_settings.save()

    # Disabled by default: nothing must be queued.
    self.assertFalse(json_settings.enabled)
    self.assertFalse(queue_message_mock.called)
    dsmr_mqtt.services.callbacks.publish_json_dsmr_reading(
        reading=dsmr_reading)
    self.assertFalse(queue_message_mock.called)

    # Now enabled.
    json_settings.enabled = True
    json_settings.save()
    dsmr_mqtt.services.callbacks.publish_json_dsmr_reading(
        reading=dsmr_reading)
    self.assertTrue(queue_message_mock.called)
    # Inspect the queued payload field by field.
    _, _, kwargs = queue_message_mock.mock_calls[0]
    payload = json.loads(kwargs['payload'])
    self.assertEqual(payload['aaa'], DsmrReading.objects.get().pk)
    self.assertEqual(payload['bbb'], '2018-01-01T00:00:00Z')
    self.assertEqual(payload['ccc'], 1)
    self.assertEqual(payload['ddd'], 2)
    self.assertEqual(payload['eee'], 3)
    self.assertEqual(payload['fff'], 4)
    self.assertEqual(payload['ggg'], 5)
    self.assertEqual(payload['hhh'], 6)
    self.assertEqual(payload['iii'], 0.25)
    self.assertEqual(payload['jjj'], 0.35)
    self.assertEqual(payload['kkk'], 0.3)
    self.assertEqual(payload['lll'], 0.5)
    self.assertEqual(payload['mmm'], 0.75)
    self.assertEqual(payload['nnn'], 1.25)
    self.assertEqual(payload['ooo'], '2018-01-01T12:00:00Z')
    self.assertIsNone(payload['ppp'])

    # Check timezone conversion.
    telegram.JSONTelegramMQTTSettings.objects.update(
        use_local_timezone=True)
    queue_message_mock.reset_mock()
    dsmr_mqtt.services.callbacks.publish_json_dsmr_reading(
        reading=dsmr_reading)
    _, _, kwargs = queue_message_mock.mock_calls[0]
    payload = json.loads(kwargs['payload'])
    self.assertEqual(payload['bbb'], '2018-01-01T01:00:00+01:00')  # No longer UTC.
    self.assertEqual(payload['ooo'], '2018-01-01T13:00:00+01:00')  # No longer UTC.
def processPicklist(exp, f):
    """Process an uploaded shifter picklist CSV and update the experiment's
    soaks in bulk.

    Each CSV row is keyed by plate id + well + subwell and matched against the
    experiment's soaks (keyed by rockMakerId + well name + subwell). Matched
    soaks get mount status, storage location/position, shifter metadata and
    timestamps, then are persisted with two bulk_update passes (the second
    computes ``storage_nth`` once storage fields are saved).

    :param exp: experiment whose ``soaks`` queryset is updated
    :param f: uploaded picklist file object (binary; wrapped for text CSV)
    """
    from my_utils.constants import reverse_subwell_map, subwell_map
    from io import TextIOWrapper
    from collections import OrderedDict
    file_reader = csv.reader(TextIOWrapper(f), delimiter=',')
    soaks_qs = exp.soaks.select_related('dest__parentWell__plate')
    # Key soaks the same way picklist rows are keyed below so lookup is O(1).
    soaks_map = OrderedDict()
    for soak in soaks_qs:
        soak_key = "_".join([
            soak.dest.parentWell.plate.rockMakerId,
            soak.dest.parentWell.name,
            subwell_map[soak.dest.idx]
        ])
        soaks_map[soak_key] = soak
    storage_map = OrderedDict()
    for container in XtalContainer.objects.all():
        storage_map[container.name] = container
    # Fixed picklist column order; rows may be shorter than the full set.
    rows_dict = OrderedDict()
    for row in file_reader:
        data = OrderedDict([
            ('plate_type', ''),
            ('plate_id', ''),
            ('location', ''),
            ('plate_row', ''),
            ('plate_column', ''),
            ('plate_subwell', ''),
            ('comment', ''),
            ('crystal_id', ''),
            ('arrival_time', ''),
            ('departure_time', ''),
            ('duration', ''),
            ('destination_name', ''),
            ('destination_location', ''),
            ('barcode', ''),
            ('external_comment', ''),
        ])
        data_keys = list(data.keys())
        for i, col in enumerate(row):
            data[data_keys[i]] = col
        # Zero-pad single-digit columns so '1' and '01' key identically.
        tweaked_col = data['plate_column'] if len(
            data['plate_column']) == 2 else '0' + data['plate_column']
        rows_dict["_".join([
            data['plate_id'],
            data['plate_row'] + tweaked_col,
            data['plate_subwell']
        ])] = data
    soaks = []
    for k, v in rows_dict.items():
        soak = soaks_map.get(k)
        if soak:
            soaks.append(soak)
            storage = storage_map.get(v['destination_name'], None)
            storage_position = int(v['destination_location']
                                   ) if v['destination_location'] else None
            soak.isMounted = True
            soak.storage_location = v['destination_name']
            soak.storage_position = storage_position
            if storage:
                soak.storage = storage
            soak.shifterComment = v['comment']
            soak.shifterCrystalID = v['crystal_id']
            # Timestamps are optional; empty cells become None.
            soak.shifterArrivalTime = make_aware(
                datetime.strptime(
                    v['arrival_time'],
                    '%Y-%m-%d %H:%M:%S.%f')) if v['arrival_time'] else None
            soak.shifterDepartureTime = make_aware(
                datetime.strptime(
                    v['departure_time'],
                    '%Y-%m-%d %H:%M:%S.%f')) if v['departure_time'] else None
            soak.barcode = v['barcode']
            soak.shifterExternalComment = v['external_comment']
    # BUG FIX: 'isMounted' was set above but missing from the field list, so
    # the mount flag was never written to the database.
    Soak.objects.bulk_update(soaks, fields=[
        'isMounted', 'storage_position', 'storage_location', 'storage',
        'shifterComment', 'shifterCrystalID', 'shifterArrivalTime',
        'shifterDepartureTime', 'barcode', 'shifterExternalComment'
    ])
    # Second pass: storage_nth counts soaks sharing a storage slot, which is
    # only meaningful after the storage fields above are persisted.
    for soak in soaks:
        if soak.storage_id and soak.storage_position:
            soak.storage_nth = exp.soaks.filter(
                storage_id=soak.storage_id,
                storage_position=soak.storage_position).count()
        else:
            soak.storage_nth = 0
    Soak.objects.bulk_update(soaks, fields=['storage_nth'])
def forward(apps, schema_editor):
    """Data migration (Nadine 1.8): convert legacy OldBill/Transaction rows
    into the new UserBill/BillLineItem/Payment schema, all grouped under a
    single BillingBatch.

    Uses ``apps.get_model`` (historical models) as required inside migrations.
    """
    User = apps.get_model(settings.AUTH_USER_MODEL)
    OldBill = apps.get_model("nadine", "OldBill")
    Transaction = apps.get_model("nadine", "Transaction")
    BillingBatch = apps.get_model("nadine", "BillingBatch")
    UserBill = apps.get_model("nadine", "UserBill")
    BillLineItem = apps.get_model("nadine", "BillLineItem")
    CoworkingDayLineItem = apps.get_model("nadine", "CoworkingDayLineItem")
    CoworkingDay = apps.get_model("nadine", "CoworkingDay")
    Payment = apps.get_model("nadine", "Payment")
    Resource = apps.get_model("nadine", "Resource")
    tz = timezone.get_current_timezone()
    # Pull our Coworking Day Resource
    # NOTE(review): DAY is looked up but never used below — confirm it can go.
    DAY = Resource.objects.filter(key="day").first()
    # Create a BillingBatch for all these new Bills
    batch = BillingBatch.objects.create()
    for old_bill in OldBill.objects.all().order_by('bill_date'):
        # OldBill -> UserBill. Bills paid by someone else are attributed to
        # the payer, not the member.
        if old_bill.paid_by:
            user = old_bill.paid_by
        else:
            user = old_bill.user
        # Billing period: one calendar month starting on the old bill date.
        start = old_bill.bill_date
        end = start + relativedelta(months=1) - timedelta(days=1)
        bill = UserBill.objects.create(
            user=user,
            period_start=start,
            period_end=end,
            due_date=old_bill.bill_date,
            note='Migrated bill (Nadine 1.8)',
        )
        # Backdate created_ts to midnight of the original bill date
        # (set after create() because created_ts is auto-populated).
        bill_date = datetime.combine(old_bill.bill_date, datetime.min.time())
        bill.created_ts = timezone.make_aware(bill_date, tz)
        bill.save()
        # Add this bill to our BillingBatch
        batch.bills.add(bill)
        # We'll create one line item for the membership
        description = "Coworking Membership"
        if old_bill.membership:
            description = old_bill.membership.membership_plan.name + " " + description
        BillLineItem.objects.create(
            bill=bill,
            description=description,
            amount=old_bill.amount,
        )
        # Add all the dropins (zero-amount line items; cost is in the
        # membership line above).
        for day in old_bill.dropins.all().order_by('visit_date'):
            CoworkingDayLineItem.objects.create(
                bill=bill,
                description="%s Coworking Day" % day.visit_date,
                day=day,
                amount=0,
            )
            # Associate this day with this bill
            day.bill = bill
            day.save()
        # Add all our guest dropins
        for day in old_bill.guest_dropins.all().order_by('visit_date'):
            CoworkingDayLineItem.objects.create(
                bill=bill,
                description="%s Guest Coworking Day (%s)" %
                (day.visit_date, day.user.username),
                day=day,
                amount=0,
            )
            # Associate this day with this bill
            day.bill = bill
            day.save()
        # If there are any transactions on this bill
        # we are going to manually mark this as closed and paid
        if old_bill.transactions.count() > 0:
            close_date = datetime.combine(bill.period_end, datetime.max.time())
            bill.closed_ts = timezone.make_aware(close_date, tz)
            bill.mark_paid = True
            bill.save()
        # Transactions -> Payments (skip transactions already migrated).
        for t in old_bill.transactions.filter(new_payment__isnull=True):
            p = Payment.objects.create(
                bill=bill,
                user=user,
                amount=t.amount,
                note=t.note,
            )
            p.created_ts = t.transaction_date
            p.save()
            # Link to the new payment
            t.new_payment = p
            t.save()
    # Handle CoworkingDays older than our cutoff that were not billed yet
    # date_cutoff = localtime(now()).date() - timedelta(days=45)
    date_cutoff = date(2017, 6, 1)
    loose_days = CoworkingDay.objects.filter(
        bill__isnull=True, visit_date__lte=date_cutoff).order_by('visit_date')
    # NOTE(review): all loose days are attributed to an arbitrary first user —
    # looks intentional for a catch-all bill, but confirm.
    user = User.objects.first()
    # Only do this step if there is data in the system
    if loose_days and user:
        first_day = loose_days.first().visit_date
        last_day = loose_days.last().visit_date
        bill = UserBill.objects.create(user=user,
                                       period_start=first_day,
                                       period_end=last_day,
                                       due_date=last_day)
        bill.note = "This bill includes all days unbilled before %s (Nadine 1.8)" % date_cutoff
        for day in loose_days:
            description = "Coworking Day on %s: %s (%s)" % (
                day.visit_date, day.user.username, day.payment)
            CoworkingDayLineItem.objects.create(
                bill=bill,
                description=description,
                amount=0,
                day=day,
            )
            day.bill = bill
            day.save()
        bill.mark_paid = True
        bill.closed_ts = localtime(now())
        bill.save()
        batch.bills.add(bill)
    # Close up this BillingBatch
    batch.completed_ts = localtime(now())
    batch.successful = True
    batch.save()
def _usages_by_month(self):
    """
    Returns readings and column definitions formatted and aggregated to
    display all records in monthly intervals.

    At a high-level, following algorithm is used to acccomplish this:
        - Identify the first start time and last end time
        - For each month between, aggregate the readings found in that month
        - The highest possible reading total without overlapping times is found
        - For more details how that monthly aggregation occurs, see
          _max_reading_total()
    """
    # Used to consolidate different readings (types) within the same month:
    # outer key is "Month Year", inner keys are per-meter field names.
    monthly_readings = defaultdict(lambda: {})
    # Construct column_defs using this dictionary's values for frontend to use
    column_defs = {
        '_month': {
            'field': 'month',
            '_filter_type': 'datetime',
        },
    }
    for meter in self.meters:
        # _build_column_def registers the meter's column and returns the
        # field name plus the unit conversion factor applied at the end.
        field_name, conversion_factor = self._build_column_def(
            meter, column_defs)
        # Overall data range for this meter, in the display timezone.
        min_time = meter.meter_readings.earliest(
            'start_time').start_time.astimezone(tz=self.tz)
        max_time = meter.meter_readings.latest(
            'end_time').end_time.astimezone(tz=self.tz)
        # Iterate through months
        current_month_time = min_time
        while current_month_time < max_time:
            _weekday, days_in_month = monthrange(current_month_time.year,
                                                 current_month_time.month)
            # End of month = last second of the month + 1s, i.e. midnight of
            # the first day of the next month, made aware in self.tz.
            unaware_end = datetime(current_month_time.year,
                                   current_month_time.month, days_in_month,
                                   23, 59, 59) + timedelta(seconds=1)
            end_of_month = make_aware(unaware_end, timezone=self.tz)
            # Find all meters fully contained within this month
            # (second-level granularity).
            # NOTE(review): a reading straddling a month boundary is excluded
            # from BOTH months, since both start and end must lie in range —
            # confirm this is intended.
            interval_readings = meter.meter_readings.filter(
                start_time__range=(current_month_time, end_of_month),
                end_time__range=(current_month_time, end_of_month))
            if interval_readings.exists():
                readings_list = list(
                    interval_readings.order_by('end_time'))
                reading_month_total = self._max_reading_total(
                    readings_list)
                if reading_month_total > 0:
                    month_year = '{} {}'.format(
                        month_name[current_month_time.month],
                        current_month_time.year)
                    monthly_readings[month_year]['month'] = month_year
                    monthly_readings[month_year][
                        field_name] = reading_month_total / conversion_factor
            # Advance to the start of the next month.
            current_month_time = end_of_month
    return {
        'readings': list(monthly_readings.values()),
        'column_defs': list(column_defs.values())
    }
def test_display_my_search_results_with_one_results(self):
    """A saved search with a specific-title filter returns exactly one
    matching programme on the "my_results" page, with its titles and
    description exposed in the template context."""
    # Logged-in user owning the saved search.
    user = User.objects.create_user("john", "*****@*****.**",
                                    "johnpassword")
    self.client.login(username="******", password="******")
    # Saved search + channel + specific-title refinement.
    recherche = Recherche(
        recherche="gloire de mon père",
        max_resultats=3,
        utilisateur_id=user.id,
    )
    recherche.save()
    france_3 = Chaines.objects.create(id_chaine="france_3", nom="FRANCE 3")
    france_3.save()
    recherche.chaines.add(france_3.id)
    recherche_specifique = RechercheSpecifique(
        titre="La gloire de mon père",
        description="un film de",
        recherche_id=recherche.id,
    )
    recherche_specifique.save()
    # One future programme matching the search (dates far in the future so
    # the fixture never expires).
    gloire = Programmes.objects.create(
        chaines=france_3,
        date_debut=make_aware(datetime.datetime(3021, 2, 19, 17, 10, 41)),
        date_fin=make_aware(datetime.datetime(3022, 2, 19, 17, 10, 41)),
        titre_informatif="Titre_pagnol",
        description="Un film de Pagnol...",
        date_realisation=1990,
        public=18,
        aide_sourd=True,
        note=5,
        critique="C'est trop bien",
    )
    gloire.save()
    titre_gloire = Titres.objects.create(
        programmes_id=gloire.id,
        nom="La gloire de mon Père",
    )
    titre_gloire.save()
    response_get = self.client.get(
        reverse("my_results", kwargs={"my_search_id": recherche.id}))
    # Context must echo the search criteria and the single matched programme.
    assert (response_get.context["info_search"]["recherche"] ==
            "gloire de mon père")
    assert (response_get.context["info_search"]["titre"] ==
            "La gloire de mon père")
    assert len(response_get.context["info_programmes"]) == 1
    assert (response_get.context["info_programmes"][0]["titres"][0].nom ==
            "La gloire de mon Père")
    assert (response_get.context["info_programmes"][0]
            ["programme"].description == "Un film de Pagnol...")
    assert response_get.status_code == 200
    assert response_get.templates[0].name == "programmes/results.html"
def test_publish_split_topic_dsmr_reading(self, now_mock,
                                          queue_message_mock):
    """Split-topic MQTT publishing of a DSMR reading: disabled by default,
    one queued message per mapped field, and timestamps converted when
    ``use_local_timezone`` is enabled.

    ``now_mock`` freezes "now" for deterministic timestamps;
    ``queue_message_mock`` captures the queued (topic, payload) pairs.
    """
    now_mock.return_value = timezone.make_aware(timezone.datetime(
        2018, 1, 1), timezone=timezone.utc)
    split_topic_settings = telegram.SplitTopicTelegramMQTTSettings.get_solo(
    )
    dsmr_reading = self._create_dsmrreading()

    # Mapping: READING FIELD = TOPIC PATH (ini-style, parsed by the service).
    split_topic_settings.formatting = '''
[mapping]
# READING FIELD = TOPIC PATH
id = dsmr/telegram/id
timestamp = dsmr/telegram/timestamp
electricity_delivered_1 = dsmr/telegram/electricity_delivered_1
electricity_returned_1 = dsmr/telegram/electricity_returned_1
electricity_delivered_2 = dsmr/telegram/electricity_delivered_2
electricity_returned_2 = dsmr/telegram/electricity_returned_2
electricity_currently_delivered = dsmr/telegram/electricity_currently_delivered
electricity_currently_returned = dsmr/telegram/electricity_currently_returned
phase_currently_delivered_l1 = dsmr/telegram/phase_currently_delivered_l1
phase_currently_delivered_l2 = dsmr/telegram/phase_currently_delivered_l2
phase_currently_delivered_l3 = dsmr/telegram/phase_currently_delivered_l3
phase_currently_returned_l1 = dsmr/telegram/phase_currently_returned_l1
phase_currently_returned_l2 = dsmr/telegram/phase_currently_returned_l2
phase_currently_returned_l3 = dsmr/telegram/phase_currently_returned_l3
extra_device_timestamp = dsmr/telegram/extra_device_timestamp
extra_device_delivered = dsmr/telegram/extra_device_delivered
'''
    split_topic_settings.save()

    # Disabled by default: the callback must not queue anything.
    self.assertFalse(split_topic_settings.enabled)
    self.assertFalse(queue_message_mock.called)
    dsmr_mqtt.services.callbacks.publish_split_topic_dsmr_reading(
        reading=dsmr_reading)
    self.assertFalse(queue_message_mock.called)

    # Now enabled.
    queue_message_mock.reset_mock()
    split_topic_settings.enabled = True
    split_topic_settings.save()
    dsmr_mqtt.services.callbacks.publish_split_topic_dsmr_reading(
        reading=dsmr_reading)
    self.assertTrue(queue_message_mock.called)

    # Assert timezone UTC for this test.
    called_kwargs = [x[1] for x in queue_message_mock.call_args_list]
    expected = {
        'payload': '2018-01-01T00:00:00Z',
        'topic': 'dsmr/telegram/timestamp'
    }
    self.assertIn(expected, called_kwargs)
    expected = {
        'payload': '2018-01-01T12:00:00Z',
        'topic': 'dsmr/telegram/extra_device_timestamp'
    }
    self.assertIn(expected, called_kwargs)

    # Check timezone conversion (UTC -> +01:00 local offset).
    telegram.SplitTopicTelegramMQTTSettings.objects.update(
        use_local_timezone=True)
    queue_message_mock.reset_mock()
    dsmr_mqtt.services.callbacks.publish_split_topic_dsmr_reading(
        reading=dsmr_reading)
    called_kwargs = [x[1] for x in queue_message_mock.call_args_list]
    expected = {
        'payload': '2018-01-01T01:00:00+01:00',  # No longer UTC.
        'topic': 'dsmr/telegram/timestamp'
    }
    self.assertIn(expected, called_kwargs)
    expected = {
        'payload': '2018-01-01T13:00:00+01:00',  # No longer UTC.
        'topic': 'dsmr/telegram/extra_device_timestamp'
    }
    self.assertIn(expected, called_kwargs)
def test_get_datetime_start_with_profile(self):
    """Check that the app's starting datetime reflects the tz_offset."""
    expected = make_aware(datetime.datetime(2019, 9, 28, 11, 0), pytz.utc)
    self.assertEqual(self.stats.datetime_start, expected)
def test_recherche_and_recherche_specifique_with_letter_accent_insensitivity(
    self, ):
    """Search must be accent- and case-insensitive: a POST whose criteria
    differ from the stored data only by accents/case still matches the
    programme, and every related object is exposed in the context."""
    # Fixture: one channel and one fully-populated programme whose text
    # fields deliberately contain accented characters.
    france_3 = Chaines.objects.create(id_chaine="france_3", nom="FRANCE 3")
    france_3.save()
    id_france_3 = france_3.id
    gloire = Programmes.objects.create(
        chaines=france_3,
        date_debut=make_aware(datetime.datetime(3021, 2, 19, 17, 10, 41)),
        date_fin=make_aware(datetime.datetime(3022, 2, 19, 17, 10, 41)),
        titre_informatif="Titre_pâgnol",
        description="Un film de Pâgnol...",
        date_realisation=1990,
        public=18,
        aide_sourd=True,
        note=5,
        critique="C'est trôp bien",
    )
    gloire.save()
    titre_gloire = Titres.objects.create(
        programmes_id=gloire.id,
        nom="La gloire de mon Père",
    )
    titre_gloire.save()
    realisateur = Realisateur.objects.create(
        programmes_id=gloire.id,
        nom="Yves Rôbert",
    )
    realisateur.save()
    acteur = Acteurs.objects.create(programmes_id=gloire.id,
                                    nom="Jûlien CIAMACA",
                                    role="Mârcel Pagnol")
    acteur.save()
    scenariste = Scenariste.objects.create(
        programmes_id=gloire.id,
        nom="Lôuis Nucera",
    )
    scenariste.save()
    categorie = Categories.objects.create(nom="fîlm")
    categorie.save()
    categorie.programmes.add(gloire.id)
    series = Series.objects.create(serie=1,
                                   episode=2,
                                   partie=3,
                                   programmes_id=gloire.id)
    series.save()
    pays_realisation = PaysRealisation.objects.create(nom="Frânce")
    pays_realisation.save()
    pays_realisation.programmes.add(gloire.id)
    # Search criteria use different accents/case than the fixture on purpose.
    data = urlencode({
        "chaines_tv": id_france_3,
        "recherche": "marcel",
        "max_resultats": 4,
        "titre": "la gloîre de mon pere",
        "titre_informatif": "titre_pagnôl",
        "description": "Un film de Pagnôl",
        "realisateur": "robért",
        "acteur": "JULIÊN",
        "role": "marcèl",
        "scenariste": "LOUÎS",
        "date_realisation": 1990,
        "categories": "FÏLM",
        "serie": 1,
        "episode": 2,
        "partie": 3,
        "pays_realisation": "France",
        "public": 18,
        "aide_sourd": True,
        "note": 5,
        "critique": "c'est trop bièn",
    })
    response_post = self.client.post(
        reverse("welcome"),
        data,
        content_type="application/x-www-form-urlencoded",
    )
    assert response_post.status_code == 200
    # Exactly one match, with all related data returned unmodified.
    assert len(response_post.context["info_programmes"]) == 1
    assert (response_post.context["info_programmes"][0]["titres"][0].nom ==
            "La gloire de mon Père")
    assert (response_post.context["info_programmes"][0]["chaine"] ==
            "FRANCE 3")
    assert (response_post.context["info_programmes"][0]
            ["programme"].titre_informatif == "Titre_pâgnol")
    assert (response_post.context["info_programmes"][0]
            ["programme"].description == "Un film de Pâgnol...")
    assert (response_post.context["info_programmes"][0]["realisateur"]
            [0].nom == "Yves Rôbert")
    assert (response_post.context["info_programmes"][0]["acteurs"][0].nom ==
            "Jûlien CIAMACA")
    assert (response_post.context["info_programmes"][0]["acteurs"][0].role ==
            "Mârcel Pagnol")
    assert (response_post.context["info_programmes"][0]["scenariste"]
            [0].nom == "Lôuis Nucera")
    assert (response_post.context["info_programmes"][0]
            ["programme"].date_realisation == 1990)
    assert (response_post.context["info_programmes"][0]["categories"]
            [0].nom == "fîlm")
    assert (response_post.context["info_programmes"][0]["series"][0].serie ==
            1)
    assert (response_post.context["info_programmes"][0]["series"]
            [0].episode == 2)
    assert (response_post.context["info_programmes"][0]["series"][0].partie ==
            3)
    assert (response_post.context["info_programmes"][0]["pays"][0].nom ==
            "Frânce")
    assert (response_post.context["info_programmes"][0]
            ["programme"].aide_sourd == True)
    assert (
        response_post.context["info_programmes"][0]["programme"].note == 5)
    assert (response_post.context["info_programmes"][0]
            ["programme"].critique == "C'est trôp bien")
    assert response_post.templates[0].name == "programmes/results.html"
def convert_datetimefield_value(self, value, expression, connection,
                                context):
    """Backend converter: attach the connection's timezone to a naive
    datetime read from the database when ``USE_TZ`` is enabled; ``None``
    passes through untouched."""
    if value is None:
        return value
    if settings.USE_TZ:
        value = timezone.make_aware(value, self.connection.timezone)
    return value
def make_local_datetime(date):
    """Promote *date* to a timezone-aware datetime at local midnight."""
    midnight = datetime.datetime.combine(date, datetime.time.min)
    return timezone.make_aware(midnight)
def create_example_data():
    """Seed the database with demo content: an admin account, site
    configuration, five programmes with schedules/episodes, a sample podcast
    and five contributor profiles. Idempotent via get_or_create throughout."""
    # Create administrator
    user, created = User.objects.get_or_create(username='******',
                                               defaults={
                                                   'is_superuser': True,
                                                   'is_staff': True,
                                               })
    if created:
        user.set_password('1234')
        user.save()
    # Site config
    site_config = SiteConfiguration.get_global()
    site_config.about_footer = '''
    RadioCo is a broadcasting radio recording scheduling system.
    RadioCo has been intended to provide a solution for a wide range of broadcast projects,
    from community to public and commercial stations.
    '''
    site_config.more_about_us = 'Live shows are recorded and published automatically'
    site_config.address = 'http://radioco.org/'
    site_config.facebook_address = 'https://facebook.com/radioco.org'
    site_config.twitter_address = 'https://twitter.com/RadioCo_org'
    site_config.save()
    # Programme 1: daily live show at 08:00 with a 20:00 repetition.
    synopsis = '''
    Lorem Ipsum is simply dummy text of the printing and typesetting industry.
    Lorem Ipsum has been the industry's standard dummy text ever since the 1500s,
    when an unknown printer took a galley of type and scrambled it to make a type specimen book.
    '''
    programme, created = Programme.objects.get_or_create(
        name='Morning News',
        defaults=dict(
            synopsis=synopsis,
            language='en',
            photo='defaults/example/radio_1.jpg',
            current_season=1,
            category='News & Politics',
        ))
    slot, created = Slot.objects.get_or_create(
        programme=programme, runtime=datetime.timedelta(minutes=60))
    recurrences = recurrence.Recurrence(
        dtstart=timezone.make_aware(datetime.datetime(2015, 1, 1, 8, 0, 0)),
        rrules=[recurrence.Rule(recurrence.DAILY)])
    recurrences_repetition = recurrence.Recurrence(
        dtstart=timezone.make_aware(datetime.datetime(2015, 1, 1, 20, 0, 0)),
        rrules=[recurrence.Rule(recurrence.DAILY)])
    # 'L' = live schedule, 'R' = repetition schedule.
    Schedule.objects.get_or_create(slot=slot,
                                   type='L',
                                   recurrences=recurrences)
    Schedule.objects.get_or_create(slot=slot,
                                   type='R',
                                   recurrences=recurrences_repetition)
    # Three episodes; only the first gets an example podcast recording.
    for number in range(1, 4):
        episode, created = Episode.objects.get_or_create(
            title='Episode %s' % number,
            programme=programme,
            summary=synopsis,
            season=1,
            number_in_season=number,
        )
        if number == 1:
            Podcast.objects.get_or_create(
                episode=episode,
                url=
                'https://archive.org/download/Backstate_Wife/1945-08-10_-_1600_-_Backstage_Wife_-_Mary_And_Larry_See_A_Twenty_Year_Old_Portrait_That_Looks_Exactly_Like_Mary_-_32-22_-_14m13s.mp3',
                mime_type='audio/mp3',
                length=0,
                duration=853)
    # Five demo users with profiles, each a contributor on Programme 1.
    for username_counter in range(1, 6):
        titles = [
            '', 'Mark Webber', 'Paul Jameson', 'Laura Sommers',
            'Martin Blunt', 'John Smith'
        ]
        user, created = User.objects.get_or_create(
            username='******' % username_counter,
            defaults={'first_name': titles[username_counter]})
        user.userprofile.bio = synopsis
        user.userprofile.avatar = 'defaults/example/user_%s.jpg' % username_counter
        user.userprofile.display_personal_page = True
        user.userprofile.save()
        Role.objects.get_or_create(person=user,
                                   programme=programme,
                                   defaults={
                                       'role': CONTRIBUTOR,
                                       'description': synopsis,
                                   })
    # Programme 2 - 5: daily shows staggered one hour apart from 11:00,
    # each with 7 seasons of 5 episodes.
    titles = [
        '', 'Places To Go', 'The best wine', 'Local Gossips', 'Classic hits'
    ]
    for programme_counter in range(1, 5):
        programme, created = Programme.objects.get_or_create(
            name=titles[programme_counter],
            synopsis=synopsis,
            language='en',
            photo='defaults/example/radio_%s.jpg' % str(programme_counter +
                                                        1),
            current_season=7,
            category='News & Politics',
        )
        slot, created = Slot.objects.get_or_create(
            programme=programme, runtime=datetime.timedelta(minutes=60))
        recurrences = recurrence.Recurrence(
            dtstart=(
                timezone.make_aware(datetime.datetime(2015, 1, 1, 10, 0, 0))
                + datetime.timedelta(hours=programme_counter)),
            rrules=[recurrence.Rule(recurrence.DAILY)])
        Schedule.objects.get_or_create(slot=slot,
                                       type='L',
                                       recurrences=recurrences)
        for season in range(1, 8):
            for number in range(1, 6):
                Episode.objects.get_or_create(
                    title='Episode %s' % number,
                    programme=programme,
                    summary="Summary Season {}, Number {}: {}".format(
                        season, number, synopsis),
                    season=season,
                    number_in_season=number,
                )
    # Recompute episode issue dates for everything created above.
    for programme in Programme.objects.all():
        rearrange_episodes(
            programme, timezone.make_aware(datetime.datetime(1970, 1, 1)))
def render(self, form_data):
    """Render one merged PDF containing the tickets of all matching order
    positions across ``self.events``.

    :param form_data: export options — ``include_pending``, ``date_from``,
        ``date_to`` and ``order_by``.
    :returns: ``(filename, mimetype, bytes)`` tuple.
    """
    merger = PdfFileMerger()
    qs = OrderPosition.objects.filter(
        order__event__in=self.events).prefetch_related(
            'answers', 'answers__question').select_related(
                'order', 'item', 'variation', 'addon_to')
    # Paid orders always; pending only when explicitly requested.
    if form_data.get('include_pending'):
        qs = qs.filter(
            order__status__in=[Order.STATUS_PAID, Order.STATUS_PENDING])
    else:
        qs = qs.filter(order__status__in=[Order.STATUS_PAID])
    # Date window filters compare against the subevent date when there is
    # one, falling back to the event date otherwise.
    if form_data.get('date_from'):
        dt = make_aware(
            datetime.combine(
                dateutil.parser.parse(form_data['date_from']).date(),
                time(hour=0, minute=0, second=0)), self.timezone)
        qs = qs.filter(
            Q(subevent__date_from__gte=dt)
            | Q(subevent__isnull=True, order__event__date_from__gte=dt))
    if form_data.get('date_to'):
        # Exclusive upper bound: midnight of the day AFTER date_to.
        dt = make_aware(
            datetime.combine(
                dateutil.parser.parse(form_data['date_to']).date() +
                timedelta(days=1), time(hour=0, minute=0, second=0)),
            self.timezone)
        qs = qs.filter(
            Q(subevent__date_from__lt=dt)
            | Q(subevent__isnull=True, order__event__date_from__lt=dt))
    # Output ordering options.
    if form_data.get('order_by') == 'name':
        qs = qs.order_by('attendee_name_cached', 'order__code')
    elif form_data.get('order_by') == 'code':
        qs = qs.order_by('order__code')
    elif form_data.get('order_by') == 'date':
        qs = qs.annotate(ed=Coalesce('subevent__date_from',
                                     'order__event__date_from')).order_by(
                                         'ed', 'order__code')
    elif form_data.get('order_by', '').startswith('name:'):
        # 'name:<part>' sorts on one component of the structured name,
        # resolved from attendee -> addon parent -> invoice address.
        part = form_data['order_by'][5:]
        qs = qs.annotate(resolved_name=Coalesce(
            'attendee_name_parts', 'addon_to__attendee_name_parts',
            'order__invoice_address__name_parts')).annotate(
                resolved_name_part=JSONExtract(
                    'resolved_name', part)).order_by('resolved_name_part')
    # Start with an empty-event renderer; swapped per position as soon as the
    # position's event changes (avoids re-building renderers per ticket).
    o = PdfTicketOutput(Event.objects.none())
    for op in qs:
        if not op.generate_ticket:
            continue
        if op.order.event != o.event:
            o = PdfTicketOutput(op.event)
        with language(op.order.locale, o.event.settings.region):
            # Layout preference: sales-channel specific, then 'web', then
            # the event default.
            layout = o.layout_map.get(
                (op.item_id, op.order.sales_channel),
                o.layout_map.get((op.item_id, 'web'), o.default_layout))
            outbuffer = o._draw_page(layout, op, op.order)
            merger.append(ContentFile(outbuffer.read()))
    outbuffer = BytesIO()
    merger.write(outbuffer)
    merger.close()
    outbuffer.seek(0)
    if self.is_multievent:
        return '{}_tickets.pdf'.format(self.events.first(
        ).organizer.slug), 'application/pdf', outbuffer.read()
    else:
        return '{}_tickets.pdf'.format(
            self.event.slug), 'application/pdf', outbuffer.read()
def exams(request, id=None):
    """Exam listing/detail view.

    - When ``id`` is an exam: render the exam page; a student POST is graded
      and stored as a Submission (at most one per student per exam).
    - When ``id`` is a course: filter the exam list to that course.
    - A teacher POST with 'user_delete' removes that student from the course.

    Fixes vs. original: ``.exists()`` instead of ``len(filter(...))`` (avoids
    fetching the whole queryset), ``is None`` comparisons, question list
    hoisted out of the grading loop (one query instead of one per answer),
    bare ``except:`` narrowed, zero-question division guard, debug prints
    removed.
    """
    # get the available exams
    courses = get_courses(request)
    exams = get_exams(request, courses)
    current_course = False
    now = make_aware(datetime.datetime.now())
    if not request.user.is_authenticated:
        return error(request, "you are not logged in", 403)

    # Remove Student from the course if 'x' was clicked by teacher
    if request.method == "POST" and request.user.profile.role == "T" and request.POST.get(
            "user_delete"):
        email = request.POST.get("user_delete")
        student = User.objects.get(username=email, profile__role='S')
        courses.get(id=id).students.remove(student)

    if exams.filter(id=id).exists():
        # this id is an exam
        try:
            exam = exams.get(id=id)
        except Exception:
            return error(request, "this exam does not exist", 403)
        try:
            # check if submission exists
            submission = Submission.objects.get(exam=exam,
                                                student=request.user)
        except Exception:
            submission = None

        if request.method == "POST" and request.user.profile.role == "S" and submission is None:
            # received submission: grade answers question_0, question_1, ...
            # until the first missing key. Question order matches the form.
            questions = list(exam.questions.all())
            max_score = len(questions)
            score = 0
            choices = []
            counter = 0
            while ("question_" + str(counter)) in request.POST:
                current = int(request.POST.get("question_" + str(counter)))
                if current == questions[counter].correct_answer:
                    score += 1
                choices.append(current)
                counter += 1
            if len(choices) != len(questions):
                return error(
                    request,
                    "there was a problem with your submission. This submission is not counted. Please try again",
                    403)
            # save the result (guard against a zero-question exam)
            percentage = round(
                score / max_score * 10000) / 100 if max_score else 0
            submission = Submission.objects.create(exam=exam,
                                                   student=request.user,
                                                   score=score,
                                                   max_score=max_score,
                                                   percentage=percentage)
            for i, choice in enumerate(choices):
                answer = Answer.objects.create(question=questions[i],
                                               answer=choice)
                answer.save()
                submission.answers.add(answer)
            submission.save()
        elif request.method == "POST" and request.user.profile.role == "T":
            return error(
                request,
                "you do not have permission to submit an exam as a teacher",
                403)
        elif request.method == "POST" and submission is not None:
            return error(request, "this exam was already submitted by you",
                         403)
        return render(request, 'exam_network/exam.html', {
            "exam": exam,
            "submission": submission
        })

    # if the id is a course, filter exams based on this course
    try:
        current_course = courses.get(id=id)
        exams = exams.filter(course=current_course)
    except Exception:
        pass
    context_dict = {
        "courses": courses,
        "exams": exams,
        "user_role": request.user.profile.role,
        "current_course": current_course,
        "now": now
    }
    return render(request, 'exam_network/exams.html', context_dict)
def datetime(self):
    """Expose ``self.date`` as an aware datetime (midnight, default
    timezone) — Django humanize needs a datetime, not a date."""
    at_midnight = datetime.combine(self.date, time())
    return timezone.make_aware(
        at_midnight,
        timezone.get_default_timezone(),
    )
def exam_edit(request, id):
    """Teacher-only exam editor.

    GET renders the edit form pre-filled with the exam's questions; POST
    validates title/dates and replaces the exam's metadata and question set.

    Fixes vs. original: user-facing typo "emty" -> "empty", ``is None``
    comparison, bare ``except:`` in the GET branch narrowed, ``while
    True``/``break`` flattened into a condition loop.
    """
    if not request.user.is_authenticated:
        return error(request, "you are not logged in", 403)
    if request.user.profile.role != "T":
        return error(request, "you do not have permission to edit this exam",
                     403)
    if request.method == "POST":
        try:
            title = request.POST.get("title")
            date_available = get_datetime(request.POST.get("date_available"))
            deadline = get_datetime(request.POST.get("deadline"))
            now = make_aware(datetime.datetime.now())
            # Validate title and date coherence before touching the exam.
            if title is None or title.strip() == "":
                return error(request, "the exam title is empty", 403)
            if deadline < date_available:
                return error(
                    request,
                    "the deadline of the exam is before date available", 403)
            if deadline < now:
                return error(request,
                             "the deadline of the exam is in the past", 403)
            exam = Exam.objects.get(id=id)
            exam.title = title
            exam.date_available = date_available
            exam.deadline = deadline
            # clear old questions
            exam.questions.clear()
            # get new questions: question_0, question_1, ... until a gap
            counter = 0
            while ("question_" + str(counter)) in request.POST:
                question = Question.objects.create(
                    content=request.POST.get("question_" + str(counter)),
                    choice_0=request.POST.get("answer_" + str(counter) +
                                              "_0"),
                    choice_1=request.POST.get("answer_" + str(counter) +
                                              "_1"),
                    choice_2=request.POST.get("answer_" + str(counter) +
                                              "_2"),
                    choice_3=request.POST.get("answer_" + str(counter) +
                                              "_3"),
                    choice_4=request.POST.get("answer_" + str(counter) +
                                              "_4"),
                    correct_answer=int(
                        request.POST.get("correct_answer_" + str(counter))))
                question.save()
                exam.questions.add(question)
                counter += 1
            exam.save()
            return redirect(reverse('exam_network:exams'))
        except Exception as e:
            # Last-resort catch: any failure above is reported as a
            # permission problem (original behavior preserved).
            print("exception: ", e)
            return error(
                request,
                "you do not have permission to edit an exam in this course",
                403)
    else:
        try:
            exam = Exam.objects.get(id=id)
            questions = [{
                "content": q.content,
                "correct": q.correct_answer,
                "0": q.choice_0,
                "1": q.choice_1,
                "2": q.choice_2,
                "3": q.choice_3,
                "4": q.choice_4,
            } for q in list(exam.questions.all())]
            context_dict = {
                "exam": exam,
                "available": datetime_str(exam.date_available),
                "deadline": datetime_str(exam.deadline),
                "questions": json.dumps(questions),
            }
        except Exception:
            return error(request, 'exam was not found', 403)
        return render(request, 'exam_network/edit_exam.html', context_dict)
def parse_exif_timestamp(timestamp: str) -> datetime:
    """Parse an EXIF timestamp ('%Y:%m:%d %H:%M:%S', e.g.
    '2019:01:31 12:00:00') into an aware datetime in the default timezone.

    The original annotation promised ``Union[datetime, str]``, but every
    path returns a datetime; the annotation is corrected accordingly.

    :raises ValueError: if *timestamp* does not match the EXIF format.
    """
    dt = timezone.datetime.strptime(timestamp, '%Y:%m:%d %H:%M:%S')
    return timezone.make_aware(dt, timezone.get_default_timezone())