def events(self, future=False):
    """Return this source's events as a JSON string.

    With future=False the raw event list is returned as fetched.
    With future=True, events are normalized to have end times, filtered to
    those whose end time is still ahead, and returned soonest-first.
    """
    try:
        events = self._event_methods[self.data_source](self)
    except (TimeoutError, ConnectionError, LookupError):
        # if the event source is unreachable or down or data is invalid
        events = []
    if not future:
        return json.dumps(events)
    # the methods are assumed to return events latest first; the order is
    # reversed at the end so the next upcoming event comes first
    tz = pytz.timezone(settings.TIME_ZONE)
    for event in events:
        # for future filtering, make sure all events have end times not null
        try:
            end = event['end_time']
            if not end:
                event['end_time'] = event['start_time']
        except LookupError:
            event['end_time'] = event['start_time']
        # check the datetimes first
        start = dateparse.parse_datetime(event['start_time'])
        end = dateparse.parse_datetime(event['end_time'])
        # linkedevents may not have exact times, so parse_datetime may fail;
        # fall back to the date-only value: assume server time zone, take the
        # start (00:00) / end (23:59:59) of that day, and store back as UTC
        # so the filter below can re-parse it
        if not start:
            start = tz.localize(datetime.combine(dateparse.parse_date(event['start_time']), time()))
            event['start_time'] = start.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
        if not end:
            end = tz.localize(datetime.combine(dateparse.parse_date(event['end_time']), time(23,59,59)))
            event['end_time'] = end.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
    # we want the next event first
    return json.dumps(list(reversed([event for event in events if dateparse.parse_datetime(event['end_time']) > datetime.now(tz)])))
def subscriptions(self, customer, plans, cards):
    """Sync the customer's Openpay subscriptions into local Subscription rows.

    `plans` and `cards` look like maps from Openpay ids to local primary
    keys — TODO confirm against callers.
    Returns the subscription count reported by Openpay.
    """
    subscriptionsList = customer.op_subscriptions()
    if subscriptionsList.get('count', 0) > 0:
        for subscriptionJson in subscriptionsList.get('data', []):
            # Fetch the existing row for this Openpay id, or build a new one;
            # the finally block then populates it either way.
            try:
                dbobj = djop.models.Subscription.objects.get(
                    openpay_id=subscriptionJson['id'])
            except djop.models.Subscription.DoesNotExist:
                dbobj = djop.models.Subscription(
                    openpay_id=subscriptionJson['id'])
            finally:
                # skip_signal presumably suppresses save-signal handlers
                # during this sync — TODO confirm.
                dbobj.skip_signal = True
                dbobj.customer_id = customer.pk
                dbobj.plan_id = plans[subscriptionJson['plan_id']]
                dbobj.card_id = cards[subscriptionJson['card']['id']]
                dbobj.cancel_at_period_end = \
                    subscriptionJson['cancel_at_period_end']
                new_charge_date = parse_date(
                    subscriptionJson['charge_date'])
                # Keep the previous charge date only when it actually changed.
                dbobj.latest_charge_date = dbobj.charge_date if \
                    dbobj.charge_date != new_charge_date else \
                    dbobj.latest_charge_date
                dbobj.charge_date = new_charge_date
                dbobj.period_end_date = parse_date(
                    subscriptionJson['period_end_date'])
                dbobj.status = subscriptionJson['status']
                dbobj.current_period_number = \
                    subscriptionJson['current_period_number']
                dbobj.trial_end_date = parse_date(
                    subscriptionJson['trial_end_date'])
                dbobj.creation_date = parse_datetime(
                    subscriptionJson['creation_date'])
                dbobj.save()
    return subscriptionsList.get('count', 0)
def get_queryset(self):
    """Contributions on content published within the requested date window,
    ordered according to the ?ordering parameter."""
    now = timezone.now()
    params = self.request.GET
    # Window defaults: first day of the current month up to now.
    if "start" in params:
        start_date = dateparse.parse_date(params["start"])
    else:
        start_date = datetime.datetime(
            year=now.year, month=now.month, day=1, tzinfo=now.tzinfo)
    if "end" in params:
        end_date = dateparse.parse_date(params["end"])
    else:
        end_date = now
    published = Content.objects.filter(published__range=(start_date, end_date))
    published_ids = published.values_list("pk", flat=True)
    matching = Contribution.objects.filter(content__in=published_ids)
    sort_key = params.get("ordering", "content")
    sort_field = {
        "content": "content__published",
        "user": "******"
    }[sort_key]
    return matching.order_by(sort_field)
def case_property_range_query(case_property_name, gt=None, gte=None, lt=None, lte=None):
    """Returns cases where case property `key` fall into the range provided.

    Bounds are first tried as numbers; if any bound is non-numeric they are
    re-tried as dates. Raises TypeError if neither interpretation works.
    """
    kwargs = {'gt': gt, 'gte': gte, 'lt': lt, 'lte': lte}
    # if its a number, use it
    try:
        # numeric range
        kwargs = {key: float(value) for key, value in six.iteritems(kwargs)
                  if value is not None}
        return _base_property_query(
            case_property_name,
            queries.range_query("{}.{}.numeric".format(CASE_PROPERTIES_PATH, VALUE), **kwargs)
        )
    except ValueError:
        pass
    # if its a date, use it
    # date range; parse each bound once (previously parse_date ran twice per
    # bound: once in the filter and again for the value)
    parsed = {key: parse_date(value) for key, value in six.iteritems(kwargs)
              if value is not None}
    kwargs = {key: value for key, value in six.iteritems(parsed)
              if value is not None}
    if not kwargs:
        raise TypeError()   # Neither a date nor number was passed in
    return _base_property_query(
        case_property_name,
        queries.date_range("{}.{}.date".format(CASE_PROPERTIES_PATH, VALUE), **kwargs)
    )
def test_parse_date(self):
    """parse_date accepts ISO dates (zero-padded or not), returns None for
    unrecognised formats, and raises ValueError for well-formed but
    impossible dates."""
    # Valid inputs
    self.assertEqual(parse_date('2012-04-23'), date(2012, 4, 23))
    self.assertEqual(parse_date('2012-4-9'), date(2012, 4, 9))
    # Invalid inputs
    self.assertEqual(parse_date('20120423'), None)
    self.assertRaises(ValueError, parse_date, '2012-04-56')
def handle(self, *args, **options):
    """Import contests from contests.json, then create one hard-coded
    'best girl' contest."""
    # Context manager guarantees the file is closed even if parsing fails
    # (the original left the handle open on a JSON error).
    with open('contests.json', 'r') as file_handle:
        contests = json.load(file_handle)
    for contest in contests:
        new_contest = ac.Contest(pk=contest['id'], name=contest['name'])
        new_contest.query = contest['params']
        new_contest.begin = dateparse.parse_date(contest['begin'])
        new_contest.end = dateparse.parse_date(contest['end'])
        # 'result' is a space-separated list of contest categories
        results = contest['result'].split(' ')
        for result in results:
            if result == 'best_girl':
                new_contest.best_girl = True
            if result == 'best_card':
                new_contest.best_card = True
        new_contest.save()
    print("#Create the first contest")
    contest = ac.Contest()
    contest.name = 'Who\'s the best girl?'
    contest.begin = None
    contest.end = None
    contest.query = ''
    contest.best_girl = True
    contest.best_card = True
    contest.save()
def school_report(request, school_id, start, end):
    """Generate a PDF report for a school over [start, end], email the
    requesting user a download link, and redirect to the file.

    `start`/`end` are ISO date strings taken from the URL.
    """
    school = School.objects.get(pk=school_id)
    start_d = dateparse.parse_date(start)
    end_d = dateparse.parse_date(end)
    report_path = "reports/%s/%s_%s_report.pdf" % (
        school.slug, start, end
    )
    full_path = settings.MEDIA_ROOT + '/' + report_path
    full_url = settings.MEDIA_URL + '/' + report_path
    # ForkRunR presumably runs the R report job and returns the path of the
    # generated file — TODO confirm.
    path = ForkRunR(
        school.pk, start_d, end_d
    )
    dir_name = os.path.dirname(full_path)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    # Move the generated file into the media tree so it is web-accessible.
    os.rename(path, full_path)
    send_mail(
        "Your report for school %s, date range %s - %s" % (
            school, start, end
        ),
        "You may download it at http://%s/%s" % (
            request.META['HTTP_HOST'], full_url
        ),
        settings.SERVER_EMAIL,
        [request.user.email]
    )
    return HttpResponseRedirect(full_url)
def get_content_value(self, obj):
    """Total payable cost of `obj`'s contributions that are either forced
    payments or paid within the requested date window, optionally limited
    to ?contributors.

    Window defaults to the first of the current month up to now.
    """
    contributions = obj.contributions.distinct()
    request = self.context.get("request")
    now = timezone.now()
    start_date = datetime.datetime(
        year=now.year, month=now.month, day=1, tzinfo=now.tzinfo
    )
    if "start" in request.QUERY_PARAMS:
        start_date = dateparse.parse_date(request.QUERY_PARAMS["start"])
    end_date = now
    # Fix: the presence check previously read `request.GET` while the value
    # came from `request.QUERY_PARAMS`; use QUERY_PARAMS for both, matching
    # every other parameter in this method.
    if "end" in request.QUERY_PARAMS:
        end_date = dateparse.parse_date(request.QUERY_PARAMS["end"])
    if "contributors" in request.QUERY_PARAMS:
        contributors = request.QUERY_PARAMS.getlist("contributors")
        contributions = contributions.filter(contributor__username__in=contributors)
    contributions = contributions.filter(
        force_payment=False
    ) | contributions.filter(
        payment_date__range=(start_date, end_date)
    )
    total_cost = 0
    for contribution in contributions:
        cost = contribution.get_pay
        if cost:
            total_cost += cost
    return total_cost
def _get_received_at_checker(self, filters, noop_checker):
    """Build a predicate that tests a credit's received_at against the
    optional received_at__gte / received_at__lt bounds in `filters`.

    Returns `noop_checker` when neither bound is present.
    """

    def _parse(raw):
        # Try ISO datetimes first, then each configured input format.
        value = parse_datetime(raw)
        if value is not None:
            return value
        for fmt in settings.DATETIME_INPUT_FORMATS:
            try:
                return datetime.datetime.strptime(raw, fmt)
            except (ValueError, TypeError):
                continue
        raise ValueError('Cannot parse date %s' % raw)

    lower = filters.get('received_at__gte')
    upper = filters.get('received_at__lt')
    lower = _parse(lower) if lower else None
    upper = _parse(upper) if upper else None
    # Naive datetimes are interpreted in the current timezone.
    if lower is not None and lower.tzinfo is None:
        lower = timezone.make_aware(lower)
    if upper is not None and upper.tzinfo is None:
        upper = timezone.make_aware(upper)
    if lower is not None and upper is not None:
        return lambda c: lower <= c.received_at < upper
    if lower is not None:
        return lambda c: lower <= c.received_at
    if upper is not None:
        return lambda c: c.received_at < upper
    return noop_checker
def tournament(request, tournament_id):
    """Tournament detail view: basic info, games, and spirit-score overview
    fetched from the Leaguevine API."""
    info = api_tournamentbyid(tournament_id)
    if (u'error_message' in info):
        errmsg = 'error retrieving tournament with id {0} from leaguevine: {1}'.format(tournament_id, info['error_message'])
        return render_to_response('error.html', {'error': errmsg})
    # NOTE(review): despite the *_datetime key names, these hold date
    # objects (parse_date), not datetimes.
    info[u'start_datetime'] = parse_date(info['start_date'])
    info[u'end_datetime'] = parse_date(info['end_date'])
    # retrieve all games of this tournament
    spirit = api_spiritbytournament(tournament_id)
    games = api_gamesbytournament(tournament_id)
    if (u'error_message' in spirit):
        errmsg = '{0}'.format(spirit['error_message'])
        return render_to_response('error.html', {'error': errmsg})
    # remove BYE games, as those are irrelevant for spirit scores:
    games['objects'] = [g for g in games['objects'] if g['team_2_id']]
    user_id = request.session.get('user_id', None)
    user_first_name = request.session.get('user_first_name', None)
    user_teamids = request.session.get('user_teamids', [])
    for g in games['objects']:
        g['datetime'] = parse_datetime(g['start_time'])
        # each team's spirit score is editable by members of the opposing team
        g['team_1_spirit_editable'] = g['team_2_id'] in user_teamids
        g['team_2_spirit_editable'] = g['team_1_id'] in user_teamids
    # compute spirit score overview
    teams, games_wspirit = TeamsFromGames(spirit['objects'], games['objects'])
    return render_to_response('tournament.html', {'user_first_name': user_first_name, 'id': tournament_id, 'games': games_wspirit, 'teams': teams, 'info': info})
def run_command(self, date):
    """Run the registration-reminder command with SurveyDate patched so
    'now' is `date` and the survey start date is fixed at 2014-01-23."""
    with patch('diary.models.SurveyDate') as mock:
        patched_date = mock.return_value
        patched_date.now.return_value = dateparse.parse_date(date)
        patched_date.get_start_date.return_value = dateparse.parse_date('2014-01-23')
        rm = ReminderManager()
        rm.send_registration_email()
def get_queryset(self):
    """Contributions on content published in the requested window, filtered
    by feature type, tags, contributor and staff status, with forced-payment
    contributions included/excluded regardless of the window.

    NOTE(review): 'start'/'end'/'ordering' are read from request.GET while
    the other filters use request.QUERY_PARAMS — presumably equivalent under
    DRF, but worth unifying. Verify before changing.
    """
    now = timezone.now()
    # Default window: first day of the current month up to now.
    start_date = datetime.datetime(
        year=now.year, month=now.month, day=1, tzinfo=now.tzinfo)
    if "start" in self.request.GET:
        start_date = dateparse.parse_date(self.request.GET["start"])
    end_date = now
    if "end" in self.request.GET:
        end_date = dateparse.parse_date(self.request.GET["end"])
    content = Content.objects.filter(published__range=(start_date, end_date))
    if "feature_types" in self.request.QUERY_PARAMS:
        feature_types = self.request.QUERY_PARAMS.getlist("feature_types")
        content = content.filter(feature_type__slug__in=feature_types)
    if "tags" in self.request.QUERY_PARAMS:
        tags = self.request.QUERY_PARAMS.getlist("tags")
        content = content.filter(tags__slug__in=tags)
    content_ids = content.values_list("pk", flat=True)
    contributions = Contribution.objects.filter(content__in=content_ids)
    # Forced payments override the date window in both directions.
    include, exclude = get_forced_payment_contributions(start_date, end_date, qs=contributions)
    include_ids = include.values_list('pk', flat=True).distinct()
    exclude_ids = exclude.values_list('pk', flat=True).distinct()
    contributions = contributions.exclude(
        pk__in=exclude_ids
    ) | Contribution.objects.filter(
        pk__in=include_ids
    )
    if "contributors" in self.request.QUERY_PARAMS:
        contributors = self.request.QUERY_PARAMS.getlist("contributors")
        contributions = contributions.filter(contributor__username__in=contributors)
    if "staff" in self.request.QUERY_PARAMS:
        staff = self.request.QUERY_PARAMS.get("staff")
        if staff == "freelance":
            contributions = contributions.filter(
                contributor__freelanceprofile__is_freelance=True
            )
        elif staff == "staff":
            contributions = contributions.filter(
                contributor__freelanceprofile__is_freelance=False
            )
    ordering = self.request.GET.get("ordering", "content")
    order_options = {
        "content": "content__published",
        "user": "******"
    }
    return contributions.order_by(order_options[ordering])
def test_command_does_not_run_if_not_sunday(self):
    """The second-reminder command raises CommandError (mentioning Sunday)
    when 'now' is patched to a non-Sunday date (2014-01-23 is a Thursday)."""
    with self.assertRaisesRegexp(CommandError, 'Sunday'):
        with patch('diary.models.SurveyDate') as mock:
            patched_date = mock.return_value
            patched_date.now.return_value = dateparse.parse_date('2014-01-23')
            patched_date.get_start_date.return_value = dateparse.parse_date('2014-01-23')
            rm = ReminderManager()
            rm.send_second_reminder_email()
def get_queryset(self):
    """FreelanceProfiles of contributors who have contributions matching the
    requested window and filters (feature types, tags, contributors, staff).

    NOTE(review): 'start'/'end' are read from request.GET while the other
    filters use request.QUERY_PARAMS — presumably equivalent under DRF.
    """
    now = timezone.now()
    # Default window: first day of the current month up to now.
    start_date = datetime.datetime(
        year=now.year, month=now.month, day=1, tzinfo=now.tzinfo
    )
    if "start" in self.request.GET:
        start_date = dateparse.parse_date(self.request.GET["start"])
    end_date = now
    if "end" in self.request.GET:
        end_date = dateparse.parse_date(self.request.GET["end"])
    contribution_qs = Contribution.objects.all()
    if "feature_types" in self.request.QUERY_PARAMS:
        feature_types = self.request.QUERY_PARAMS.getlist("feature_types")
        # content = content.filter(feature_type__slug__in=feature_types)
        contribution_qs = contribution_qs.filter(content__feature_type__slug__in=feature_types)
    if "tags" in self.request.QUERY_PARAMS:
        tags = self.request.QUERY_PARAMS.getlist("tags")
        # content = content.filter(tags__slug__in=tags)
        contribution_qs = contribution_qs.filter(content__tags__slug__in=tags)
    # Forced payments override the publication-date window in both directions.
    include, exclude = get_forced_payment_contributions(
        start_date, end_date, qs=contribution_qs
    )
    include_ids = include.values_list('pk', flat=True).distinct()
    exclude_ids = exclude.values_list('pk', flat=True).distinct()
    contribution_qs = contribution_qs.filter(
        content__published__range=(start_date, end_date)
    ) | Contribution.objects.filter(
        pk__in=include_ids
    )
    contribution_qs = contribution_qs.exclude(pk__in=exclude_ids).distinct()
    if "contributors" in self.request.QUERY_PARAMS:
        contributors = self.request.QUERY_PARAMS.getlist("contributors")
        contribution_qs = contribution_qs.filter(contributor__username__in=contributors)
    # Collapse matching contributions to their contributors' profiles.
    contributor_ids = contribution_qs.values_list('contributor', flat=True).distinct()
    qs = FreelanceProfile.objects.filter(contributor__in=contributor_ids)
    if "staff" in self.request.QUERY_PARAMS:
        staff = self.request.QUERY_PARAMS.get("staff")
        if staff == "freelance":
            qs = qs.filter(is_freelance=True)
        elif staff == "staff":
            qs = qs.filter(is_freelance=False)
    return qs
def end_date(self):
    """Parse the element's data as a datetime, date or time (tried in that
    order) and add the parsed object; raise ValueError if nothing matches.
    """
    value = self.getData()
    # Parse each candidate format once (the original called every parse_*
    # function twice: once in the test and again to fetch the value).
    parsed = dateparse.parse_datetime(value)
    if parsed is None:
        parsed = dateparse.parse_date(value)
    if parsed is None:
        parsed = dateparse.parse_time(value)
    if parsed is None:
        raise ValueError('"{0}" is not a valid datetime'.format(value))
    self.addObject(parsed)
def filter_queryset(self, request, queryset, view):
    """Apply optional ?start / ?end date bounds through the view's
    filter_start / filter_end hooks."""
    for param, apply_bound in (("start", self.filter_start), ("end", self.filter_end)):
        raw = request.QUERY_PARAMS.get(param, None)
        if raw:
            queryset = apply_bound(queryset, view, dateparse.parse_date(raw))
    return queryset
def test_start_date_gets_nearest_thursday(self):
    """get_start_date maps a date to its survey-week Thursday: a Thursday
    maps to itself, a Monday maps forward to that week's Thursday, and a
    Sunday maps back to the previous Thursday."""
    sd = SurveyDate()
    today = dateparse.parse_date('2014-01-23')  # a Thursday
    self.assertEquals(sd.get_start_date(today).isoformat(), '2014-01-23')
    today = dateparse.parse_date('2014-01-20')  # the Monday before
    self.assertEquals(sd.get_start_date(today).isoformat(), '2014-01-23')
    today = dateparse.parse_date('2014-01-19')  # the Sunday before
    self.assertEquals(sd.get_start_date(today).isoformat(), '2014-01-16')
def get_start_and_end_dates(request):
    """Return (start, end) dates parsed from ?start_date/?end_date, falling
    back to the current week (Monday through Sunday) when either is absent."""
    raw_start = request.GET.get('start_date', None)
    raw_end = request.GET.get('end_date', None)
    if raw_start and raw_end:
        return parse_date(raw_start), parse_date(raw_end)
    today = datetime.date.today()
    monday = today - datetime.timedelta(today.weekday())
    return monday, monday + datetime.timedelta(6)
def _filter_populate(self, q_params, neededValues): result = {} for param in neededValues: perhaps_value = q_params.get(param[0], None) if perhaps_value: if len(param) > 2 and param[2] == "amint": result[param[1]] = datetime.combine(dateparse.parse_date(perhaps_value), datetime.min.time()) elif len(param) > 2 and param[2] == "amaxt": result[param[1]] = datetime.combine(dateparse.parse_date(perhaps_value), datetime.max.time()) else: result[param[1]] = perhaps_value return result
def get_value(self, condition, value, request=None):
    """Coerce `value` (a date string or list/tuple of them) into date(s).

    Raises ValueError when any value fails to parse. For the 'range'
    condition the result collapses to [min, max].
    """
    if isinstance(value, six.string_types):
        value = parse_date(value)
    elif isinstance(value, (list, tuple)):
        parsed = [parse_date(v) for v in value]
        # Fix: the old check compared len(set(parsed)) against len(value),
        # which wrongly rejected duplicate *valid* dates and let a None from
        # a failed parse slip through when there were no duplicates (crashing
        # later in min()/max()). Test for failed parses directly instead.
        if any(v is None for v in parsed):
            value = None
        else:
            value = list(set(parsed))
    if value is None:
        raise ValueError('One or more values not valid in `%s` filter.' % force_text(self))
    if condition == 'range':
        value = [min(value), max(value)]
    return value
def test_no_reminder_sent_before_startdate(self):
    """No second-reminder email is sent when the user's start date
    (2014-01-23) is after the patched survey week start (2014-01-16)."""
    startdate = '2014-01-23'
    u = User(email='*****@*****.**', startdate=startdate)
    u.save()
    with patch('diary.models.SurveyDate') as mock:
        patched_date = mock.return_value
        patched_date.now.return_value = dateparse.parse_date('2014-01-25')
        patched_date.get_start_date.return_value = dateparse.parse_date('2014-01-16')
        rm = ReminderManager()
        rm.send_second_reminder_email()
    self.assertEqual(len(mail.outbox), 0)
def server_dt(dt):
    """Parse the string `dt` into a timezone-aware datetime.

    Tries ISO datetime, then ISO date (promoted to midnight), then the
    local '%d.%m.%Y' format. Raises from make_aware if nothing parses
    (v would be None).
    """
    formats = ['%d.%m.%Y']

    def parse(d):
        for f in formats:
            # Narrowed from a bare `except:` — only parse failures should be
            # swallowed, not e.g. KeyboardInterrupt.
            try:
                return datetime.datetime.strptime(d, f)
            except (ValueError, TypeError):
                continue

    tz = timezone.get_default_timezone() or pytz.timezone(settings.TIMEZONE)
    # NOTE(review): debug print left in place to preserve behavior; consider
    # removing or switching to logging.
    print("tz=%s, %s" % (tz, dt))
    v = parse_datetime(dt) or (parse_date(dt) and datetime.datetime.combine(parse_date(dt), datetime.time.min)) or parse(dt)
    return timezone.make_aware(v, tz)
def test_get_filtered_class_root_view(self):
    """
    GET requests to filtered ListCreateAPIView that have a filter_class
    set should return filtered results.
    """
    view = FilterClassRootView.as_view()
    # Basic test with no filter: everything comes back.
    request = factory.get('/')
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    assert response.data == self.data
    # Tests that the decimal filter set with 'lt' in the filter class works.
    search_decimal = Decimal('4.25')
    request = factory.get('/', {'decimal': '%s' % search_decimal})
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if Decimal(f['decimal']) < search_decimal]
    assert response.data == expected_data
    # Tests that the date filter set with 'gt' in the filter class works.
    search_date = datetime.date(2012, 10, 2)
    request = factory.get('/', {'date': '%s' % search_date})  # search_date str: '2012-10-02'
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if parse_date(f['date']) > search_date]
    assert response.data == expected_data
    # Tests that the text filter set with 'icontains' in the filter class works.
    search_text = 'ff'
    request = factory.get('/', {'text': '%s' % search_text})
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if search_text in f['text'].lower()]
    assert response.data == expected_data
    # Tests that multiple filters combine (AND semantics).
    search_decimal = Decimal('5.25')
    search_date = datetime.date(2012, 10, 2)
    request = factory.get('/', {
        'decimal': '%s' % (search_decimal,),
        'date': '%s' % (search_date,)
    })
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if parse_date(f['date']) > search_date and
                     Decimal(f['decimal']) < search_decimal]
    assert response.data == expected_data
def handle(self, *args, **options):
    """Seed the database with the hard-coded Austin beach volleyball league."""
    attrs = {
        'name': 'Beach Volleyball',
        'sport': 'volleyball',
        'city': 'Austin',
        'state': 'TX',
        'country': 'US',
        'start_date': dateparse.parse_date('2016-06-15'),
        'end_date': dateparse.parse_date('2016-08-15'),
        'description': 'Advanced beach volleyball played Wednesday nights at Zilker park',
        'difficulty': 'advanced',
        'status': 'open',
        'team_max': 10,
    }
    league = models.League()
    for field, value in attrs.items():
        setattr(league, field, value)
    league.save()
def test_sends_first_reminder_if_first_week(self):
    """A registration-style reminder email is sent when the user's start
    date matches the patched survey week start (their first week)."""
    startdate = '2014-01-23'
    u = User(email='*****@*****.**', startdate=startdate)
    u.save()
    with patch('diary.models.SurveyDate') as mock:
        patched_date = mock.return_value
        patched_date.now.return_value = dateparse.parse_date('2014-01-25')
        patched_date.get_start_date.return_value = dateparse.parse_date('2014-01-23')
        rm = ReminderManager()
        rm.send_second_reminder_email()
    self.assertEqual(len(mail.outbox), 1)
    self.assertRegexpMatches(mail.outbox[0].body, 'You recently registered to complete an online diary');
def from_native(self, value):
    """Deserialize `value` into a datetime.date.

    Accepts empty values (-> None), datetime instances (aware ones are
    converted to the default timezone before taking .date()), date
    instances, and strings matching self.input_formats (ISO-8601 or
    strptime formats). Raises ValidationError when nothing matches.
    """
    if value in validators.EMPTY_VALUES:
        return None
    if isinstance(value, datetime.datetime):
        if timezone and settings.USE_TZ and timezone.is_aware(value):
            # Convert aware datetimes to the default time zone
            # before casting them to dates (#17742).
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_naive(value, default_timezone)
        return value.date()
    if isinstance(value, datetime.date):
        return value
    for format in self.input_formats:
        if format.lower() == ISO_8601:
            try:
                parsed = parse_date(value)
            except (ValueError, TypeError):
                pass
            else:
                # parse_date returns None for unrecognised strings; fall
                # through to the next format in that case
                if parsed is not None:
                    return parsed
        else:
            try:
                parsed = datetime.datetime.strptime(value, format)
            except (ValueError, TypeError):
                pass
            else:
                return parsed.date()
    msg = self.error_messages["invalid"] % readable_date_formats(self.input_formats)
    raise ValidationError(msg)
def test_get_filtered_fields_root_view(self):
    """
    GET requests to a filtered ListCreateAPIView with filter_fields set
    should return results filtered by the given query parameters.
    """
    view = FilterFieldsRootView.as_view()
    # Basic test with no filter: everything comes back.
    request = factory.get('/')
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    assert response.data == self.data
    # Tests that the decimal filter works (exact match).
    search_decimal = Decimal('2.25')
    request = factory.get('/', {'decimal': '%s' % search_decimal})
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if Decimal(f['decimal']) == search_decimal]
    assert response.data == expected_data
    # Tests that the date filter works (exact match).
    search_date = datetime.date(2012, 9, 22)
    request = factory.get('/', {'date': '%s' % search_date})  # search_date str: '2012-09-22'
    response = view(request).render()
    assert response.status_code == status.HTTP_200_OK
    expected_data = [f for f in self.data if parse_date(f['date']) == search_date]
    assert response.data == expected_data
def plan_by_week(request):
    """Render a one-week grid of day plans, ?week weeks away from the week
    containing ?start_date (default: today)."""
    week_offset = int(request.GET.get('week', 0))
    start_date = parse_date(request.GET.get('start_date', str(datetime.date.today())))
    anchor = start_date + datetime.timedelta(week_offset * 7)
    # Snap to the Monday of the anchor's week.
    start_week = anchor - datetime.timedelta(anchor.weekday())
    end_week = start_week + datetime.timedelta(6)
    grid = []
    for day_offset in range(7):
        day = start_week + datetime.timedelta(day_offset)
        grid.append({'date': day, 'plans': PlanForDay.objects.filter(date=day)})
    return render(
        request,
        'week_plan/list.html',
        {
            'grid': grid,
            'next_week': week_offset + 1,
            'prev_week': week_offset - 1,
            'start_week': start_week,
            'end_week': end_week,
            'start_date': formats.date_format(start_date, 'Y-m-d'),
        }
    )
def post(self, request):
    """Update the logged-in user's birthday from POST data, then return to
    the profile page."""
    raw_birthday = request.POST.get('birthday')
    if raw_birthday is not None:
        current_user = request.user
        current_user.birthday = parse_date(raw_birthday)
        current_user.save()
    return redirect('/profile')
def get_employee_bisy(request, employee_id):
    """AJAX view: render an employee's work shifts for the week containing
    ?date (default: today) as an HTML table fragment.

    Returns {'complete': <html>} on success, {'complete': 0} on any error.
    """
    try:
        date = parse_date(request.GET.get('date', str(datetime.date.today())))
        # Snap to Monday..Sunday of the requested week.
        start_week = date - datetime.timedelta(date.weekday())
        end_week = start_week + datetime.timedelta(6)
        # Build a structured dict of work shifts, keyed by formatted date.
        work_shifts = WorkShift.objects.filter(
            date__range=[start_week, end_week],
            employee=employee_id,
            special_config=WorkShift.SPECIAL_CONFIG_EMPLOYEE
        ).order_by('date')
        work_shifts_struct = {}
        for work_day in work_shifts:
            date_format = formats.date_format(work_day.date, "d.m.Y")
            if date_format not in work_shifts_struct:
                work_shifts_struct[date_format] = []
            work_shifts_struct[date_format].append(work_day.club.name)
        # One grid cell per weekday: newline-joined club names, or empty.
        grid = []
        week_cursor = start_week
        while week_cursor <= end_week:
            date_cursor = formats.date_format(week_cursor, "d.m.Y")
            names = ''
            if date_cursor in work_shifts_struct:
                names = "\n".join(work_shifts_struct[date_cursor])
            grid.append(names)
            week_cursor += datetime.timedelta(1)
        table = render_to_string('calendar/_employee_bisy.html', {'grid': grid})
        return JsonResponse({'complete': table})
    except Exception as e:
        # NOTE(review): broad catch-all turns any failure into
        # {'complete': 0}; the print should probably be proper logging.
        print(e)
        return JsonResponse({'complete': 0})
def grab_person_memberships(self, person_id):
    """Fetch a person and all their memberships from the OCD API and sync
    them into local Person/Membership rows.

    Creates the Person (with headshot, email and website) only when no row
    with that ocd_id exists yet; memberships are created or updated either
    way.
    """
    # this grabs a person and all their memberships
    url = base_url + '/' + person_id + '/'
    r = requests.get(url)
    page_json = json.loads(r.text)
    # TO DO: handle updating people & memberships
    person = Person.objects.filter(ocd_id=person_id).first()
    if not person:
        # save image to disk
        if page_json['image']:
            r = requests.get(page_json['image'], verify=False)
            if r.status_code == 200:
                with open((settings.HEADSHOT_PATH + page_json['id'] + ".jpg"), 'wb') as f:
                    for chunk in r.iter_content(1000):
                        f.write(chunk)
                    f.flush()
        # Pick the last non-empty email contact detail, if any.
        email = ''
        for contact_detail in page_json['contact_details']:
            if contact_detail['type'] == 'email':
                if contact_detail['value'] != 'mailto:':
                    email = contact_detail['value']
        website_url = ''
        for link in page_json['links']:
            if link['note'] == "web site":
                website_url = link['url']
        try:
            person = Person.objects.create(
                ocd_id=page_json['id'],
                name=page_json['name'],
                headshot=page_json['image'],
                source_url=page_json['sources'][0]['url'],
                source_note=page_json['sources'][0]['note'],
                website_url=website_url,
                email=email,
                slug=slugify(page_json['name']),
            )
        except IntegrityError:
            # Slug collision (presumably) — retry with the ocd id suffix
            # appended to make the slug unique. TODO confirm.
            ocd_id_part = page_json['id'].rsplit('-', 1)[1]
            person = Person.objects.create(
                ocd_id=page_json['id'],
                name=page_json['name'],
                headshot=page_json['image'],
                source_url=page_json['sources'][0]['url'],
                source_note=page_json['sources'][0]['note'],
                website_url='',
                email=email,
                slug=slugify(page_json['name']) + ocd_id_part,
            )
        # if DEBUG:
        #     print(' adding person: %s' % person.name)
        if DEBUG:
            print('\u263A', end=' ', flush=True)
    for membership_json in page_json['memberships']:
        if membership_json['post']:
            post = Post.objects.filter(
                ocd_id=membership_json['post']['id']).first()
        else:
            post = None
        organization = Organization.objects.filter(
            ocd_id=membership_json['organization']['id']).first()
        # adding republican or democratic party when encountered
        # b/c parties are not added when organizations are loaded (in
        # grab_organizations)
        if not organization and \
                membership_json['organization'][
                    'name'] in ['Republican', 'Democratic']:
            self.grab_organization_posts(
                {'id': membership_json['organization']['id']})
            organization = Organization.objects.filter(
                ocd_id=membership_json['organization']['id']).first()
        # NOTE(review): bare excepts below swallow *any* failure while
        # parsing dates, not just bad/missing values.
        try:
            end_date = parse_date(membership_json['end_date'])
        except:
            end_date = None
        try:
            start_date = parse_date(membership_json['start_date'])
        except:
            start_date = None
        # Update the existing membership, create it, or deduplicate and then
        # update when multiple rows match.
        try:
            obj = Membership.objects.get(_organization=organization,
                                         _person=person,
                                         _post=post)
            created = False
            obj.label = membership_json['label']
            obj.role = membership_json['role']
            obj.start_date = start_date
            obj.end_date = end_date
            obj.save()
        except Membership.DoesNotExist:
            obj = Membership.objects.create(_organization=organization,
                                            _person=person,
                                            _post=post,
                                            label=membership_json['label'],
                                            role=membership_json['role'],
                                            start_date=start_date,
                                            end_date=end_date)
            created = True
        except Membership.MultipleObjectsReturned:
            memberships = Membership.objects.filter(
                _organization=organization, _post=post, _person=person)
            for membership in memberships[1:]:
                membership.delete()
            obj = Membership.objects.get(_organization=organization,
                                         _person=person,
                                         _post=post)
            created = False
            obj.label = membership_json['label']
            obj.role = membership_json['role']
            obj.start_date = start_date
            obj.end_date = end_date
            obj.save()
def handle(self, *args: Any, **kwargs: Any) -> None:
    """Sync Stripe payment intents updated since --since and print the result."""
    since_date = dateparse.parse_date(kwargs['since'])
    result = stripewrapper.map_payment_intents(self._sync_payment_intent, since=since_date)
    print(result)
def date(self):
    """Return the request payload's 'date' field parsed into a date."""
    raw_value = self.request.data['date']
    return parse_date(raw_value)
def getStreaks(order, today):
    """Compute and persist the current and longest streak for `order`.

    For a "Daily" habit a streak counts consecutive checked days; otherwise
    it counts consecutive weeks containing at least one checked day. The
    current week/day is excluded so the streak is not broken by a not-yet
    checked period. Results are saved on order.streak / order.longestStreak.
    """
    # All the times the habit was repeated (checked).
    date_array = list(order.checkedList.all())
    # Every day from the very first repeat up to yesterday.
    list_of_days_since_first_repeat = []
    # The dates which were checked.
    list_of_repeat_days = []
    # One anchor date per week that has passed since the first repeat.
    week_habit = []
    # The week walk starts from today.
    week_habit_date = today
    # NOTE(review): this queries *all* Repeats rows, not just this order's —
    # verify that is intended.
    repeats = Repeats.objects.all()
    # first_time_stamp stays None when there are no Repeats rows yet
    # (nothing has been checked); earliest() raises in that case.
    first_time_stamp = None
    try:
        first_time_stamp = repeats.earliest('dateAsString')
    except:
        # NOTE(review): bare except; earliest() raises DoesNotExist here.
        pass
    # Guard so the rest never sees a missing first repeat.
    if first_time_stamp:
        # The first timestamp parsed into a date.
        first_repeats = parse_date(first_time_stamp.dateAsString)
        # Count up to yesterday so the current streak is not automatically 0
        # when today has not been checked yet.
        last_repeats = today - timedelta(days=1)
        time_stamp_deltas = last_repeats - first_repeats
        # Materialize every day in [first_repeats, yesterday].
        for k in range(time_stamp_deltas.days + 1):
            time_stamp_day = first_repeats + timedelta(days=k)
            list_of_days_since_first_repeat.append(time_stamp_day)
        # Walk backwards in 7-day steps; the current (incomplete) week is
        # deliberately excluded from the streak.
        while week_habit_date > first_repeats:
            week_habit_date -= timedelta(days=7)
            week_habit.append(week_habit_date)
        # Weeks were collected newest-first; restore chronological order.
        week_habit.reverse()
        for repeat in date_array:
            repeated_days = parse_date(repeat.dateAsString)
            list_of_repeat_days.append(repeated_days)
        # Set membership also removes duplicate check-ins on the same day.
        checked_days_array = set(list_of_repeat_days)

        def inCheckedDays(x, checked_days):
            # True if any checked day falls in the week [x, x + 7 days).
            for i in checked_days:
                if x <= i < x + timedelta(days=7):
                    return True
            return False

        def tryingWeekly(a, x):
            # Accumulator step: a is (current_streak, longest_streak),
            # x is the anchor date of one week.
            count_current_before, longest_streak_before = a
            count_current_after = count_current_before + 1
            if inCheckedDays(x, checked_days_array):
                return (count_current_after, count_current_after if count_current_after > longest_streak_before else longest_streak_before)
            else:
                return (0, longest_streak_before)

        def tryingDaily(a, x):
            # Accumulator step: extend the streak if day x was checked,
            # otherwise reset it to 0.
            count_current_before, longest_streak_before = a
            count_current_after = count_current_before + 1 if x in checked_days_array else 0
            return (count_current_after, count_current_after if count_current_after > longest_streak_before else longest_streak_before)

        # accumulate (rather than reduce) keeps intermediate steps visible
        # for debugging; initial=(0, 0) seeds (current, longest).
        result = list(
            accumulate(list_of_days_since_first_repeat, tryingDaily, initial=(0, 0))) if order.interval == "Daily" else list(
            accumulate(week_habit, tryingWeekly, initial=(0, 0)))
        # Last tuple holds the final (current streak, longest streak).
        order.longestStreak = result[-1][1]
        order.streak = result[-1][0]
        order.save()
def check_date_range(request):
    '''
    Used by middleware to check that the requested time range lies within a single term.

    1. If start_date and end_date are given:
       1.1 If start_date falls inside a term, call it t1.
           Otherwise look for the nearest following term as t1;
           if none qualifies, return an error message.
       1.2 If end_date falls inside a term, call it t2.
           Otherwise look for the nearest preceding term as t2;
           if none qualifies, return an error message.
       1.3 If t1 == t2, return t1; otherwise return an error message.
    2. If school_year and term_type are given, fetch the matching term directly
       or return an error message.
    3. If server_type is county/city level, check against NewTerm instead of Term.
    4. TODO: how should prefecture-level ('地市州') servers be handled?

    Returns a Term/NewTerm instance on success, or a unicode error string on failure.
    '''
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    school_year = request.GET.get('school_year')
    term_type = request.GET.get('term_type')
    server_type = models.Setting.getvalue('server_type')
    # o = models.Setting.objects.get(name='server_type')  # probe for a table-locking problem (debug leftover)
    if start_date and end_date:
        s = parse_date(start_date)
        e = parse_date(end_date)
        # Find t1: the term containing start_date, else the nearest later term.
        # The bare except intentionally catches Term.DoesNotExist (and anything
        # else .get() raises) and falls back to the "next term" lookup.
        # NOTE(review): if server_type is some other truthy value (e.g. a
        # prefecture-level server), no branch assigns t1 and a NameError will
        # escape later — confirm server_type is always 'school'/'country'/empty.
        try:
            if server_type == 'school':
                t1 = models.Term.objects.get(start_date__lte=s, end_date__gte=s)
            elif server_type == 'country':
                t1 = models.NewTerm.objects.get(start_date__lte=s, end_date__gte=s)
            elif not server_type:
                return u'获取服务器参数失败'
        except:
            if server_type == 'school':
                t1 = models.Term.objects.filter(start_date__gte=s)
            elif server_type == 'country':
                t1 = models.NewTerm.objects.filter(start_date__gte=s)
            t1 = t1.order_by('start_date')
            if t1.exists():
                t1 = t1[0]
            else:
                # return u'开始时间(%s)不在任何学期内' % start_date
                return u'开始时间(%s)后没有可用学年学期' % start_date
        # Find t2: the term containing end_date, else the nearest earlier term.
        try:
            if server_type == 'school':
                t2 = models.Term.objects.get(start_date__lte=e, end_date__gte=e)
            elif server_type == 'country':
                t2 = models.NewTerm.objects.get(start_date__lte=e, end_date__gte=e)
        except:
            if server_type == 'school':
                t2 = models.Term.objects.filter(end_date__lte=e)
            elif server_type == 'country':
                t2 = models.NewTerm.objects.filter(end_date__lte=e)
            t2 = t2.order_by('-end_date')
            if t2.exists():
                t2 = t2[0]
            else:
                # return u'结束时间(%s)不在任何学期内' % end_date
                return u'结束时间(%s)前没有可用学年学期' % end_date
        # The range is valid only when both endpoints resolve to the same term.
        if t1 == t2:
            return t1
        else:
            if t1.end_date < t2.start_date:
                # the range spans more than one term
                return u'查询时间范围仅限单个学期时间段内'
            else:
                # t1 starts after t2 ends: the range falls in a gap between terms
                return u'查询时间范围内无可用学年学期'
    elif school_year and term_type:
        # Direct lookup by school year + term type.
        try:
            if server_type == 'school':
                t = models.Term.objects.get(school_year=school_year, term_type=term_type)
            elif server_type == 'country':
                t = models.NewTerm.objects.get(school_year=school_year, term_type=term_type)
            return t
        except:
            return u'查询的学年学期(%s%s)不存在' % (school_year, term_type)
    else:
        # Neither a date range nor a school-year/term-type pair was supplied.
        return u'请选择时间范围或学年学期'
def get(self, request):
    """Return a JSON feed of in-progress P2P products published on/after ``date``.

    Query params:
        date: required, an ISO date string (YYYY-MM-DD); products whose
            publish_time is on or after this date are included.

    Returns an HttpResponse with a JSON body; error payloads carry
    code -2 (missing date) or -1 (malformed date).
    """
    date_args = request.GET.get('date', '')
    if not date_args:
        return HttpResponse(
            renderers.JSONRenderer().render({'message': u'date必传', 'code': -2},
                                            'application/json'))
    # parse_date returns None for strings that don't look like a date and
    # raises ValueError for well-formed but invalid dates (e.g. 2021-02-30).
    # The original bare `except:` swallowed every exception; only ValueError
    # can legitimately occur here.
    try:
        start_time = dateparse.parse_date(date_args)
        if not start_time:
            return HttpResponse(
                renderers.JSONRenderer().render({'message': u'错误的date', 'code': -1},
                                                'application/json'))
    except ValueError:
        return HttpResponse(
            renderers.JSONRenderer().render({'message': u'错误的date', 'code': -1},
                                            'application/json'))

    # Only visible products still raising funds.
    p2pproducts = P2PProduct.objects.filter(hide=False) \
        .filter(status=u'正在招标').filter(publish_time__gte=start_time)

    p2p_list = []
    for p2p in p2pproducts:
        # Funding progress, truncated (not rounded) to one decimal place.
        amount = Decimal.from_float(p2p.total_amount).quantize(Decimal('0.00'))
        percent = p2p.ordered_amount / amount * 100
        fld_lend_progress = percent.quantize(Decimal('0.0'), 'ROUND_DOWN')

        # An active promotion bumps the displayed yearly rate.
        fld_awards = 0
        fld_interest_year = Decimal.from_float(p2p.expected_earning_rate)
        if p2p.activity:
            fld_awards = 1
            fld_interest_year += p2p.activity.rule.rule_amount * 100

        p2pequity_count = p2p.equities.all().count()
        temp_p2p = {
            'fld_proid': p2p.id,
            "fld_proname": p2p.name,
            "fld_name": u'网利宝',
            "fld_finstarttime": timezone.localtime(p2p.publish_time).strftime("%Y-%m-%d %H:%M:%S"),
            "fld_finendtime": timezone.localtime(p2p.end_time).strftime("%Y-%m-%d %H:%M:%S"),
            "fld_total_finance": p2p.total_amount,
            # periods are stored in months; the feed wants days
            "fld_lend_period": p2p.period * 30,
            "fld_interest_year": float(fld_interest_year.quantize(Decimal('0.0'))),
            "fld_refundmode": p2p.pay_method,
            "fld_loantype_name": u'第三方担保',
            "fld_guarantee_org": p2p.warrant_company.name,
            "fld_securitymode_name": u'本息保障',
            "fld_mininvest": 100.0,
            "fld_awards": fld_awards,
            "fld_lend_progress": fld_lend_progress,
            "fld_invest_number": p2pequity_count,
            "fld_finance_left": p2p.total_amount - p2p.ordered_amount,
            "fld_lendname": p2p.borrower_name,
            "fld_lendway": p2p.short_usage,
            "fld_netaddress": 'https://{}/p2p/detail/{}?promo_token=hexunw'.format(request.get_host(), p2p.id),
            "fld_status": 1,
            "fld_status_name": u'筹款中'
        }
        p2p_list.append(temp_p2p)

    result = {
        "data": {
            "list": p2p_list,
            "version": "",
            "status": "",
            "msg": ""
        }
    }
    return HttpResponse(renderers.JSONRenderer().render(result, 'application/json'))
def export_orders_xls(request, date):
    """Export the orders due for delivery on ``date`` as an .xls attachment.

    ``date`` may be an ISO string (YYYY-MM-DD) or the local DD-MM-YYYY form;
    the latter is detected when parse_date fails and split manually.
    """
    # NOTE(review): parse_date(date) is evaluated twice on the success path;
    # harmless, but could be computed once.
    if parse_date(date) is None:
        # Fall back to DD-MM-YYYY input.
        numbers = date.split('-')
        date = datetime.date(int(numbers[2]), int(numbers[1]), int(numbers[0]))
    else:
        date = parse_date(date)
    response = HttpResponse(content_type='application/ms-excel')
    response[
        'Content-Disposition'] = f'attachment; filename="orders-{date}.xls"'
    wb = xlwt.Workbook(encoding='utf-8')
    ws = wb.add_sheet('Orders')
    row_num = 0
    # Bold style for the header row; reset to plain afterwards.
    font_style = xlwt.XFStyle()
    font_style.font.bold = True
    columns = ['Sıra No', 'Sipariş Kodu', 'Ad Soyad', 'Telefon', ' Adres', 'Tavuk', 'Yumurta', \
               'Süt', 'Tereyağ', 'Peynir', 'Sucuk', 'Diğer', 'Toplam Tutar', 'Ödeme Şekli', \
               'Notlar' ]
    for col_num in range(len(columns)):
        ws.write(row_num, col_num, columns[col_num], font_style)
    font_style = xlwt.XFStyle()
    orders = Order.objects.filter(
        delivery_date=date).order_by('customer__address__district__name')
    categories = Category.objects.all()
    products = Product.objects.all()
    # Row 2 carries per-category quantity summaries, one category per column
    # starting at the 'Tavuk' column (index 5).
    i = 0
    for category in categories:
        number_of_order_items = []
        col_num = 5
        for product in products.filter(category=category):
            total = 0
            # NOTE(review): the lookup 'order_item__delivery_date' looks
            # inconsistent with the Order filter above ('delivery_date');
            # presumably this should traverse 'order__delivery_date' — confirm
            # against the OrderItem model.
            for item in product.orderitem_set.all().filter(
                    order_item__delivery_date=date):
                total += item.quantity
            number_of_order_items.append(
                f"{str(Decimal(total))} x {product.name} \n")
        # NOTE(review): a Python list is passed to xlwt's write(); xlwt only
        # accepts scalar cell types — presumably this should be
        # ''.join(number_of_order_items). Confirm before changing.
        ws.write(row_num + 2, col_num + i, number_of_order_items, font_style)
        i += 1
    # Grand total of all orders for the day, placed in the 'Toplam Tutar' column.
    total_amount = 0
    for order in orders:
        total_amount += order.total_price
    ws.write(row_num + 2, 12, total_amount, font_style)
    # Detail rows start at sheet row 4 (row_num is pre-incremented below).
    row_num = 3
    col_num = 0
    for order in orders:
        row_num += 1
        col_num = 0
        # Sequence number relative to the first data row.
        ws.write(row_num, col_num, f"{row_num-3}", font_style)
        # NOTE(review): header for this column is 'Sipariş Kodu' (order code);
        # confirm order.nick is indeed the order code.
        ws.write(row_num, col_num + 1, order.nick, font_style)
        ws.write(
            row_num, col_num + 2,
            f"{order.customer.first_name.upper()} {order.customer.last_name.upper()}",
            font_style)
        ws.write(row_num, col_num + 3, order.customer.phone1, font_style)
        ws.write(row_num, col_num + 4,
                 order.customer.address.get_full_address().upper(), font_style)
        # Per-category cell contents, accumulated as strings per order.
        tavuk, yumurta, süt, tereyağ, peynir, sucuk, diğer = "", "", "", "", "", "", ""
        for item in order.items.all():
            if item.product.category.name == "Tavuk":
                # product name repeated once per unit ordered
                tavuk += (item.product.name + "\n") * int(item.quantity)
            elif item.product.category.name == "Yumurta":
                yumurta += str(Decimal(item.quantity))
            elif item.product.category.name == "Süt":
                süt += (item.product.name + "\n") * int(item.quantity)
            elif item.product.category.name == "Tereyağ":
                tereyağ += (item.product.name) + " " + str(
                    Decimal(item.quantity)) + "\n"
            elif item.product.category.name == "Peynir":
                peynir += (item.product.name + "\n") * int(item.quantity)
            elif item.product.category.name == "Sucuk":
                sucuk += f"{Decimal(item.quantity)} {item.product.distribution_unit}"
            elif item.product.category.name == "Diğer":
                diğer += (item.product.name) + " " + str(Decimal(
                    item.quantity)) + "\n"
        notes = order.notes
        if order.is_instagram:
            notes += "\nKullanıcı Adı:" + order.instagram_username
        # First-ever order for this customer gets a marker in the notes column.
        if len(order.customer.order_set.all()) == 1:
            notes += "***İlk Sipariş***"
        ws.write(row_num, col_num + 5, tavuk.upper(), font_style)
        ws.write(row_num, col_num + 6, yumurta.upper(), font_style)
        ws.write(row_num, col_num + 7, süt.upper(), font_style)
        ws.write(row_num, col_num + 8, tereyağ.upper(), font_style)
        ws.write(row_num, col_num + 9, peynir.upper(), font_style)
        ws.write(row_num, col_num + 10, sucuk.upper(), font_style)
        ws.write(row_num, col_num + 11, diğer.upper(), font_style)
        ws.write(row_num, col_num + 12, order.total_price, font_style)
        # Payment-method column intentionally left blank.
        ws.write(row_num, col_num + 13, "", font_style)
        ws.write(row_num, col_num + 14, notes.upper(), font_style)
    wb.save(response)
    return response
def upload_csv(request):
    """Import contacts from an uploaded CSV file, one Contacto per line.

    Lines that fail for any reason are counted and skipped (best-effort
    import); a summary message reports imported vs. total lines.
    Redirects back to the contact list when done.
    """
    if "GET" == request.method:
        return HttpResponseRedirect(reverse("contactos"))
    user = request.user
    csv_file = request.FILES["csv_file"]
    if not csv_file.name.endswith('.csv'):
        messages.error(request, 'El archivo no es un csv')
        return HttpResponseRedirect(reverse("ver_importador"))
    # Reject files larger than one chunk (~2.5 MB by Django default).
    if csv_file.multiple_chunks():
        messages.error(request, 'El archivo es muy grande')
        return HttpResponseRedirect(reverse("ver_importador"))
    file_data = csv_file.read().decode("utf-8")
    lineas = file_data.split("\n")
    # NOTE(review): total_contacts also counts a header row and a trailing
    # empty line, if present — those simply fail and land in failed_contacts.
    total_contacts = 0
    failed_contacts = 0
    for linea in lineas:
        total_contacts += 1
        # Per-line best-effort: any failure (short row, bad date, DB error)
        # is logged and counted, and the import continues.
        try:
            # NOTE(review): naive split(",") breaks on quoted fields that
            # contain commas — the stdlib csv module would handle those;
            # confirm the expected input format before switching.
            fields = linea.split(",")
            nombre = fields[CSV_NOMBRE_INDEX]
            apellido = fields[CSV_APELLIDO_INDEX]
            documento = fields[CSV_DOCUMENTO_INDEX]
            cargo = fields[CSV_CARGO_INDEX]
            ocupacion = fields[CSV_OCUPACION_INDEX]
            calle = fields[CSV_CALLE_INDEX]
            numero = fields[CSV_NUMERO_INDEX]
            ciudad = fields[CSV_CIUDAD_INDEX]
            cod_postal = fields[CSV_COD_POSTAL_INDEX]
            pais = fields[CSV_PAIS_INDEX]
            fecha_nacimiento = fields[CSV_FECHA_NACIMIENTO_INDEX]
            tipo = fields[CSV_TIPO_INDEX]
            email = fields[CSV_EMAIL_INDEX]
            email_alternativo = fields[CSV_EMAIL_ALTERNATIVO_INDEX]
            telefono = fields[CSV_TELEFONO_INDEX]
            movil = fields[CSV_MOVIL_INDEX]
            recibir_novedades = fields[CSV_RECIBIR_NOVEDADES_INDEX]
            observaciones = fields[CSV_OBSERVACIONES_INDEX]
            es_voluntario = fields[CSV_ES_VOLUNTARIO_INDEX]
            turno = fields[CSV_TURNO_INDEX]
            estado = fields[CSV_ESTADO_INDEX]
            habilidades = fields[CSV_HABILIDADES_INDEX]
            categoria = fields[CSV_CATEGORIA_INDEX]
            nombre_cuenta = fields[CSV_NOMBRE_CUENTA_INDEX]
            origen = fields[CSV_NOMBRE_ORIGEN_INDEX]
            sexo = fields[CSV_GENERO_INDEX]
            # Reuse an existing account with this name for the user's
            # organisation, or create one on the fly.
            id_listado_cuentas = Cuenta.objects.filter(
                organizacion__usuario=user).filter(
                nombre=nombre_cuenta).values_list('id', flat=True)
            print(id_listado_cuentas)  # debug output
            cuenta = None
            if not id_listado_cuentas:
                cuenta = Cuenta.objects.create(organizacion=user.organizacion,
                                               nombre=nombre_cuenta,
                                               email=email_alternativo)
            else:
                cuenta = Cuenta.objects.filter(id=id_listado_cuentas[0])[0]
            # Same get-or-create dance for the custom contact type...
            tipoCustom = CampoCustomTipoContacto.objects.filter(
                organizacion__usuario=user).filter(tipo=tipo)
            if not tipoCustom:
                categoria = CampoCustomTipoContacto.objects.create(
                    organizacion=user.organizacion, tipo=tipo)
            else:
                categoria = tipoCustom[0]
            # ...and for the custom origin.
            campoOrigen = CampoCustomOrigen.objects.filter(
                organizacion__usuario=user).filter(origen=origen)
            if not campoOrigen:
                origen = CampoCustomOrigen.objects.create(
                    organizacion=user.organizacion, origen=origen)
            else:
                origen = campoOrigen[0]
            # Map CSV labels to the model's integer choices.
            if turno == "Mañana":
                turno = 0
            else:
                turno = 1
            if estado == "Activo":
                estado = 1
            else:
                estado = 0
            # NOTE(review): recibir_novedades, es_voluntario and habilidades
            # are read from the CSV but tipo=0, habilidades=3 and
            # es_voluntario=True are hard-coded below — confirm intent.
            contacto = Contacto(
                cuenta=cuenta,
                nombre=nombre,
                apellido=apellido,
                email=email,
                tipo=0,
                categoria=categoria,
                documento=documento,
                cargo=cargo,
                ocupacion=ocupacion,
                direccion=calle + " " + numero,
                ciudad=ciudad,
                pais=pais,
                cod_postal=cod_postal,
                email_alternativo=email_alternativo,
                observaciones=observaciones,
                movil=movil,
                origen=origen,
                habilidades=3,
                turno=turno,
                estado=estado,
                es_voluntario=True,
                sexo=sexo,
                telefono=telefono,
                fecha_de_nacimiento=parse_date(fecha_nacimiento))
            contacto.save()
        except Exception as e:
            print("Error cargando un usuario: " + linea)
            print(e)
            failed_contacts += 1
    messages.error(
        request, "Se importaron {} de {} contactos".format(
            total_contacts - failed_contacts, total_contacts))
    return HttpResponseRedirect(reverse("contactos"))
def convert_datefield_value(self, value, expression, connection):
    """Coerce a raw database value into a ``datetime.date``.

    None passes through untouched, values that are already dates are
    returned as-is, and anything else (an ISO date string) is parsed.
    """
    if value is None:
        return None
    if isinstance(value, datetime.date):
        return value
    return parse_date(value)
def test_usupported_date(self):
    """A non-datetime first argument must make image_timedelta raise ValueError."""
    bad_value = "foo"
    good_value = parse_date("1900-01-01")
    self.assertRaisesRegex(
        ValueError,
        "Expecting a datetime value",
        image_timedelta,
        bad_value,
        good_value,
    )
def _validate_part(name, param, value):
    """
    Validate a parameter, or an item in a parameter.

    Items can have the same validation requirements as parameters, possibly
    recursively, so we handle that possible recursion, as well as all
    validation, here.

    Validations we do:

    * Integers must be able to be converted into an integer.
    * Enums must have the value in the parameter's enum list.

    Raises InvalidParameterDefinition for a malformed parameter definition,
    BadRequest for string pattern/format failures, and ValueError for other
    value failures. Returns the (possibly converted) value.
    """
    # Parse arrays by splitting them and recursively validating the values.
    if param.type == openapi.TYPE_ARRAY:
        collection_format = getattr(param, 'collectionFormat', None)
        if not collection_format:
            raise InvalidParameterDefinition(
                "Array parameter collection format not defined"
            )
        splitter_for = {'csv': ',', 'ssv': ' ', 'tsv': '\t', 'pipes': '|'}
        values = []
        if collection_format in splitter_for:
            values = value.split(splitter_for[collection_format])
        elif collection_format == 'multi':
            # No idea if Django handles multiple arguments by putting them
            # into an array itself, but let's start with this idea
            values = value
        else:
            raise InvalidParameterDefinition(
                f"Array parameter collection format {collection_format} not recognised"
            )
        if param.items_ is None:
            raise InvalidParameterDefinition(
                "Array parameter has not defined the type of its items"
            )
        # BUG FIX: recurse into this function — the previous call target
        # `validate_param_part` does not exist (NameError on any array param).
        return [_validate_part(name, param.items_, v) for v in values]

    # Handle any Pythonic type conversions first
    if param.type == openapi.TYPE_BOOLEAN:
        # Booleans don't do any further processing, so exit now
        return value in ('true', '1', 'yes')
    elif param.type == openapi.TYPE_INTEGER:
        try:
            value = int(value)
        except ValueError:
            raise ValueError(
                f"The value for the '{name}' field must be an integer"
            )
    elif param.type == openapi.TYPE_NUMBER:
        try:
            value = float(value)
        except ValueError:
            raise ValueError(
                f"The value for the '{name}' field must be a floating point number"
            )
    elif param.type == openapi.TYPE_STRING:
        # Check string pattern and format possibilities.
        pattern = getattr(param, 'pattern', None)
        if pattern:
            if not re.match(pattern, value):
                raise BadRequest(
                    f"The value of the '{name}' field did not match the "
                    f"pattern '{pattern}'"
                )
        param_format = getattr(param, 'format', 'NONE')
        if param_format == openapi.FORMAT_DATE:
            try:
                # datetime.date objects cannot be timezone aware, so they
                # have to be converted into datetimes. Haven't found a better
                # way of doing this:
                # BUG FIX: parse_date returns None (no exception) for strings
                # that don't look like a date, making .toordinal() raise
                # AttributeError — catch that too so the client gets a 400.
                value = make_aware(datetime.fromordinal(parse_date(value).toordinal()))
            except (ValueError, TypeError, AttributeError):
                raise BadRequest(
                    f"The value for the '{name}' field did not look like a date"
                )
        elif param_format == openapi.FORMAT_DATETIME:
            try:
                # BUG FIX: parse_datetime likewise returns None for malformed
                # input, which make_aware cannot handle.
                value = make_aware(parse_datetime(value))
            except (ValueError, TypeError, AttributeError):
                raise BadRequest(
                    f"The value for the '{name}' field did not look like a datetime"
                )
        # We don't check any of the other formats here (yet).

    # Check enumeration. BUG FIX: drf_yasg parameters carry an `enum`
    # attribute that may be None; `value not in None` raised TypeError.
    enum = getattr(param, 'enum', None)
    if enum is not None:
        if value not in enum:
            raise ValueError(
                f"The value for the '{name}' field is required to be one of "
                "the following values: " + ', '.join(str(e) for e in enum)
            )
    # Check pattern. BUG FIX: the original called re.fullmatch(value) —
    # missing the pattern argument entirely — and compiled a possibly-None
    # pattern. Use the compiled regex and guard against None.
    pattern = getattr(param, 'pattern', None)
    if pattern is not None:
        regex = re.compile(pattern)
        if not regex.fullmatch(value):
            raise ValueError(
                f"The value '{value}' for parameter '{name}' did "
                f"not match the pattern {pattern}"
            )
    # OK, validation has passed, return the value here
    return value
def dashboard(request, username=None):
    """Render the time-tracking dashboard.

    With a ``username`` the view shows that user's read-only dashboard;
    without one it shows the logged-in user's own dashboard. An optional
    ``date`` GET parameter (ISO format) selects the day to display,
    defaulting to today (UTC).
    """
    if username:
        user = get_object_or_404(MyUser, username=username)
        dashboard = False
    else:
        user = get_object_or_404(MyUser, pk=request.user.pk)
        dashboard = True
    # Normalise the selected day to a midnight datetime so it can be used
    # in started_at range comparisons below.
    if "date" in request.GET:
        today_date = parse_date(request.GET["date"])
        today_date = datetime.datetime(today_date.year, today_date.month,
                                       today_date.day)
    else:
        today_date = datetime.datetime.utcnow().date()
        today_date = datetime.datetime(today_date.year, today_date.month,
                                       today_date.day)
    active_entry = user.get_today_timeentry()
    # NOTE(review): 'get_todat_timeentry' looks like a typo of
    # 'get_today_timeentry' — confirm the method name on MyUser before fixing.
    entries = user.get_todat_timeentry(today_date)
    # Per-project duration sums for the selected day.
    entries_totals = Project.objects.values('name', 'client__name').filter(
        Q(
            Q(timeentry__user=user) &
            Q(
                Q(timeentry__started_at__gt=today_date) &
                Q(timeentry__started_at__lt=today_date + datetime.timedelta(days=1))))).annotate(
        dsum=Sum("timeentry__duration"))
    # Total tracked duration for the selected day.
    today_totals = TimeEntry.objects.filter(
        Q(
            Q(user=user) &
            Q(
                Q(started_at__gt=today_date) &
                Q(started_at__lt=today_date + datetime.timedelta(days=1))))
    ).aggregate(Sum("duration"))
    # Total tracked duration for the preceding 7 days (excluding today).
    week_totals = TimeEntry.objects.filter(
        Q(
            Q(user=user) &
            Q(
                Q(started_at__lt=today_date) &
                Q(started_at__gt=today_date - datetime.timedelta(days=7))))
    ).aggregate(Sum("duration"))
    if today_totals and "duration__sum" in today_totals and today_totals[
            "duration__sum"]:
        today_totals = today_totals["duration__sum"]
    else:
        today_totals = 0
    if week_totals and "duration__sum" in week_totals and week_totals[
            "duration__sum"]:
        # NOTE(review): dividing by 5 presumably averages over 5 working
        # days even though the window spans 7 calendar days — confirm.
        week_totals = int(week_totals["duration__sum"] / 5)
    else:
        week_totals = 0
    attendance = Attendance.objects.filter(user=user).last()
    # Tasks waiting ("WA") or being worked on ("WO") that were submitted
    # for this user.
    queue_tasks = Task.objects.filter(
        Q(Q(submitted_for=user) & Q(Q(status="WA") | Q(status="WO"))))
    return render(
        request, 'main/dashboard/dashboard.html', {
            "active_task": active_entry,
            "today_tasks": entries,
            "queue_tasks": queue_tasks,
            "dashboard": dashboard,
            "user": user,
            "attendance": attendance,
            "entries_totals": entries_totals,
            "today_totals": today_totals,
            "week_totals": week_totals,
            "today_date": today_date,
        })
def clean_start_date(self):
    """Parse the raw 'start_date' form value into a date (None if malformed)."""
    return dateparse.parse_date(self.data['start_date'])
def _save_student_upload(request, field_name, prefix):
    """Persist an optional uploaded file under MEDIA_ROOT using ``prefix``.

    Returns the stored name (prefix + original filename) or '' when the
    field was not submitted. Mirrors the original inline logic, including
    the prefix being concatenated without a path separator.
    """
    file = request.FILES.get(field_name)
    if not file:
        return ""
    stored_name = prefix + file.name
    if not os.path.exists(settings.MEDIA_ROOT + prefix):
        os.mkdir(settings.MEDIA_ROOT + prefix)
    default_storage.save(settings.MEDIA_ROOT + prefix + file.name,
                         ContentFile(file.read()))
    return stored_name


def student_reg(request):
    """Student registration form: render on GET, create a Student on POST.

    Any error while processing the POST (missing field, bad value, DB
    failure) flashes a generic message and re-renders the form.
    """
    # BUG FIX: request.session['employee_id'] raised KeyError (HTTP 500) for
    # visitors with no session entry at all; .get() sends them to login.
    if not request.session.get('employee_id'):
        return redirect('/login/')
    # Lookup data for the form's select boxes.
    class_list = models.ClassInfo.objects.filter(status = True)
    shift_list = models.Shift.objects.filter(status = True)
    section_list = models.Section.objects.filter(status = True)
    session_list = models.SessionInfo.objects.filter(status = True)
    running_year = models.Years.objects.filter(status = True)
    group_lsit = models.GroupTypeList.objects.filter(status = True).order_by("-id")
    context={
        'class_list':class_list,
        'shift_list':shift_list,
        'section_list':section_list,
        'session_list':session_list,
        'running_year':running_year,
        'group_lsit':group_lsit,
    }
    try:
        if request.method == 'POST':
            student_id = request.POST['student_id']
            st_first_name = request.POST['st_first_name']
            st_last_name = request.POST['st_last_name']
            st_bangla_name = request.POST['st_bangla_name']
            st_gender = request.POST['st_gender']
            st_religion = request.POST['st_religion']
            date_of_birth = parse_date(request.POST['date_of_birth'])
            birth_certificate_no = request.POST['birth_certificate_no']
            st_mobile = request.POST['st_mobile']
            st_email = request.POST['st_email']
            st_blood_group = request.POST['st_blood_group']
            # Foreign keys arrive as stringified ids.
            class_name = int(request.POST['class_name'])
            shift_name = int(request.POST['shift_name'])
            section_name = int(request.POST['section_name'])
            session_name = int(request.POST['session_name'])
            running_year = int(request.POST['running_year'])
            group_type = int(request.POST['group_type'])
            student_type = request.POST['student_type']
            st_roll = request.POST['st_roll']
            st_reg = request.POST['st_reg']
            psc_roll = request.POST['psc_roll']
            jsc_roll = request.POST['jsc_roll']
            father_name = request.POST['father_name']
            father_bangla_name = request.POST['father_bangla_name']
            father_occupation = request.POST['father_occupation']
            father_mobile = request.POST['father_mobile']
            mother_name = request.POST['mother_name']
            mother_bangla_name = request.POST['mother_bangla_name']
            mother_occupation = request.POST['mother_occupation']
            mother_mobile = request.POST['mother_mobile']
            guardian_name = request.POST['guardian_name']
            guardian_relation = request.POST['guardian_relation']
            guardian_mobile = request.POST['guardian_mobile']
            guardian_monthly_income = request.POST['guardian_monthly_income']
            present_address = request.POST['present_address']
            parmanent_address = request.POST['parmanent_address']
            previous_school_name = request.POST['previous_school_name']
            previous_school_address = request.POST['previous_school_address']
            tc_no = request.POST['tc_no']
            tc_date = parse_date(request.POST['tc_date'])
            fitness_details = request.POST['fitness_details']
            student_status = request.POST['student_status']
            # BUG FIX: unchecked checkboxes are simply absent from POST, so
            # request.POST['new_student'] raised KeyError and aborted the
            # whole registration; .get() treats absence as False.
            new_student = bool(request.POST.get('new_student'))

            # Optional uploads; each returns '' when not provided.
            order_file1 = _save_student_upload(request, 'student_img', 'student_img')
            order_file2 = _save_student_upload(request, 'father_photo', 'father_photo')
            order_file3 = _save_student_upload(request, 'mother_photo', 'mother_photo')
            order_file4 = _save_student_upload(request, 'medical_certificate', 'medical_certificate')
            order_file5 = _save_student_upload(request, 'guardian_photo', 'guardian_photo')

            # BUG FIX: POST values are always strings, so the original
            # comparisons against the ints 1/2 never matched and the raw
            # string was stored in the boolean status field.
            if student_status == '1':
                student_status = True
            elif student_status == '2':
                student_status = False
            # NOTE(review): st_email is read above but never stored — the
            # Student model may not have an email field; confirm.
            if models.Student.objects.create(student_id= student_id, st_first_name = st_first_name,
                st_last_name = st_last_name, st_bangla_name = st_bangla_name, st_gender = st_gender,
                st_religion = st_religion, date_of_birth= date_of_birth,
                birth_certificate_no=birth_certificate_no, st_mobile = st_mobile,
                st_blood_group= st_blood_group, class_name_id =class_name, shift_name_id = shift_name,
                section_name_id = section_name, session_name_id = session_name,
                running_year_id = running_year, group_type_id = group_type,
                student_type = student_type, st_roll= st_roll, st_reg=st_reg, psc_roll =psc_roll,
                jsc_roll = jsc_roll, father_name= father_name, father_bangla_name=father_bangla_name,
                father_occupation = father_occupation, father_mobile = father_mobile,
                mother_name = mother_name, mother_bangla_name= mother_bangla_name,
                mother_occupation=mother_occupation, mother_mobile = mother_mobile,
                guardian_name = guardian_name, guardian_relation = guardian_relation,
                guardian_mobile= guardian_mobile, guardian_monthly_income=guardian_monthly_income,
                present_address = present_address, parmanent_address = parmanent_address,
                previous_school_name= previous_school_name,
                previous_school_address = previous_school_address, tc_no = tc_no, tc_date = tc_date,
                fitness_details = fitness_details, status = student_status, new_student = new_student,
                student_img = order_file1, father_photo = order_file2, mother_photo = order_file3,
                medical_certificate = order_file4, guardian_photo = order_file5):
                messages.success(request,"Student Admission Successful")
                return redirect('/student-list/')
            else:
                messages.success(request,"Please Input Valid Value.")
                return redirect('/student-registration/')
    except Exception:
        # Best-effort error handling: any failure falls through to re-render
        # the form with a generic message (narrowed from a bare except).
        messages.success(request,"Please Input Valid Value.")
    return render(request, 'school/admin/reg_student.html', context)
def get_term_from_date_range(request):
    '''
    Resolve the term that matches the requested time range.

    1. If start_date and end_date are given:
       1.1 If start_date falls inside a term, call it t1; otherwise use the
           nearest following term, or return an error message.
       1.2 If end_date falls inside a term, call it t2; otherwise use the
           nearest preceding term, or return an error message.
       1.3 If t1 == t2, return t1; otherwise return an error message.
    2. If school_year and term_type are given, fetch the matching term
       directly, or return an error message.
    3. If server_type is not school-level, the lookups above are further
       scoped by the school identified by the 'uuid' parameter.

    Returns a Term instance on success, a unicode error string on failure,
    or (implicitly) None — see the note at the end.
    '''
    start_date = request.REQUEST.get('start_date')
    end_date = request.REQUEST.get('end_date')
    school_year = request.REQUEST.get('school_year')
    term_type = request.REQUEST.get('term_type')
    current_term = request.REQUEST.get('current_term', 'false')
    server_type = models.Setting.getvalue('server_type')
    # School-level servers have exactly one school group; other server
    # types must name the school via its uuid.
    if server_type == 'school':
        school = models.Group.objects.get(group_type='school')
    else:
        school_uuid = request.REQUEST.get('uuid')
        school = models.Group.objects.get(uuid=school_uuid)
    if start_date and end_date:
        s = parse_date(start_date)
        e = parse_date(end_date)
        # The bare excepts intentionally catch Term.DoesNotExist from .get()
        # and fall back to the nearest-term lookups.
        try:
            t1 = models.Term.objects.get(start_date__lte=s, end_date__gte=s,
                                         school=school)
        except:
            t1 = models.Term.objects.filter(start_date__gte=s, school=school)
            t1 = t1.order_by('start_date')
            if t1.exists():
                t1 = t1[0]
            else:
                return u'开始时间(%s)不在任何学期内' % start_date
        try:
            t2 = models.Term.objects.get(start_date__lte=e, end_date__gte=e,
                                         school=school)
        except:
            t2 = models.Term.objects.filter(end_date__lte=e, school=school)
            t2 = t2.order_by('-end_date')
            if t2.exists():
                t2 = t2[0]
            else:
                return u'结束时间(%s)不在任何学期内' % end_date
        # Valid only when both endpoints land in the same term.
        if t1 == t2:
            return t1
        else:
            return u'开始时间(%s)(%s%s)与结束时间(%s)(%s%s)不在同一学期内' % (
                start_date, t1.school_year, t1.term_type,
                end_date, t2.school_year, t2.term_type)
    elif school_year and term_type:
        # Direct lookup by school year + term type.
        try:
            t = models.Term.objects.get(school_year=school_year,
                                        term_type=term_type,
                                        school=school)
            return t
        except:
            return u'查询的学年学期(%s%s)不存在' % (school_year, term_type)
    else:
        # NOTE(review): when current_term != 'true', or the current-term
        # lookup fails, this function implicitly returns None rather than
        # an error string — confirm callers handle None.
        if current_term == 'true':
            try:
                return models.Term.get_current_term_list(school=school)[0]
            except Exception as e:
                pass
def setUp(self): """ initialisatie van de test case """ # NhbLib sporter = Sporter() sporter.lid_nr = 123456 sporter.voornaam = 'Jan' sporter.achternaam = 'Schutter' sporter.email = '*****@*****.**' sporter.geboorte_datum = parse_date('1970-03-03') sporter.woon_straatnaam = 'Papendal' sporter.geslacht = 'M' sporter.sinds_datum = parse_date("1991-02-03") # Y-M-D sporter.save() sporter = Sporter() sporter.lid_nr = 123457 sporter.voornaam = 'Petra' sporter.achternaam = 'Schutter' sporter.email = '*****@*****.**' sporter.geboorte_datum = parse_date('1970-01-30') sporter.woon_straatnaam = 'Arnhem' sporter.geslacht = 'V' sporter.sinds_datum = parse_date("1991-02-05") # Y-M-D sporter.save() # Record 42 rec = IndivRecord() rec.volg_nr = 42 rec.discipline = DISCIPLINE[0][0] # OD rec.soort_record = 'Test record' rec.geslacht = GESLACHT[0][0] # M rec.leeftijdscategorie = LEEFTIJDSCATEGORIE[0][0] # M rec.materiaalklasse = MATERIAALKLASSE[0][0] # R # rec.materiaalklasse_overig = rec.sporter = sporter rec.naam = 'Top Schutter' rec.datum = parse_date('2017-08-27') rec.plaats = 'Papendal' rec.land = 'Nederland' rec.score = 1234 rec.max_score = 5678 rec.x_count = 56 # rec.score_notitie = # rec.is_european_record = # rec.is_world_record = rec.save() self.assertEqual(rec.score_str(), '1234 (56X)') self.assertEqual(rec.max_score_str(), '5678 (567X)') # Record 43 rec = IndivRecord() rec.volg_nr = 43 rec.discipline = DISCIPLINE[1][0] # 18 rec.soort_record = 'Test record para' rec.geslacht = GESLACHT[1][0] # V rec.leeftijdscategorie = LEEFTIJDSCATEGORIE[1][0] # S rec.materiaalklasse = 'R' # Recurve rec.para_klasse = 'Open' # rec.sporter = rec.naam = 'Top Schutter 2' rec.datum = datetime.datetime.now() rec.plaats = 'Ergens Anders' rec.land = 'Nederland' rec.score = 1235 # rec.score_notitie = # rec.is_european_record = # rec.is_world_record = rec.save() self.assertIsNotNone(str(rec)) self.assertEqual(rec.score_str(), '1235') # Record 44 rec = IndivRecord() rec.volg_nr = 44 rec.discipline = 
DISCIPLINE[2][0] # 25 rec.soort_record = '25m' rec.geslacht = GESLACHT[1][0] # V rec.leeftijdscategorie = LEEFTIJDSCATEGORIE[3][0] # C rec.materiaalklasse = 'R' # Recurve rec.sporter = sporter rec.naam = 'Petra Schutter' rec.datum = parse_date('2017-08-27') rec.plaats = 'Nergens' rec.land = 'Niederland' rec.score = 249 # rec.score_notitie = # rec.is_european_record = # rec.is_world_record = rec.save() self.assertIsNotNone(str(rec))
def _parse_date(date):
    """Thin wrapper over django's parse_date; returns None for malformed input."""
    return parse_date(date)
def test_correctly_birthday(self):
    """test is birthday save correctly"""
    from django.utils.dateparse import parse_date
    expected = parse_date(self.date_of_birth1)
    self.assertEqual(expected, self.user1.date_of_birth)
def export(request, event_url_name, type, job_pk=None, date_str=None):
    """Export an event's (or a single job's) shifts as an Excel or PDF file.

    Args:
        request: current HttpRequest; the user must be an admin of the job
            (when job_pk is given) or of the whole event.
        event_url_name: url_name of the Event to export.
        type: "excel" or "pdf"; anything else raises Http404.
        job_pk: optional Job primary key to restrict the export to one job.
        date_str: optional ISO date; restricts the export to shifts that
            begin on that day.

    Returns:
        HttpResponse with the file as an attachment, or the no-permission
        page when the user is not an admin.
    """
    # check for valid export type
    if type not in ["excel", "pdf"]:
        raise Http404

    # get event
    event = get_object_or_404(Event, url_name=event_url_name)

    # list of jobs for export
    if job_pk:
        job = get_object_or_404(Job, pk=job_pk)

        # check permission
        if not job.is_admin(request.user):
            return nopermission(request)

        jobs = [job, ]
        filename = "%s - %s" % (event.name, job.name)
    else:
        # check permission
        if not event.is_admin(request.user):
            return nopermission(request)

        jobs = event.job_set.all()
        filename = event.name

    # parse date
    date = None
    if date_str:
        try:
            date = parse_date(date_str)
        except ValueError:
            raise Http404

        # BUG FIX: parse_date returns None (without raising) for strings
        # that don't match YYYY-MM-DD at all; previously this fell through
        # and filtered shifts against begin__date=None.
        if date is None:
            raise Http404

        # check if there are any shifts with this start date
        if not Shift.objects.filter(job__in=jobs, begin__date=date).exists():
            raise Http404

        # if all jobs are shown, exclude all jobs without shifts on this day
        if not job_pk:
            jobs = jobs.filter(shift__begin__date=date).distinct()

        filename = "{} - {}_{:02d}_{:02d}".format(filename, date.year,
                                                  date.month, date.day)

    # escape filename
    filename = escape_filename(filename)

    # create buffer
    buffer = BytesIO()

    # do filetype specific stuff
    if type == 'excel':
        filename = "%s.xlsx" % filename
        content_type = "application/vnd.openxmlformats-officedocument" \
                       ".spreadsheetml.sheet"
        xlsx(buffer, event, jobs, date)
    elif type == 'pdf':
        filename = "%s.pdf" % filename
        content_type = 'application/pdf'
        pdf(buffer, event, jobs, date)

    # start http response
    response = HttpResponse(content_type=content_type)
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename

    # close buffer, send file
    data = buffer.getvalue()
    buffer.close()
    response.write(data)

    return response
def to_python_date(value):
    """Parse *value* into a date.

    Raises ValueError when a non-empty value cannot be parsed; falsy input
    simply yields None.
    """
    parsed = to_python_date_result = parse_date(value)
    if value and not parsed:
        raise ValueError('Can\'t convert "{}" to date'.format(value))
    return to_python_date_result
def sql_query(self):
    """Render the raw SQL template for one calendar month starting at self.month."""
    start = self.month
    # end of the window: exactly one month after the start date
    end = str(parse_date(start) + relativedelta(months=1))
    return self._raw_sql_query.format(location_id=self.location_id,
                                      from_date=start,
                                      till_date=end)
from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) logger = logging.getLogger(__name__) # Lambdas that attempt to convert something that failed while being parsed by `ast.literal_eval`. CASTERS = [ lambda a: parse_datetime(a), lambda a: parse_time(a), lambda a: parse_date(a), lambda a: parse_duration(a), lambda a: UUID(a), ] class InvalidKwarg(Exception): pass def _get_expr_string(expr: ast.expr) -> str: """ Builds a string based on traversing `ast.Attribute` and `ast.Name` expressions. Args: expr: Expression node of the the AST tree. Only handles `ast.Attribute` and `ast.Name` expressions.
def get(self, request):
    """
    Aggregate payment statistics over an inclusive date range.

    Query parameters ``start`` and ``end`` (ISO dates) are both required.
    Returns sales totals and per-day breakdowns for coworking-space and
    meeting-room payments, a per-method summary, and customer counts.
    """
    start = self.request.query_params.get('start', None)
    end = self.request.query_params.get('end', None)
    # Validation: both bounds are required.  FIX: the original used `and`,
    # so a request with exactly one parameter slipped through and crashed
    # in parse_date(None) below.
    if start is None or end is None:
        return Response({'error': 'date not provide'},
                        status=status.HTTP_400_BAD_REQUEST)
    start = parse_date(start)
    end = parse_date(end)
    # parse_date returns None for malformed input; reject if EITHER bound
    # failed to parse (again `or`, not `and`, so a single bad date cannot
    # leak a None into the queries below).
    if start is None or end is None:
        return Response({'error': 'date incorrect'},
                        status=status.HTTP_400_BAD_REQUEST)

    # Coworking Space: payments attached to a coworking-space subscription,
    # grouped per calendar day.
    query_coworking = Payment.objects.filter(
        date_created__date__gte=start,
        date_created__date__lte=end,
        coworkingspacesubscription__isnull=False)
    group_coworking = query_coworking.annotate(
        date=TruncDay('date_created', output_field=DateField())) \
        .values('date').annotate(count=Count('id'), amount=Sum('amount'))
    stat_coworking = []
    coworkingspace_sale = 0
    for result in group_coworking:
        stat_coworking.append(result)
        coworkingspace_sale += result['amount']

    # Meeting Room: payments attached to a meeting-room booking, per day.
    query_meetingroom = Payment.objects.filter(
        date_created__date__gte=start,
        date_created__date__lte=end,
        meetingroombooking__isnull=False)
    group_meetingroom = query_meetingroom.annotate(
        date=TruncDay('date_created', output_field=DateField())) \
        .values('date').annotate(count=Count('id'), amount=Sum('amount'))
    stat_meetingroom = []
    meetingroom_sale = 0
    for result in group_meetingroom:
        stat_meetingroom.append(result)
        meetingroom_sale += result['amount']

    # Payment Method: all payments in range grouped by method.
    query_method = Payment.objects.filter(date_created__date__gte=start,
                                          date_created__date__lte=end)
    group_method = query_method.values('method').annotate(
        count=Count('id'), amount=Sum('amount'))
    payment_method = []
    for result in group_method:
        payment_method.append(result)

    # Customer counts: "new" = joined inside the range, "old" = before it.
    query_customer = User.objects.filter(date_joined__date__lte=end)
    customer_all = query_customer.count()
    customer_new = query_customer.filter(
        date_joined__date__gte=start).count()
    customer_old = customer_all - customer_new

    return Response({
        'total_sale': coworkingspace_sale + meetingroom_sale,
        'coworkingspace_sale': coworkingspace_sale,
        'meetingroom_sale': meetingroom_sale,
        'coworkingspace_sale_by_date': stat_coworking,
        'meetingroom_sale_by_date': stat_meetingroom,
        'payment_method': payment_method,
        'customer_all': customer_all,
        'customer_new': customer_new,
        'customer_old': customer_old
    })
def value_decode(self, value):
    """
    Decode a stored value into a ``datetime.date``.

    Returns the parsed date for string input (``None`` if the string does
    not match the ISO format) and ``None`` for any non-string value.
    """
    # FIX: `basestring` exists only on Python 2 and raises NameError on
    # Python 3 (this file already uses Python-3-only syntax elsewhere).
    return parse_date(value) if isinstance(value, str) else None
def to_python(cls, value, **kwargs):
    """Coerce *value* to a ``datetime.date``; raise ``cls.exception`` if it cannot be parsed."""
    result = parse_date(force_text(value))
    if result is not None:
        return result
    raise cls.exception("Value {0} cannot be converted to a date object".format(value))
return o.strftime("%Y-%m-%d") elif isinstance(o, datetime.time): return o.strftime("%H:%M:%S.%f%z") raise TypeError("Cannot encode %r" % o) _PATTERNS = [ ( re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}"), ( lambda value: datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f"), lambda value: datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S"), lambda value: parse_datetime(value), ), ), (re.compile(r"\d{4}-\d{2}-\d{2}"), (lambda value: parse_date(value),)), (re.compile(r"\d{2}:\d{2}:\d{2}"), (lambda value: parse_time(value),)), ] def json_decode_hook(data): for key, value in list(data.items()): if not isinstance(value, six.string_types): continue for regex, fns in _PATTERNS: if regex.match(value): for fn in fns: try: data[key] = fn(value) break
def rfi_comite_proceso(request):
    """
    Build the fixed-income (RFI) committee report between two dates and
    render it.

    Expects GET parameters ``fecha_inicial`` and ``fecha_final`` (ISO dates).
    Rebuilds the temp table "RFI_rfi_generacion_comite_temporal" for the
    range and collects a series of raw-SQL aggregations into the template
    context.
    """
    datos={}
    suma_finales = 0
    suma_brokers = 0
    suma_banco_brokers = 0
    #generacion_total = 0
    fechas = request.GET.copy()
    # NOTE(review): missing/malformed GET parameters raise here (KeyError or
    # a parse failure) - no validation is performed.
    fecha_inicial = parse_date(fechas['fecha_inicial'])
    fecha_final = parse_date(fechas['fecha_final'])
    datos['fecha_inicial'] = fecha_inicial
    datos['fecha_final'] = fecha_final
    # Rebuild the committee temp table for the date range, then select from
    # it.  NOTE(review): RawQuerySets are lazy - the DELETE/INSERT only run
    # when `c` is iterated below, so later queries against the temp table
    # depend on that iteration happening first.
    c = rfi_beta.objects.raw('''DELETE FROM "RFI_rfi_generacion_comite_temporal";INSERT INTO "RFI_rfi_generacion_comite_temporal"(pais, categoria, nombre_cliente, generacion_brk, generacion_finales, generacion_bancos_brk, generacion_total, spread_brk, spread_finales, spread_banco_brk, generacion_brk_t_1, generacion_finales_t_1, generacion_bancos_brk_t_1, generacion_total_t_1) SELECT * from eq_rfi_generacion(%s,%s) WHERE generacion_total<>0;SELECT 1 as linea, * FROM "RFI_rfi_generacion_comite_temporal" order by generacion_total desc; ''',[fecha_inicial,fecha_final])
    datos['rfi_generaciones'] = c
    # Totals per client category (iterating `c` also executes the SQL above).
    for r in c:
        suma_finales += r.generacion_finales
        suma_brokers += r.generacion_brk
        suma_banco_brokers += r.generacion_bancos_brk
    datos['generacion_finales'] = suma_finales
    datos['generacion_brokers'] = suma_brokers
    datos['generacion_bancos_brokers'] = suma_banco_brokers
    # Overall desk income for the period.
    generacion_total = rfi_beta.objects.raw('''select 1 as linea, sum(ingreso_mesa) as total from "RFI_rfi_beta" where fecha between %s and %s ''',[fecha_inicial,fecha_final])
    for z in generacion_total:
        datos['generacion_total'] = z.total
    # Income and trade count grouped by country of risk.
    datos['cntry_of_risk'] = rfi_beta.objects.raw(''' select 1 as linea,country_of_risk as pais, count(papel) as conteo,sum(ingreso_mesa) as generacion from "RFI_rfi_beta" where fecha between %s and %s group by 2 order by generacion desc''',[fecha_inicial,fecha_final])
    # Daily series: average spread, traded notional and income.
    datos['series'] = rfi_beta.objects.raw(''' select 1 as linea, fecha, round(avg(spread_mesa),2) as spread, sum(abs(nominales)) as nominales, sum(ingreso_mesa) as generacion from "RFI_rfi_beta" where fecha between %s and %s group by fecha order by fecha desc ''',[fecha_inicial,fecha_final])
    # NOTE(review): the two queries below contain no %s placeholders yet
    # receive a params list - verify the DB driver tolerates this.
    datos['generacion_categoria'] = rfi_beta.objects.raw(''' select 1 as linea,categoria,count(categoria) as conteo,sum(generacion_total) as generacion from "RFI_rfi_generacion_comite_temporal" where generacion_total<>0 group by categoria ''',[fecha_inicial,fecha_final])
    datos['generacion_pais'] = rfi_beta.objects.raw(''' select 1 as linea,pais,count(pais) as conteo,sum(generacion_total) as generacion from "RFI_rfi_generacion_comite_temporal" where generacion_total<>0 group by pais ''',[fecha_inicial,fecha_final])
    # Broker activity with non-zero volume.
    datos['actividad_brokers'] = rfi_beta.objects.raw(''' select 1 as linea, * from eq_rfi_actividad_brokers(%s,%s) where monto<>0 order by monto desc ''',[fecha_inicial,fecha_final])
    # Per-security (papel) aggregates.
    datos['papeles'] = rfi_beta.objects.raw(''' select 1 as linea,papel,sum(ingreso_mesa) as ingreso_mesa,count(papel) as conteo,round(avg(spread_mesa),2) as spread_mesa_promedio,sum(abs(nominales)) as nominales from "RFI_rfi_beta" where fecha between %s and %s group by papel order by ingreso_mesa desc ''',[fecha_inicial,fecha_final])
    # Call the JSON-returning crosstab function defined in PostgreSQL.
    cross = rfi_beta.objects.raw(''' select 1 as linea, * from eq_rfi_generacion_crosstab_3() ''')
    # Trades between brokers/dealers only.
    datos['operaciones_entre_brks'] = rfi_beta.objects.raw('''select 1 as linea,a.fecha,a.papel,a.comprador,a.vendedor,a.spread_mesa,a.ingreso_mesa,b.categoria,c.categoria from "RFI_rfi_beta" a JOIN "RFI_clientes_rfi" b ON a.comprador=b.fondo JOIN "RFI_clientes_rfi" c ON a.vendedor=c.fondo where fecha between %s and %s and ( (b.categoria='DLR' and c.categoria='BKR') or (b.categoria='BKR' and c.categoria='DLR') or (b.categoria='BKR' and c.categoria='BKR') or (b.categoria='DLR' and c.categoria='DLR') ); ''',[fecha_inicial,fecha_final])
    encabezados_query = comite.objects.distinct('pais').values_list('pais',flat=True)
    # Reshape the crosstab JSON into a header row plus data rows, replacing
    # NULLs with 0 so the template can render numbers.
    for r in cross:
        resultado = r.datos
        tabla = []
        encabezados = [x for x in encabezados_query]
        encabezados.insert(0,'CATEGORIA')
        tabla.append(encabezados)
        for h in resultado:
            fila=[]
            fila = [v for k,v in h.items()]
            for i,item in enumerate(fila):
                if fila[i] is None:
                    fila[i]=0
            tabla.append(fila)
        datos['cross'] = tabla
    # Monthly income split: private-banking (FYNSA) vs. the rest.
    datos['metas']=rfi_beta.objects.raw(''' select 1 as linea,date_trunc('month',fecha)::date as mensual,coalesce(sum(ingreso_mesa) FILTER (WHERE vendedor='FYNSA' or comprador='FYNSA'),0) as ingreso_banca_privada,sum(ingreso_mesa) FILTER (WHERE vendedor!='FYNSA' and comprador!='FYNSA') as ingreso_resto from "RFI_rfi_beta" group by mensual order by mensual asc; ''')
    # Month-by-year income matrix built with PostgreSQL crosstab.
    # NOTE(review): the year columns 2014-2022 are hard-coded in the SQL.
    datos['generacion_mensual'] = rfi_beta.objects.raw('''select 1 as linea,mes,"2014","2015","2016","2017","2018","2019","2020",COALESCE("2021",0) as "2021",COALESCE("2022",0) as "2022" FROM crosstab('select date_part(''month'',fecha) as mes,date_part(''YEAR'',fecha) as agno, sum(ingreso_mesa) from "RFI_rfi_beta" group by mes,agno order by mes,agno','select m from generate_series(2014,2022) m') as ct(mes numeric, "2014" numeric, "2015" numeric,"2016" numeric,"2017" numeric,"2018" numeric,"2019" numeric, "2020" numeric,"2021" numeric,"2022" numeric) ''')
    # Moving pie-chart series (hard-coded 2020-2022 window).
    datos['tortas_moviles'] = rfi_beta.objects.raw(''' select 1 as linea,* from eq_rfi_serie_tortas_moviles() where mes between '2020-01-01' and '2022-01-01' order by mes asc; ''')
    return render(request,'comite-rfi-salida.html',datos)
def load_feature(self, feature, mapping, kind, level):
    """
    Import one GIS feature (German administrative region) into GeoRegion.

    Reads the feature's attributes (NUTS code, ARS identifier, name, kind,
    validity date), finds or creates the matching GeoRegion, attaches it to
    its parent in the region tree, and updates or creates the DB row.

    Returns the region's ARS identifier, or None (early return) for
    non-land features.
    """
    nuts = feature["NUTS"].as_string()
    gf = int(feature["GF"].as_string())
    if gf < 4:
        # Only import land masses
        return
    name = feature["GEN"].as_string()
    kind_detail = feature["BEZ"].as_string()
    nbd = feature["NBD"].as_string()
    # NBD == "ja" means the kind prefix is part of the official full name.
    full_name = "%s %s" % (kind_detail, name) if nbd == "ja" else name
    slug = slugify(full_name)
    geom = mapping.feature_kwargs(feature)["geom"]
    extra_data = {}
    try:
        # WSK: date the region became valid; "/" appears in some source
        # values and is normalized to "-" before parsing.
        wsk = feature["WSK"].as_string()
        if "/" in wsk:
            wsk = wsk.replace("/", "-")
        valid_on_date = parse_date(wsk)
        valid_on = localize_date(
            datetime.combine(valid_on_date, datetime.min.time())
        )
    except Exception as e:
        # NOTE(review): if feature["WSK"] itself raised, `wsk` is unbound
        # here and this print raises NameError - confirm/fix upstream.
        print(wsk)
        print(e)
        valid_on = None
    region_identifier = feature["ARS"].as_string()
    geo_region = GeoRegion.objects.filter(
        region_identifier=region_identifier, kind=kind
    ).first()
    region_exists = bool(geo_region)
    if not region_exists:
        # Try to find match: same kind and name under a shared ARS prefix
        # (region may have been re-keyed); remember the old ARS in data.
        geo_region = GeoRegion.objects.filter(
            region_identifier__startswith=region_identifier[:5],
            kind=kind,
            name=name,
        ).first()
        if geo_region:
            extra_data["former_ars"] = geo_region.data.get("former_ars", [])
            extra_data["former_ars"].append(geo_region.region_identifier)
        region_exists = bool(geo_region)
    parent = None
    tree_params = {}
    parent_region_identifier = region_identifier
    # Walk up the ARS hierarchy until an existing parent region is found
    # (or the identifier runs out, in which case the region becomes a root).
    while True:
        parent_region_identifier = get_higher_ars(parent_region_identifier)
        # NOTE(review): leftover debug print.
        print(name, region_identifier, parent_region_identifier)
        if parent_region_identifier:
            try:
                parent = self.georegions.get(
                    region_identifier=parent_region_identifier
                )
            except GeoRegion.DoesNotExist:
                # No region at this level - keep climbing.
                continue
            if not region_exists:
                # New child: bump the treebeard child counter on the parent.
                parent.numchild += 1
                parent.save(update_fields=["numchild"])
            if not region_exists or geo_region.part_of_id != parent.id:
                tree_params = get_new_child_params(parent)
                if GeoRegion.objects.filter(path=tree_params["path"]).exists():
                    # add_children(parent, get_children)
                    # raise Exception('Path already exists')
                    GeoRegion.fix_tree()
            if geo_region:
                geo_region.part_of = parent
        else:
            # No higher ARS level: region is placed at the tree root.
            tree_params = get_new_child_params(None)
        break
    data = {
        "slug": slug,
        "name": name,
        "kind": kind,
        "kind_detail": kind_detail,
        "level": level,
        "region_identifier": region_identifier,
        "global_identifier": nuts,
        "geom": geom,
        "area": feature.geom.area,
        "valid_on": valid_on,
        "invalid_on": None,
        "part_of": parent,
        "data": {
            "label": full_name,
            "nuts": nuts,
            "DEBKG_ID": feature["DEBKG_ID"].as_string(),
            **extra_data,
        },
    }
    if region_exists:
        # Update in place; tree position params are only applied on create.
        GeoRegion.objects.filter(id=geo_region.id).update(**data)
    else:
        data.update(tree_params)
        GeoRegion.objects.create(**data)
    return region_identifier
def all_home(request):
    """
    Render the day-by-day archive page.

    GET parameter ``d`` selects an ISO date between the site origin
    (2018-11-10) and today; anything else redirects to today's view at
    the canonical '/all/' URL.  The context always carries ISO date
    strings: 'day', 'yesterday', 'today' and 'tomorrow' ('' for today's
    own page, which has no tomorrow link).
    """
    if request.method == "GET":
        from django.utils.timezone import localdate
        from django.utils.dateparse import parse_date
        from datetime import timedelta

        d = request.GET.get('d', None)
        today = localdate()
        today_str = str(today)

        if d is None:
            # Canonicalize: today's page only lives at exactly '/all/'.
            if not request.get_full_path() == '/all/':
                return redirect(reverse('baseapp:all_home'))
            yesterday_str = str(today - timedelta(days=1))
            return render(request, 'baseapp/all_home.html',
                          {'day': today_str, 'yesterday': yesterday_str,
                           'today': today_str, 'tomorrow': ''})

        date = parse_date(d)
        if date is None:
            # Malformed date: back to today's page.
            return redirect(reverse('baseapp:all_home'))
        if date == today:
            # Today's page is served without the ?d= parameter.
            return redirect(reverse('baseapp:all_home'))

        # first_post_created = Post.objects.first().created
        # origin_str = str(first_post_created).split(' ')[0]
        # Date of the first post; hard-coded rather than queried.
        origin = parse_date('2018-11-10')
        # Only dates between the origin and today are browsable.
        if not origin <= date <= today:
            return redirect(reverse('baseapp:all_home'))

        day_str = str(date)
        yesterday_str = str(date - timedelta(days=1))
        tomorrow_str = str(date + timedelta(days=1))
        return render(request, 'baseapp/all_home.html',
                      {'day': day_str, 'yesterday': yesterday_str,
                       'today': today_str, 'tomorrow': tomorrow_str})
def make_reservation(request):
    """
    Create a tool reservation from a mobile POST request.

    Validates origin/referer, parses date/start/end, runs the reservation
    policy check, assigns the billing project (staff may charge multiple
    customers via ReservationProject rows), applies tool configuration and
    saves.  Returns a success page or an error page / 400 response.
    """
    # due to unexplained CSRF failures for users try a workaround
    http_origins = [
        'https://rea-demo.mri.psu.edu:8080',
        'https://rea-demo.mri.psu.edu:8888',
        'https://leo.psu.edu'
    ]
    if request.META.get('HTTP_ORIGIN') in http_origins:
        origin = request.META.get('HTTP_ORIGIN')
        # Reconstruct the expected referer from the posted tool/date and
        # require an exact match.
        referer = str(origin) + "/new_reservation/" + str(
            request.POST.get('tool_id')) + "/" + str(
            request.POST['date']) + "/"
        if referer != request.META.get('HTTP_REFERER'):
            return render(request, 'mobile/error.html',
                          {'message': 'Unknown referer - request rejected'})
    else:
        return render(request, 'mobile/error.html',
                      {'message': 'Unknown origin - request rejected'})
    # Create a reservation for a user.
    # NOTE(review): bare except - any failure (including parse_date/
    # parse_time returning None into datetime.combine) becomes this error.
    try:
        date = parse_date(request.POST['date'])
        start = localize(
            datetime.combine(date, parse_time(request.POST['start'])))
        end = localize(datetime.combine(date, parse_time(request.POST['end'])))
    except:
        return render(
            request, 'mobile/error.html', {
                'message':
                'Please enter a valid date, start time, and end time for the reservation.'
            })
    tool = get_object_or_404(Tool, id=request.POST.get('tool_id'))
    # Create the new reservation:
    reservation = Reservation()
    reservation.user = request.user
    reservation.creator = request.user
    reservation.tool = tool
    reservation.start = start
    reservation.end = end
    reservation.short_notice = determine_insufficient_notice(tool, start)
    policy_problems, overridable = check_policy_to_save_reservation(
        request, None, reservation, request.user, False)
    # If there was a problem in saving the reservation then return the error...
    if policy_problems:
        return render(request, 'mobile/error.html',
                      {'message': policy_problems[0]})
    # All policy checks have passed.
    if request.user.is_staff:
        mode = request.POST['staff_charge']
        if mode == "self":
            # make a reservation for the user and don't add a record to the ReservationProject table
            active_projects = request.user.active_projects()
            if len(active_projects) == 1:
                reservation.project = active_projects[0]
            else:
                try:
                    reservation.project = Project.objects.get(
                        id=request.POST['project_id'])
                except:
                    msg = 'No project was selected. Please return to the <a href="/calendar/">calendar</a> to try again.'
                    return render(request, 'mobile/error.html',
                                  {'message': msg})
        else:
            # add ReservationProject entries for the customers submitted by the staff member
            # NOTE(review): the reservation is saved up-front so the
            # ReservationProject FKs can point at it, then deleted again on
            # validation failure below.
            reservation_projects = {}
            reservation.save()
            for key, value in request.POST.items():
                if is_valid_field(key):
                    # Field names look like "<attribute>__<index>".
                    attribute, separator, index = key.partition("__")
                    index = int(index)
                    if index not in reservation_projects:
                        reservation_projects[index] = ReservationProject()
                        reservation_projects[index].reservation = reservation
                        reservation_projects[index].created = timezone.now()
                        reservation_projects[index].updated = timezone.now()
                    if attribute == "chosen_user":
                        if value is not None and value != "":
                            reservation_projects[
                                index].customer = User.objects.get(id=value)
                        else:
                            reservation.delete()
                            return HttpResponseBadRequest(
                                'Please choose a user for whom the tool will be run.'
                            )
                    if attribute == "chosen_project":
                        # "-1" is the placeholder "no project" option.
                        if value is not None and value != "" and value != "-1":
                            reservation_projects[
                                index].project = Project.objects.get(id=value)
                        else:
                            reservation.delete()
                            return HttpResponseBadRequest(
                                'Please choose a project for charges made during this run.'
                            )
            for r in reservation_projects.values():
                r.full_clean()
                r.save()
    else:
        try:
            reservation.project = Project.objects.get(
                id=request.POST['project_id'])
        except:
            # NOTE(review): this branch is only reached for non-staff users,
            # so the inner is_staff check is always true-negative here.
            if not request.user.is_staff:
                return render(request, 'mobile/error.html', {
                    'message':
                    'You must specify a project for your reservation'
                })
    reservation.additional_information, reservation.self_configuration, res_conf = extract_configuration(
        request)
    # Reservation can't be short notice if the user is configuring the tool themselves.
    if reservation.self_configuration:
        reservation.short_notice = False
    # NOTE(review): the result of this second policy check is ignored - the
    # reservation is saved regardless. Confirm whether that is intentional.
    policy_problems, overridable = check_policy_to_save_reservation(
        request, None, reservation, request.user, False)
    reservation.save()
    for rc in res_conf:
        rc.reservation = reservation
        rc.save()
    return render(request, 'mobile/reservation_success.html',
                  {'new_reservation': reservation})