def test_coreitemvalues(self):
    self.assertEqual(self.item.get('type'), 'text')
    self.assertEqual(self.item.get('urgency'), 4)
    self.assertEqual(self.item.get('version'), '1')
    self.assertEqual(self.item.get('versioncreated'), utc.localize(datetime.datetime(2014, 8, 29, 13, 49, 51)))
    self.assertEqual(self.item.get('firstcreated'), utc.localize(datetime.datetime(2014, 8, 29, 13, 49, 51)))
    self.assertEqual(self.item.get('pubstatus'), 'usable')
def test_save_rearrange_episodes(self):
    self.assertEqual(self.episode.issue_date, utc.localize(datetime.datetime(2014, 1, 6, 14, 0, 0)))
    self.episode.issue_date = None
    self.episode.save()
    self.schedule.save()
    self.episode.refresh_from_db()
    self.assertEqual(self.episode.issue_date, utc.localize(datetime.datetime(2014, 1, 6, 14, 0, 0)))
def test_add_to_log(tmpdir):
    logs = [
        FoodLog(utc.localize(datetime.datetime.utcnow()), 1, 5,
                ['sleepy', 'hungry'], ['broccoli'], ['test'], SNACK, ""),
        FoodLog(utc.localize(datetime.datetime.utcnow()), 5, 2,
                ['happy'], ['popcorn', 'butter'], ['movies', 'test'], SNACK, "I am a note"),
    ]
    log_file = str(tmpdir.join("log.txt"))
    for log in logs:
        add_log_to_file(log_file, log)
    with open(log_file, 'r') as infile:
        results = [json_to_food_log(line) for line in infile]
    assert logs == results
def setUp(self):
    self.calendar = Calendar.objects.create(name='Calendar', is_active=True)
    self.recurrences = recurrence.Recurrence(
        rrules=[recurrence.Rule(recurrence.WEEKLY, until=utc.localize(datetime.datetime(2014, 1, 31)))])
    programme = Programme.objects.filter(name="Classic hits").get()
    programme.name = "Classic hits 2"
    programme.slug = None
    programme.id = programme.pk = None
    programme.save()
    self.programme = programme
    self.schedule = Schedule.objects.create(
        programme=self.programme,
        type='L',
        recurrences=self.recurrences,
        start_dt=utc.localize(datetime.datetime(2014, 1, 6, 14, 0, 0)),
        calendar=self.calendar)
    self.episode = Episode.objects.create(
        title='Episode 1',
        programme=programme,
        summary='',
        season=1,
        number_in_season=1,
    )
    self.programme.rearrange_episodes(pytz.utc.localize(datetime.datetime(1970, 1, 1)), Calendar.get_active())
    self.episode.refresh_from_db()
def ics(group):
    locale.setlocale(locale.LC_ALL, 'fr_FR.utf8')
    login, pw = read_credentials()
    request = requests.get(
        'https://edt.univ-nantes.fr/sciences/' + group + '.ics',
        auth=(login, pw))
    if not 200 <= request.status_code < 300:
        return "Error status while retrieving the ics file."
    format = "%Y%m%dT%H%M%SZ"
    now = datetime.utcnow().strftime(format)
    paris = pytz.timezone('Europe/Paris')
    current = '99991231T235959Z'
    dtstart = dtend = description = ''
    for component in Calendar.from_ical(request.text).walk():
        if component.name == 'VEVENT':
            current_start = component.get('DTSTART').to_ical()
            if now > current_start:
                continue
            if current_start < current:
                current = current_start
                description = unicode(component.get('DESCRIPTION'))
                start = component.get('DTSTART').to_ical()
                end = component.get('DTEND').to_ical()
                dtutcstart = utc.localize(datetime.strptime(start, format))
                dtutcend = utc.localize(datetime.strptime(end, format))
                dtstart = dtutcstart.astimezone(paris)
                dtend = dtutcend.astimezone(paris)
    result = (u"Prochain cours le {date} de {start} à {end} :\n"
              "{description}").format(
        date=dtstart.strftime("%A %d/%m/%Y"),
        start=dtstart.strftime("%Hh%M"),
        end=dtend.strftime("%Hh%M"),
        description=description).encode('utf8').strip()
    return result
def extract_mail(**mail):
    agent = mail['from']
    if agent == '*****@*****.**':
        subject = mail['subject']
        content = mail['plain']
        data = content_extractor(content)
        if data['account'] == '0113011666':
            if subject.find('Credit') != -1:
                try:
                    obj = Wallet.objects.get(walletID=data['walletID'])
                    obj.amount = obj.amount + data['amount']
                    obj.datetime = utc.localize(datetime.datetime.strptime(
                        data['date'] + " " + data['time'], '%d-%b-%Y %I:%M%p'))
                    obj.ack = True
                    obj.save()
                    wall = WalletLog(wallet=obj, amount=data['amount'],
                                     datetime=utc.localize(datetime.datetime.utcnow()),
                                     report='savings')
                    wall.save()
                except Wallet.DoesNotExist:
                    return HttpResponseNotFound()
            elif subject.find('Debit') != -1:
                return HttpResponseNotFound()
            else:
                return HttpResponseNotFound()
        else:
            return HttpResponseNotFound()
def update_from_facebook():
    resp = requests.get('https://graph.facebook.com/207150099413259/feed',
                        params={'access_token': app.config['FACEBOOK_ACCESS_TOKEN']})
    if 200 <= resp.status_code < 400:
        posts = resp.json()['data']
        while posts:
            latest_post = posts.pop(0)
            if 'open' in latest_post['message'].lower() or 'close' in latest_post['message'].lower():
                break
        latest_post['created_time'] = utc.localize(datetime.strptime(
            latest_post['created_time'], '%Y-%m-%dT%H:%M:%S+0000'))
    else:
        return False
    latest_db_record = Call.query.order_by(Call.date.desc()).first()
    if latest_db_record.date > latest_post['created_time']:
        return True
    if latest_db_record and latest_post['id'] == latest_db_record.id:
        utcnow = utc.localize(datetime.utcnow())
        since_update = utcnow - latest_db_record.date
        eastern_hr = utcnow.astimezone(timezone('US/Eastern')).hour
        if 9 <= eastern_hr < 18:
            # 12 hr grace period if after 9AM, but before usual closing time
            return since_update < timedelta(hours=12)
        else:
            # 24 hr grace period if before 9AM (or after closing)
            return since_update < timedelta(hours=24)
    db.session.add(Call(id=latest_post['id'],
                        recording_url='https://facebook.com/' + latest_post['id'],
                        transcript=latest_post['message'],
                        date=latest_post['created_time']))
    db.session.commit()
    return True
def test_pledge_creation_triggers_pact_success_when_goal_met(
        self, create_notifications_for_users, send_notifications):
    """
    Verify that when we create the last pledge needed to meet a Pact's
    goal, the Pact is updated and an Event is created appropriately.
    """
    # Setup scenario
    now = utc_tz.localize(datetime(2015, 6, 1))
    deadline = utc_tz.localize(datetime(2015, 6, 6))
    pact = G(Pact, goal=2, deadline=deadline)

    # Verify initial assumptions
    self.assertEqual(0, Event.objects.count())

    # Run code
    with freeze_time(now):
        G(Pledge, pact=pact)

    # Verify expectations
    self.assertEqual(2, pact.pledge_count)
    self.assertTrue(Event.objects.filter(name='pact_goal_met').exists())
    self.assertEqual({
        'subject': 'Pact Succeeded!',
        'pact': pact.id,
        'met_goal': True,
    }, Event.objects.get(name='pact_goal_met').context)
    self.assertEqual(
        set([p.account for p in pact.pledge_set.all()]),
        set(create_notifications_for_users.call_args_list[0][0][0]))
    self.assertEqual(1, send_notifications.call_count)
def test_calculate_pass_slot(self):
    """UNIT test: services.common.simulation.trigger_event
    Validates the calculation of the pass slots that occur during an
    availability slot.
    """
    # ### TODO Understand inaccuracies between PyEphem and GPredict
    # ### TODO when the minimum contact elevation angle increases,
    # ### TODO what is the influence of body.compute(observer) within the
    # ### TODO simulation loop?
    if self.__verbose_testing:
        print('>>> test_calculate_pass_slot:')
    self.__simulator.set_groundstation(self.__gs_1)
    self.__simulator.set_spacecraft(
        tle_models.TwoLineElement.objects.get(identifier=self.__sc_1_tle_id)
    )
    if self.__verbose_testing:
        print(self.__simulator.__unicode__())
    pass_slots = self.__simulator.calculate_pass_slot(
        start=pytz_utc.localize(datetime.today()),
        end=pytz_utc.localize(datetime.today()) + timedelta(days=3)
    )
    if self.__verbose_testing:
        print('# ### RESULTS:')
        for p in pass_slots:
            print('[' + str(p[0]) + ', ' + str(p[1]) + ']')
def handle(self, *args, **options):
    course_keys = None
    modified_start = None
    modified_end = None

    run_mode = get_mutually_exclusive_required_option(options, 'delete', 'dry_run')
    courses_mode = get_mutually_exclusive_required_option(options, 'courses', 'all_courses')
    db_table = options.get('db_table')

    if db_table not in {'subsection', 'course', None}:
        raise CommandError('Invalid value for db_table. Valid options are "subsection" or "course" only.')

    if options.get('modified_start'):
        modified_start = utc.localize(datetime.strptime(options['modified_start'], DATE_FORMAT))

    if options.get('modified_end'):
        if not modified_start:
            raise CommandError('Optional value for modified_end provided without a value for modified_start.')
        modified_end = utc.localize(datetime.strptime(options['modified_end'], DATE_FORMAT))

    if courses_mode == 'courses':
        course_keys = parse_course_keys(options['courses'])

    log.info("reset_grade: Started in %s mode!", run_mode)

    operation = self._query_grades if run_mode == 'dry_run' else self._delete_grades

    if db_table == 'subsection' or db_table is None:
        operation(PersistentSubsectionGrade, course_keys, modified_start, modified_end)
    if db_table == 'course' or db_table is None:
        operation(PersistentCourseGrade, course_keys, modified_start, modified_end)

    log.info("reset_grade: Finished in %s mode!", run_mode)
def order(group):
    global request
    if request == "":
        # object received from the connection via the connect()
        # function defined below
        request = connect(group)
    paris = pytz.timezone('Europe/Paris')
    format = "%Y%m%dT%H%M%SZ"
    datefind = datetime(2014, 4, 16, 11)  # careful: 04 is invalid, 4 is
                                          # (octal literals are not allowed in Python!)
    find = datefind.strftime("%d/%m/%Y/%Hh%M")
    ffind = utc.localize(datefind)
    fffind = ffind.astimezone(paris)
    # weeks 4 through 22 (19 weeks)
    # 6 days per week
    # 8 slots per day
    # array of 19 * 6 * 8 = 912 cells
    crenaux = [1] * 912
    for component in Calendar.from_ical(request.text).walk():
        if component.name == 'VEVENT':
            start = component.get('DTSTART').to_ical()
            dtutcstart = utc.localize(datetime.strptime(start, format))
            dtstart = dtutcstart.astimezone(paris)
            fstart = dtstart.strftime("%d/%m/%Y/%Hh%M")
            end = component.get('DTEND').to_ical()
            dtutcend = utc.localize(datetime.strptime(end, format))
            dtend = dtutcend.astimezone(paris)
            fend = dtend.strftime("%d/%m/%Y/%Hh%M")
            getCrenaux(crenaux, dtstart, dtend)
    return crenaux
def _gacha_availability(self, cards, gacha_list):
    print("trace _gacha_availability", cards)
    gacha_map = {x.id: x for x in gacha_list}
    ga = defaultdict(lambda: [])
    for k in cards:
        ga[k]  # force the empty list to be created and cached

    with self as s:
        ents = s.query(GachaPresenceEntry).filter(GachaPresenceEntry.card_id.in_(cards)).all()

    def getgacha(gid):
        if gid in gacha_map:
            return gacha_map[gid]
        else:
            return unknown_gacha_t("??? (unknown gacha ID: {0})".format(gid))

    for e in ents:
        if e.gacha_id_first == e.gacha_id_last or getgacha(e.gacha_id_first).name == getgacha(e.gacha_id_last).name:
            name = getgacha(e.gacha_id_first).name
        else:
            name = None

        # FIXME do this better
        if name == "プラチナオーディションガシャ":
            name = None

        ga[e.card_id].append(Availability(
            Availability._TYPE_GACHA, name,
            utc.localize(datetime.utcfromtimestamp(e.avail_start)),
            utc.localize(datetime.utcfromtimestamp(e.avail_end)), []))

    for v in ga.values():
        v.sort(key=lambda x: x.start)
        combine_availability(v)
    return ga
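# A minimal sketch showing that the utcfromtimestamp() + utc.localize() pair
# used above can be collapsed into one call: datetime.fromtimestamp() with an
# explicit tz returns an aware datetime directly (utcfromtimestamp() is
# deprecated as of Python 3.12); epoch_to_aware_utc is a hypothetical helper.
from datetime import datetime
from pytz import utc

def epoch_to_aware_utc(ts):
    # One-step, timezone-aware conversion from a Unix timestamp.
    return datetime.fromtimestamp(ts, tz=utc)

assert epoch_to_aware_utc(0).isoformat() == '1970-01-01T00:00:00+00:00'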
def check_results(self, holiday, start, end, expected):
    assert list(holiday.dates(start, end)) == expected

    # Verify that timezone info is preserved.
    assert (list(holiday.dates(utc.localize(Timestamp(start)),
                               utc.localize(Timestamp(end))))
            == [utc.localize(dt) for dt in expected])
def test_dates_between_earlier_end_by_programme(self):
    self.programme.end_date = datetime.date(2014, 1, 7)
    self.programme.save()
    self.schedule.refresh_from_db()
    self.assertCountEqual(
        self.schedule.dates_between(
            utc.localize(datetime.datetime(2014, 1, 1)),
            utc.localize(datetime.datetime(2014, 1, 14))),
        [utc.localize(datetime.datetime(2014, 1, 6, 14, 0))])
def _parse_value(self, val_el, obj):
    """
    Parse the node val_el as a constant.
    """
    if val_el is not None and val_el.text is not None:
        ntag = self._retag.match(val_el.tag).groups()[1]
    else:
        ntag = "Null"
    obj.valuetype = ntag
    if ntag == "Null":
        obj.value = None
    elif hasattr(ua.ua_binary.Primitives1, ntag):
        # Elementary types have their parsing directly relying on ua_type_to_python.
        obj.value = ua_type_to_python(val_el.text, ntag)
    elif ntag == "DateTime":
        obj.value = ua_type_to_python(val_el.text, ntag)
        # According to specs, DateTime should be either UTC or with a timezone.
        if obj.value.tzinfo is None or obj.value.tzinfo.utcoffset(obj.value) is None:
            obj.value = utc.localize(obj.value)  # FIXME Forcing to UTC if unaware, maybe should raise?
    elif ntag in ("ByteString", "String"):
        mytext = val_el.text
        if mytext is None:
            # Support importing null strings.
            mytext = ""
        mytext = mytext.replace('\n', '').replace('\r', '')
        obj.value = ua_type_to_python(mytext, ntag)
    elif ntag == "Guid":
        self._parse_contained_value(val_el, obj)
        # Override parsed string type to guid.
        obj.valuetype = ntag
    elif ntag == "NodeId":
        id_el = val_el.find("uax:Identifier", self.ns)
        if id_el is not None:
            obj.value = id_el.text
    elif ntag == "ExtensionObject":
        obj.value = self._parse_ext_obj(val_el)
    elif ntag == "LocalizedText":
        obj.value = self._parse_body(val_el)
    elif ntag == "ListOfLocalizedText":
        obj.value = self._parse_list_of_localized_text(val_el)
    elif ntag == "ListOfExtensionObject":
        obj.value = self._parse_list_of_extension_object(val_el)
    elif ntag.startswith("ListOf"):
        # Default case for "ListOf" types.
        # Should stay after particular cases (e.g.: "ListOfLocalizedText").
        obj.value = []
        for child_el in val_el:
            tmp = NodeData()
            self._parse_value(child_el, tmp)
            obj.value.append(tmp.value)
    else:
        # Missing according to string_to_val: XmlElement, ExpandedNodeId,
        # QualifiedName, StatusCode.
        # Missing according to ua.VariantType (also missing in string_to_val):
        # DataValue, Variant, DiagnosticInfo.
        self.logger.warning("Parsing value of type '%s' not implemented", ntag)
def test_between(self):
    between = Transmission.between(
        utc.localize(datetime.datetime(2015, 1, 6, 11, 0, 0)),
        utc.localize(datetime.datetime(2015, 1, 6, 17, 0, 0)))
    self.assertListEqual(
        [(t.slug, t.start) for t in list(between)],
        [('the-best-wine', utc.localize(datetime.datetime(2015, 1, 6, 11, 0))),
         ('local-gossips', utc.localize(datetime.datetime(2015, 1, 6, 12, 0))),
         ('classic-hits', utc.localize(datetime.datetime(2015, 1, 6, 14, 0)))])
def test_between_by_queryset(self):
    between = Transmission.between(
        utc.localize(datetime.datetime(2015, 1, 6, 12, 0, 0)),
        utc.localize(datetime.datetime(2015, 1, 6, 17, 0, 0)),
        schedules=Schedule.objects.filter(
            calendar=self.another_calendar).all())
    self.assertListEqual(
        [(t.slug, t.start) for t in list(between)],
        [('classic-hits', utc.localize(datetime.datetime(2015, 1, 6, 16, 30, 0)))])
def utc(cls, shift):
    try:
        s = utc.localize(shift.start)
        e = utc.localize(shift.end)
    except ValueError:
        # localize() raises ValueError on already-aware datetimes
        s = shift.start.astimezone(utc)
        e = shift.end.astimezone(utc)
    return shift_t(s, e)
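# A minimal sketch of the check-first alternative to the try/except pattern
# above; ensure_utc is a hypothetical helper relying on the documented fact
# that pytz's localize() raises ValueError for already-aware datetimes.
from datetime import datetime
from pytz import utc

def ensure_utc(dt):
    # Naive datetimes are tagged as UTC; aware ones are converted to UTC.
    if dt.tzinfo is None:
        return utc.localize(dt)
    return dt.astimezone(utc)

assert ensure_utc(datetime(2020, 1, 1)).utcoffset().total_seconds() == 0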
def add_timestamps(self, item):
    """
    Adds firstcreated and versioncreated timestamps to item

    :param item: object which can be saved to ingest collection
    :type item: dict
    """
    item['firstcreated'] = utc.localize(item['firstcreated']) if item.get('firstcreated') else utcnow()
    item['versioncreated'] = utc.localize(item['versioncreated']) if item.get('versioncreated') else utcnow()
def backwards(self, orm):
    "Write your backwards methods here."
    for meeting in orm.Meeting.objects.all():
        t = meeting.begin_time.replace(tzinfo=None)
        meeting.begin_time = utc.localize(t)
        t = meeting.end_time.replace(tzinfo=None)
        meeting.end_time = utc.localize(t)
        meeting.save()
def changes(request):
    """
    select vs.id, v.updated, h.domainelement_pk, v.domainelement_pk from value_history \
    as h, value as v, valueset as vs where h.pk = v.pk and v.valueset_pk = vs.pk;
    """
    # changes in the 2011 edition: check values with an updated date after 2011 and
    # before 2013
    E2009 = utc.localize(datetime(2009, 1, 1))
    E2012 = utc.localize(datetime(2012, 1, 1))
    E2014 = utc.localize(datetime(2014, 6, 30))
    E2015 = utc.localize(datetime(2015, 6, 30))
    history = inspect(Value.__history_mapper__).class_
    query = DBSession.query(Value)\
        .outerjoin(history, Value.pk == history.pk)\
        .join(ValueSet)\
        .order_by(ValueSet.parameter_pk, ValueSet.language_pk)\
        .options(joinedload_all(Value.valueset, ValueSet.language),
                 joinedload_all(Value.valueset, ValueSet.parameter))
    changes2011 = query.join(ValueSet.parameter)\
        .filter(Parameter.id.contains('A'))\
        .filter(Parameter.id != '143A')\
        .filter(Parameter.id != '144A')\
        .filter(or_(
            and_(E2009 < Value.updated, Value.updated < E2012),
            and_(history.updated != None, E2009 < history.updated, history.updated < E2012)))
    changes2013 = query.filter(or_(
        and_(E2012 < Value.updated, Value.updated < E2014),
        and_(E2012 < history.updated, history.updated < E2014)))
    changes2014 = query.filter(or_(
        and_(E2014 < Value.updated, Value.updated < E2015),
        and_(E2014 < history.updated, history.updated < E2015)))
    #
    # TODO:
    #
    # history = inspect(ValueSet.__history_mapper__).class_
    # current = DBSession.query(ValueSet.pk).subquery()
    # removals2013 = DBSession.query(Parameter.id, Parameter.name, count(history.pk))\
    #     .filter(Parameter.pk == history.parameter_pk)\
    #     .filter(not_(history.pk.in_(current)))\
    #     .group_by(Parameter.pk, Parameter.id, Parameter.name)\
    #     .order_by(Parameter.pk)

    def grouped(changes):
        return groupby([v.valueset for v in changes], lambda vs: vs.parameter)

    return {
        'changes2011': grouped(changes2011),
        'changes2013': grouped(changes2013),
        'changes2014': grouped(changes2014),
        'removals2013': []}
def __init__(self, *args, **kwargs):
    user = kwargs.pop('user')
    super(WriteSMSForm, self).__init__(*args, **kwargs)
    if user.is_superuser:
        self.fields['recipients'].queryset = User.objects.filter(
            is_active=True, mobile_marketing_optin=True).exclude(mobile='').order_by('last_name')
    else:
        self.fields['recipients'].queryset = User.objects.filter(
            chapter=user.chapter, is_active=True,
            mobile_marketing_optin=True).exclude(mobile='').order_by('last_name')
    self.fields['list'].queryset = UserList.objects.filter(chapter=user.chapter)
    # Note: localizing datetime.now() as UTC assumes the server clock runs in
    # UTC; datetime.utcnow() would make that assumption explicit.
    self.fields['schedule_time'].initial = utc.localize(datetime.now()).astimezone(user.tz_obj())
    self.fields['schedule_date'].initial = utc.localize(datetime.now()).astimezone(user.tz_obj())
def test_live_pacts_returned(self):
    """
    Verify Pact.live_pacts only returns active pacts.
    """
    # Setup scenario
    live = G(Pact, deadline=utc_tz.localize(datetime(2015, 4, 20)))
    G(Pact, deadline=utc_tz.localize(datetime(2015, 1, 1)))

    # Run code and verify expectations
    with freeze_time(datetime(2015, 4, 10)):
        self.assertEqual([live], list(Pact.objects.live_pacts))
def test_dates_between_includes_started_episode(self):
    self.assertCountEqual(
        self.schedule.dates_between(
            utc.localize(datetime.datetime(2014, 1, 2, 0, 0, 0)),
            utc.localize(datetime.datetime(2014, 1, 3, 23, 59, 59))
        ),
        [
            utc.localize(datetime.datetime(2014, 1, 1, 23, 30, 0)),  # Not finished yet
            utc.localize(datetime.datetime(2014, 1, 2, 23, 30, 0)),
            utc.localize(datetime.datetime(2014, 1, 3, 23, 30, 0)),
        ]
    )
def test_number_to_datetime(self):
    pair = (
        1403272847,
        utc.localize(datetime(2014, 6, 20, 14, 0, 47, 0)),
    )
    self.assertEqual(make_datetime(pair[0]), pair[1])

    pair = (
        1403272847.361077,
        utc.localize(datetime(2014, 6, 20, 14, 0, 47, 361077)),
    )
    self.assertEqual(make_datetime(pair[0]), pair[1])
def check_results(self, holiday, start, end, expected):
    self.assertEqual(list(holiday.dates(start, end)), expected)

    # Verify that timezone info is preserved.
    self.assertEqual(
        list(
            holiday.dates(
                utc.localize(Timestamp(start)),
                utc.localize(Timestamp(end)),
            )
        ),
        [utc.localize(dt) for dt in expected],
    )
def test_rearrange_episodes(self):
    self.programme.rearrange_episodes(pytz.utc.localize(datetime.datetime(2015, 1, 1)), Calendar.get_active())
    self.assertListEqual(
        [e.issue_date for e in self.programme.episode_set.all().order_by('issue_date')[:5]],
        [
            utc.localize(datetime.datetime(2015, 1, 1, 14, 0)),
            utc.localize(datetime.datetime(2015, 1, 2, 14, 0)),
            utc.localize(datetime.datetime(2015, 1, 3, 14, 0)),
            utc.localize(datetime.datetime(2015, 1, 4, 14, 0)),
            utc.localize(datetime.datetime(2015, 1, 5, 14, 0)),
        ]
    )
def test_available_dates_after(self):
    Schedule.objects.create(
        programme=self.programme,
        calendar=self.calendar,
        type="L",
        start_dt=utc.localize(datetime.datetime(2015, 1, 6, 16, 0, 0)),
        recurrences=recurrence.Recurrence(
            rrules=[recurrence.Rule(recurrence.WEEKLY)]))

    dates = next_dates(self.calendar, self.programme, utc.localize(datetime.datetime(2015, 1, 5)))
    self.assertEqual(next(dates), utc.localize(datetime.datetime(2015, 1, 5, 14, 0)))
    self.assertEqual(next(dates), utc.localize(datetime.datetime(2015, 1, 6, 14, 0)))
    self.assertEqual(next(dates), utc.localize(datetime.datetime(2015, 1, 6, 16, 0)))
def get(self, request):
    url = "{}/docker/healing".format(settings.TSURU_HOST)
    resp = requests.get(url, headers=self.authorization).json() or []
    healings = 0
    for healing in resp:
        end_time = parser.parse(healing['EndTime'])
        if end_time.tzinfo:
            end_time = end_time.astimezone(utc)
        else:
            end_time = utc.localize(end_time)
        now = utc.localize(datetime.utcnow())
        if now - end_time < timedelta(days=1):
            healings += 1
    return JsonResponse({"healing": healings}, safe=False)
def affiche_cours(start, end, description):
    format = "%Y%m%dT%H%M%SZ"
    paris = pytz.timezone('Europe/Paris')
    dtutcstart = utc.localize(datetime.strptime(start, format))
    dtutcend = utc.localize(datetime.strptime(end, format))
    dtstart = dtutcstart.astimezone(paris)
    dtend = dtutcend.astimezone(paris)
    result = u"Prochain cours le {date} de {start} à {end}".format(
        date=dtstart.strftime("%A %d/%m/%Y"),
        start=dtstart.strftime("%Hh%M"),
        end=dtend.strftime("%Hh%M"))
    return result
def test_past_midnight():
    """
    integration test of a trip running past midnight
    """
    navitia_vj = {
        'trip': {'id': 'vehicle_journey:1'},
        'stop_times': [
            {
                'utc_arrival_time': datetime.time(22, 10),
                'utc_departure_time': datetime.time(22, 15),
                'stop_point': {'id': 'sa:1'}
            },
            # arrive at sa:2 at 23:10 and leave the day after
            {
                'utc_arrival_time': datetime.time(23, 10),
                'utc_departure_time': datetime.time(2, 15),
                'stop_point': {'id': 'sa:2', 'stop_area': {'timezone': 'UTC'}}
            },
            {
                'utc_arrival_time': datetime.time(3, 20),
                'utc_departure_time': datetime.time(3, 25),
                'stop_point': {'id': 'sa:3', 'stop_area': {'timezone': 'UTC'}}
            }
        ]
    }
    with app.app_context():
        vj = VehicleJourney(
            navitia_vj,
            utc.localize(datetime.datetime(2015, 9, 8, 21, 15, 0)),
            utc.localize(datetime.datetime(2015, 9, 9, 4, 20, 0)))
        trip_update = TripUpdate(vj, status='update')
        st = StopTimeUpdate({'id': 'sa:2'},
                            departure_delay=timedelta(minutes=31),
                            dep_status='update', order=1)
        real_time_update = RealTimeUpdate(raw_data=None, connector='cots',
                                          contributor='realtime.cots')
        trip_update.stop_time_updates.append(st)
        res, _ = handle(real_time_update, [trip_update], 'kisio-digital')

        assert len(res.trip_updates) == 1
        trip_update = res.trip_updates[0]
        assert trip_update.status == 'update'
        assert len(trip_update.stop_time_updates) == 3

        assert trip_update.stop_time_updates[0].stop_id == 'sa:1'
        assert trip_update.stop_time_updates[0].arrival == _dt("22:10")
        assert trip_update.stop_time_updates[0].departure == _dt("22:15")

        assert trip_update.stop_time_updates[1].stop_id == 'sa:2'
        assert trip_update.stop_time_updates[1].arrival == _dt("23:10")
        assert trip_update.stop_time_updates[1].arrival_delay == timedelta(0)
        assert trip_update.stop_time_updates[1].departure == _dt("2:46", day=9)
        assert trip_update.stop_time_updates[1].departure_delay == timedelta(minutes=31)

        assert trip_update.stop_time_updates[2].stop_id == 'sa:3'
        assert trip_update.stop_time_updates[2].arrival == _dt("3:20", day=9)
        assert trip_update.stop_time_updates[2].arrival_delay == timedelta(0)
        assert trip_update.stop_time_updates[2].departure == _dt("3:25", day=9)
        assert trip_update.stop_time_updates[2].departure_delay == timedelta(0)
def model_to_dict(instance, fields=None, exclude=None, many_to_many=False,
                  i18n_fields=None, related_sets: tuple = None,
                  many_to_many_fields: tuple = None, **kwargs):
    """
    Return a dict containing the data in ``instance`` suitable for passing as
    a Form's ``initial`` keyword argument.

    ``fields`` is an optional list of field names. If provided, return only
    the named fields.

    ``exclude`` is an optional list of field names. If provided, exclude the
    named fields from the returned dict, even if they are listed in the
    ``fields`` argument.

    ``related_sets`` is an optional list of reversed many-to-many field names.

    ``many_to_many_fields`` is an optional list of many-to-many field names,
    used to show m2m data.
    """
    if instance is None:
        return
    use_natural_foreign_keys = kwargs.get('use_natural_foreign_keys', False)
    opts = instance._meta
    data = {}
    default_exclude = SENSITIVE_FIELDS
    if exclude:
        exclude = [exclude] if isinstance(exclude, str) else list(exclude)
        exclude.extend(default_exclude)
        exclude = list(set(exclude))
    else:
        exclude = default_exclude
    chains = chain(opts.concrete_fields, opts.private_fields)
    if many_to_many and many_to_many_fields:
        chains = chain(chains, opts.many_to_many)
    for f in chains:
        # if not getattr(f, 'editable', False):
        #     continue
        if fields and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue
        if isinstance(f, GenericForeignKey):
            value = getattr(instance, f.name)
            value = model_to_dict(value)
        else:
            value = f.value_from_object(instance)
        if isinstance(f, models.DateTimeField):
            if value is None:
                continue
            if isinstance(value, str):
                naive = dateparse.parse_datetime(value)
                aware = utc.localize(naive, is_dst=None)
            else:
                aware = timezone.localtime(value)
            value = aware.strftime('%Y-%m-%d %H:%M:%S')
        elif isinstance(f, models.DateField):
            if value is None:
                continue
            if isinstance(value, str):
                naive = dateparse.parse_date(value)
                aware = utc.localize(naive, is_dst=None)
            else:
                aware = timezone.localdate(value)
            value = aware.strftime('%Y-%m-%d')
        elif isinstance(f, models.ForeignKey) and use_natural_foreign_keys:
            if hasattr(f.related_model, 'natural_key') and callable(
                    getattr(f.related_model, 'natural_key')):
                fk = getattr(instance, f.name)
                value = f.related_model.natural_key(fk) if fk else fk
            # if not many_to_many_fields or many_to_many_fields and f.name in many_to_many_fields:
            #     # only serialize fields in many_to_many_fields or many_to_many_fields is empty
            #     if hasattr(f.model, 'natural_key') and callable(getattr(f.model, 'natural_key')):
            #         value = instance._meta.model.natural_key(instance)
        else:
            # i18n
            if i18n_fields:
                assert isinstance(i18n_fields, (list, tuple, str))
                i18n_fields = [i18n_fields] if isinstance(i18n_fields, str) else i18n_fields
                if f.name in i18n_fields:
                    value = gettext(value)
        data[f.name] = value
    if related_sets:
        for rs in related_sets:
            if not hasattr(instance, rs):
                # try:
                #     getattr(instance, rs)
                # except Exception as e:
                #     if e.__class__.__name__ == 'RelatedObjectDoesNotExist':
                #         data[rs] = None
                #         continue
                raise AttributeError(
                    "'%s' is not a valid attribute for model '%s'."
                    % (rs, instance._meta.model_name))
            item_set = getattr(instance, rs)
            if not item_set.__class__.__name__.endswith('RelatedManager'):
                raise AttributeError(
                    "'%s' is not a valid value for 'related_sets', "
                    "maybe you need 'many_to_many_fields'?" % rs)
            data[rs] = queryset_to_list(item_set.all())
    if many_to_many_fields:
        for m2m in many_to_many_fields:
            if not hasattr(instance, m2m):
                try:
                    # one-to-one reverse relation
                    getattr(instance, m2m)
                except Exception as e:
                    if e.__class__.__name__ == 'RelatedObjectDoesNotExist':
                        data[m2m] = None
                        continue
                raise AttributeError(
                    "'%s' is not a valid attribute for model '%s'."
                    % (m2m, instance._meta.model_name))
            m2m_obj = getattr(instance, m2m)
            if m2m_obj is not None and not isinstance(m2m_obj, Model):
                raise AttributeError(
                    "'%s' is not a valid value for 'many_to_many_fields', "
                    "maybe you need 'related_sets'?" % m2m)
            data[m2m] = model_to_dict(m2m_obj)
    data = json.dumps(data, cls=CJsonEncoder)
    data = json.loads(data)
    return data
def local_tz_filter(timestamp):
    local_timestamp = utc.localize(timestamp).astimezone(tz)
    return local_timestamp.strftime("%m/%d %H:%M")
logging.debug("KMS Request Bytes: \n%s\n" % justify(binascii.b2a_hex(str(kmsRequest)))) logging.debug("KMS Request: \n%s\n" % justify(kmsRequest.dump(print_to_stdout=False))) clientMachineId = kmsRequest['clientMachineId'].get() global applicationId applicationId = kmsRequest['applicationId'].get() skuId = kmsRequest['skuId'].get() requestDatetime = filetimes.filetime_to_dt(kmsRequest['requestTime']) # Try and localize the request time, if pytz is available try: import timezones from pytz import utc local_dt = utc.localize(requestDatetime).astimezone( timezones.localtz()) except ImportError: local_dt = requestDatetime infoDict = { "machineName": kmsRequest.getMachineName(), "clientMachineId": str(clientMachineId), "appId": self.appIds.get(applicationId, str(applicationId)), "skuId": self.skuIds.get(skuId, str(skuId)), "licenseStatus": kmsRequest.getLicenseStatus(), "requestTime": int(time.time()), "kmsEpid": None } #print infoDict logging.info("Machine Name: %s" % infoDict["machineName"])
def format_data(pd, gd, query, phid):
    """
    :Summary: Format the fetched data, store it to the databases and drop
        the irrelevant parts.
    :param pd: Phabricator data.
    :param gd: Gerrit data.
    :param query: Query model object.
    :param phid: Phabricator ID of the user.
    :return: JSON response of all the tasks the user was involved in, within
        the specified time span.
    """
    resp = []
    len_pd = len(pd)
    len_gd = len(gd)
    if len_pd > len_gd:
        leng = len_pd
    else:
        leng = len_gd
    temp = []
    if query.queryfilter.status is not None and query.queryfilter.status != "":
        status_name = query.queryfilter.status.split(",")
    else:
        status_name = True
    with transaction.atomic():
        ListCommit.objects.filter(query=query).delete()
        for i in range(0, leng):
            if i < len_pd:
                if pd[i]['phid'] not in temp:
                    temp.append(pd[i]['phid'])
                    date_time = datetime.fromtimestamp(int(pd[i]['fields']['dateCreated']))
                    date_time = choose_time_format_method(
                        date_time.replace(hour=0, minute=0, second=0), "str")
                    status = pd[i]['fields']['status']['name'].lower()
                    if status_name is True or status in status_name or (
                            status == "open" and "p-open" in status_name):
                        rv = {
                            "time": date_time,
                            "phabricator": True,
                            "status": pd[i]['fields']['status']['name'],
                            "owned": pd[i]['fields']['authorPHID'] == phid,
                            "assigned": pd[i]['fields']['ownerPHID'] == True or phid == pd[i]['fields']['ownerPHID']
                        }
                        resp.append(rv)
                        ListCommit.objects.create(
                            query=query,
                            heading=pd[i]['fields']['name'],
                            platform="Phabricator",
                            created_on=date_time,
                            redirect="T" + str(pd[i]['id']),
                            status=pd[i]['fields']['status']['name'],
                            owned=pd[i]['fields']['authorPHID'] == phid,
                            assigned=pd[i]['fields']['ownerPHID'] == True or phid == pd[i]['fields']['ownerPHID'])
            if i < len_gd:
                date_time = utc.localize(datetime.strptime(
                    gd[i]['created'].split(".")[0].split(" ")[0], "%Y-%m-%d"))
                if (date_time.date() < query.queryfilter.end_time
                        and date_time.date() > query.queryfilter.start_time):
                    epouch = choose_time_format_method(
                        date_time.replace(hour=0, minute=0, second=0), "int")
                    status = gd[i]['status'].lower()
                    if status_name is True or status in status_name or (
                            status == "open" and "g-open" in status_name):
                        rv = {
                            "time": epouch,
                            "gerrit": True,
                            "status": gd[i]['status'],
                            "owned": True
                        }
                        resp.append(rv)
                        ListCommit.objects.create(
                            query=query,
                            heading=gd[i]['subject'],
                            platform="Gerrit",
                            created_on=epouch,
                            redirect=gd[i]['change_id'],
                            status=gd[i]['status'],
                            owned=True,
                            assigned=True)
    return resp
def __do_ntp(self):
    """
    Sleep thread which will send the NTP packets once every hour, and once at start.
    """
    self.__logger.info(
        "Started NTP broadcast, will wait until next minute transition")
    self.__is_active = True
    while self.is_active():
        # Send the NTP packets on next minute transition
        self.__send_next_transition()
        if not self.is_active():
            break

        # Figure out how long to sleep:
        #   Till next hour transition (if no synctime specified)
        #   Till synctime
        #   Till next DST transition
        now = utc.localize(datetime.utcnow()).astimezone(self.__timezone)

        # DST - if the timezone has it
        until_dst = None
        if hasattr(self.__timezone, '_utc_transition_times'):
            until_dst = next(
                time for time in self.__timezone._utc_transition_times
                if time > datetime.utcnow())
            until_dst = utc.localize(until_dst).astimezone(self.__timezone)

        # No synctime: one hour
        if "synctime" not in self.__settings or self.__settings["synctime"] == "":
            until_synctime = now + timedelta(hours=1) - timedelta(
                minutes=now.minute, seconds=now.second, microseconds=now.microsecond)
            if not until_dst or until_synctime < until_dst:
                until = until_synctime
            else:
                until = until_dst
        # Synctime set, check between timesync and DST which is closest
        else:
            # Timesync
            splitted = self.__settings["synctime"].split(":")
            hh = int(splitted[0])
            mm = int(splitted[1])
            until_timesync = utc.localize(datetime.utcnow()).astimezone(
                self.__timezone).replace(hour=hh, minute=mm, second=0, microsecond=0)
            # Add one day if it has already passed
            if until_timesync < now:
                until_timesync = until_timesync + timedelta(days=1)
            if until_dst is None or until_timesync < until_dst:
                until = until_timesync
            else:
                until = until_dst

        self.__logger.info(
            "Waiting for next NTP broadcast at {0}".format(until))

        # Sleep until a minute before
        until = until - timedelta(minutes=1)
        dt = until - now
        self.__sleep_event.wait(dt.total_seconds())
def post_manage_league(league_id):
    name = request.form.get('league-name')
    num_tracks = request.form.get('tracks-submitted')
    upvote_size = request.form.get('point-bank-size')
    limit_upvotes = request.form.get('limit-upvotes')
    max_up_per_song = request.form.get('max-points-per-song')
    allow_downvotes = request.form.get('allow-downvotes')
    downvote_size = request.form.get('downvote-bank-size')
    limit_downvotes = request.form.get('limit-downvotes')
    max_down_per_song = request.form.get('max-downvotes-per-song')

    user_ids = json.loads(request.form.get('added-members', '[]'))
    added_members = [select_user(uid) for uid in user_ids]
    emails = json.loads(request.form.get('invited-members', '[]'))
    deleted_members = json.loads(request.form.get('deleted-members', '[]'))
    added_rounds = json.loads(request.form.get('added-rounds', '[]'))
    edited_rounds = json.loads(request.form.get('edited-rounds', '[]'))
    deleted_rounds = json.loads(request.form.get('deleted-rounds', '[]'))

    league = select_league(league_id)
    if name != league.name:
        league.name = name
        update_league(league)

    league.preferences.track_count = int(num_tracks)
    league.preferences.point_bank_size = int(upvote_size)
    league.preferences.max_points_per_song = 0
    if limit_upvotes == 'yes':
        league.preferences.max_points_per_song = int(max_up_per_song or 0)
    league.preferences.downvote_bank_size = 0
    league.preferences.max_downvotes_per_song = 0
    if allow_downvotes == 'yes':
        league.preferences.downvote_bank_size = int(downvote_size or 0)
        if limit_downvotes == 'yes':
            league.preferences.max_downvotes_per_song = int(max_down_per_song or 0)
    upsert_league_preferences(league)

    for added_member in added_members:
        add_user(league, added_member.email, notify=True, user=added_member)
    for email in emails:
        add_user(league, email, notify=True)
    for deleted_member in deleted_members:
        remove_user(league, deleted_member)

    for added_round in added_rounds:
        submission_due_date_str = added_round['submission-due-date-utc']
        submission_due_date = utc.localize(
            datetime.strptime(submission_due_date_str, '%m/%d/%y %I%p'))
        vote_due_date_str = added_round['voting-due-date-utc']
        vote_due_date = utc.localize(
            datetime.strptime(vote_due_date_str, '%m/%d/%y %I%p'))
        create_submission_period(league, added_round['name'],
                                 added_round['description'],
                                 submission_due_date, vote_due_date)

    for edited_round in edited_rounds:
        submission_due_date_str = edited_round['submission-due-date-utc']
        submission_due_date = utc.localize(
            datetime.strptime(submission_due_date_str, '%m/%d/%y %I%p'))
        vote_due_date_str = edited_round['voting-due-date-utc']
        vote_due_date = utc.localize(
            datetime.strptime(vote_due_date_str, '%m/%d/%y %I%p'))
        round = select_round(edited_round['id'])
        if not round:
            continue
        round.league = league
        update_submission_period(edited_round['id'], edited_round['name'],
                                 edited_round['description'],
                                 submission_due_date, vote_due_date,
                                 submission_period=round)

    for deleted_round in deleted_rounds:
        try:
            remove_submission_period(deleted_round)
        except Exception as e:
            app.logger.warning('Error while attempting to delete round %s: %s',
                               deleted_round, str(e))

    if league.scoreboard:
        league = select_league(league_id)
        calculate_league_scoreboard(league)

    app.logger.info('User modified league',
                    extra={'league': league.id, 'user': g.user.id})

    return redirect(url_for('view_league', league_id=league_id))
def longdate(date):
    return utc.localize(date).astimezone(get_timezone()).strftime('%B %e, %Y')
def is_dst(tzdata):
    tz = timezone('US/Eastern')
    now = utc.localize(tzdata)
    return now.astimezone(tz).dst() != timedelta(0)
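# A small usage sketch for is_dst() above: a July UTC instant falls within
# US/Eastern daylight saving time, a January one does not.
from datetime import datetime

assert is_dst(datetime(2021, 7, 1, 12, 0)) is True
assert is_dst(datetime(2021, 1, 1, 12, 0)) is False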
def onPrivmsg(self, irc, msg, channel, user):
    """Looks for a '!dates' command in messages posted to the channel and
    returns a list of dates within the next week.

    irc:     An instance of the bytebot. Will be passed by the plugin loader
    msg:     The msg sent to the channel
    channel: The channel's name
    user:    The user who sent the message
    """
    if msg.find('!dates') == -1:
        return

    f = urlopen(BYTEBOT_PLUGIN_CONFIG['dates']['url'])
    cal = Calendar.from_ical(f.read())

    now = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    then = now + timedelta(days=BYTEBOT_PLUGIN_CONFIG['dates']['timedelta'])

    found = 0
    data = []
    timezoneEF = timezone('Europe/Berlin')
    fmt = "%d.%m.%Y %H:%M"

    # iterate all VEVENTS
    for ev in cal.walk('VEVENT'):
        start = vDDDTypes.from_ical(ev["DTSTART"])

        """
        Check if DTSTART could be cast into some instance of datetime. If so,
        this indicates an event with a given start and stop time. There are
        other events too, e.g. events lasting a whole day. Reading DTSTART of
        such whole-day events will result in some instance of date. We will
        handle this case later.
        """
        if isinstance(start, datetime):
            rset = rruleset()  # create a set of recurrence rules
            info = ""
            loc = ""

            """
            Everyone interested in calendar events wants to get some summary
            about the event. So each event handled here has to have a SUMMARY.
            If not, we will discard handling the VEVENT here.
            """
            if "SUMMARY" in ev:
                found += 1
                info = ev["SUMMARY"].encode("utf-8")
            else:
                continue  # don't show events without a summary!

            """
            Printing the location of an event is important too. However, the
            string containing location info may be too long to be viewed
            nicely in IRC. Since there exists no good solution for this, this
            feature is coming soon.

            if "LOCATION" in ev:
                loc = ev["LOCATION"]
            else:
                loc = "Liebknechtstrasse 8"

            Recurrence handling starts here. First, we check if there is a
            recurrence rule (RRULE) inside the VEVENT. If so, we use the
            ical-like expression of DTSTART and RRULE to feed our ruleset
            with.
            """
            if "RRULE" in ev:  # recurrence
                ical_dtstart = (ev.get("DTSTART")).to_ical()
                ical_rrule = (ev.get('RRULE')).to_ical()
                rset.rrule(rrulestr(ical_rrule,
                                    dtstart=parse(ical_dtstart),
                                    ignoretz=1))

                """
                The ruleset now may be used to calculate any datetime the
                event happened and will happen. Since we are only interested
                in upcoming events between now and then, we just use the
                between() method of the ruleset, which will return an array
                of datetimes. Since timeutils cares about timezones, no
                conversion between UTC and ME(S)Z needs to be done. We just
                iterate the array of datetimes and put the starting time
                (DTSTART), info (SUMMARY) and location (LOCATION) into our
                "database" of events.
                """
                for e in rset.between(now, then):
                    data.append({
                        'datetime': e.strftime(fmt),
                        'datetime_sort': e.strftime(fmt),
                        'info': info,
                        'loc': loc,
                    })

                """
                Recurrence rules do also know about EXDATEs; handling this
                should be easy through rset (ruleset)...

                TODO handling of EXDATE
                """
            else:  # no recurrence
                """
                There was no recurrence rule (RRULE), so we do not need to
                handle recurrence for this VEVENT. We do, however, need to
                handle conversion between UTC and ME(S)Z, because now
                timeutils, which would do this for us automatically, is not
                involved.

                First we check if the DTSTART is between now and then. If so,
                we put the VEVENT's start time (DTSTART), info (SUMMARY) and
                location (LOCATION) into our database.
                """
                if start < utc.localize(now) or start > utc.localize(then):
                    continue

                data.append({
                    'datetime': start.astimezone(timezoneEF).strftime(fmt),
                    'datetime_sort': start.astimezone(timezoneEF).strftime(fmt),
                    'info': info,
                    'loc': loc,
                })

        """
        So far we have only handled short-time events, but there are whole-day
        events too. So let's handle them here...

        TODO handling of whole-day events

        if isinstance(start, date):
        """

    # let's sort our database, nearest events coming first...
    data = sorted(data,
                  key=lambda k: time.mktime(datetime.strptime(
                      k['datetime_sort'], "%d.%m.%Y %H:%M").timetuple()))

    """
    Spit out all events in the database into IRC. If there were no events,
    print some message about this...
    """
    for ev in data:
        irc.msg(channel, " %s - %s" % (ev['datetime'], ev['info']))

    if found == 0:
        irc.msg(channel, "No dates during the next week")

    f.close()
def _get_dt_in_event_tz(self, datetimes, event):
    tz_name = event.date_tz
    return [
        utc.localize(dt, is_dst=False).astimezone(timezone(tz_name))
        for dt in datetimes
    ]
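# A short sketch of the pytz is_dst semantics assumed by the helper above:
# is_dst=True/False disambiguates times inside a DST transition, while
# is_dst=None refuses to guess and raises instead (for utc.localize() the flag
# is inert, since UTC has no DST).
from datetime import datetime
from pytz import timezone
from pytz.exceptions import AmbiguousTimeError

paris = timezone('Europe/Paris')
ambiguous = datetime(2021, 10, 31, 2, 30)  # occurs twice when clocks fall back
assert (paris.localize(ambiguous, is_dst=True).utcoffset()
        != paris.localize(ambiguous, is_dst=False).utcoffset())
try:
    paris.localize(ambiguous, is_dst=None)
except AmbiguousTimeError:
    pass  # is_dst=None surfaces the ambiguity instead of picking a side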
def _event_tracks_get_values(self, event, tag=None, **searches):
    # init and process search terms
    searches.setdefault('search', '')
    searches.setdefault('search_wishlist', '')
    searches.setdefault('tags', '')
    search_domain = self._get_event_tracks_base_domain(event)

    # search on content
    if searches.get('search'):
        search_domain = expression.AND(
            [search_domain, [('name', 'ilike', searches['search'])]])

    # search on tags
    search_tags = self._get_search_tags(searches['tags'])
    if not search_tags and tag:  # backward compatibility
        search_tags = tag
    if search_tags:
        # Example: You filter on age: 10-12 and activity: football.
        # Doing it this way allows to only get events who are tagged "age: 10-12" AND "activity: football".
        # Add another tag "age: 12-15" to the search and it would fetch the ones who are tagged:
        # ("age: 10-12" OR "age: 12-15") AND "activity: football"
        grouped_tags = dict()
        for search_tag in search_tags:
            grouped_tags.setdefault(search_tag.category_id, list()).append(search_tag)
        search_domain_items = [
            [('tag_ids', 'in', [tag.id for tag in grouped_tags[group]])]
            for group in grouped_tags
        ]
        search_domain = expression.AND([search_domain, *search_domain_items])

    # fetch data to display with TZ set for both event and tracks
    now_tz = utc.localize(fields.Datetime.now().replace(microsecond=0),
                          is_dst=False).astimezone(timezone(event.date_tz))
    today_tz = now_tz.date()
    event = event.with_context(tz=event.date_tz or 'UTC')
    tracks_sudo = event.env['event.track'].sudo().search(search_domain, order='date asc')
    tag_categories = request.env['event.track.tag.category'].sudo().search([])

    # filter on wishlist (as post processing due to costly search on is_reminder_on)
    if searches.get('search_wishlist'):
        tracks_sudo = tracks_sudo.filtered(lambda track: track.is_reminder_on)

    # organize categories for display: announced, live, soon and day-based
    tracks_announced = tracks_sudo.filtered(lambda track: not track.date)
    tracks_wdate = tracks_sudo - tracks_announced
    date_begin_tz_all = list(set(
        dt.date() for dt in self._get_dt_in_event_tz(tracks_wdate.mapped('date'), event)))
    date_begin_tz_all.sort()
    tracks_sudo_live = tracks_wdate.filtered(
        lambda track: track.is_published and track.is_track_live)
    tracks_sudo_soon = tracks_wdate.filtered(
        lambda track: track.is_published and not track.is_track_live and track.is_track_soon)
    tracks_by_day = []
    for display_date in date_begin_tz_all:
        matching_tracks = tracks_wdate.filtered(
            lambda track: self._get_dt_in_event_tz([track.date], event)[0].date() == display_date)
        tracks_by_day.append({
            'date': display_date,
            'name': display_date,
            'tracks': matching_tracks
        })
    if tracks_announced:
        tracks_announced = tracks_announced.sorted('wishlisted_by_default', reverse=True)
        tracks_by_day.append({
            'date': False,
            'name': _('Coming soon'),
            'tracks': tracks_announced
        })

    for tracks_group in tracks_by_day:
        # the tracks group is folded if all tracks are done (and if it's not "today")
        tracks_group['default_collapsed'] = (today_tz != tracks_group['date']) and all(
            track.is_track_done and not track.is_track_live
            for track in tracks_group['tracks'])

    # return rendering values
    return {
        # event information
        'event': event,
        'main_object': event,
        # tracks display information
        'tracks': tracks_sudo,
        'tracks_by_day': tracks_by_day,
        'tracks_live': tracks_sudo_live,
        'tracks_soon': tracks_sudo_soon,
        'today_tz': today_tz,
        # search information
        'searches': searches,
        'search_key': searches['search'],
        'search_wishlist': searches['search_wishlist'],
        'search_tags': search_tags,
        'tag_categories': tag_categories,
        # environment
        'is_html_empty': is_html_empty,
        'hostname': request.httprequest.host.split(':')[0],
        'is_event_user': request.env.user.has_group('event.group_event_user'),
    }
def get_datetime(date_text):
    return utc.localize(datetime.strptime(date_text, "%Y-%m-%d %H:%M:%S"))
def _parse_value(self, val_el, obj):
    """
    Parse the node val_el as a constant.
    """
    if val_el is not None and val_el.text is not None:
        ntag = self._retag.match(val_el.tag).groups()[1]
    else:
        ntag = "Null"
    obj.valuetype = ntag
    if ntag == "Null":
        obj.value = None
    elif hasattr(ua.ua_binary.Primitives1, ntag):
        # Elementary types have their parsing directly relying on ua_type_to_python.
        obj.value = ua_type_to_python(val_el.text, ntag)
    elif ntag == "DateTime":
        obj.value = ua_type_to_python(val_el.text, ntag)
        # According to specs, DateTime should be either UTC or with a timezone.
        if obj.value.tzinfo is None or obj.value.tzinfo.utcoffset(obj.value) is None:
            obj.value = utc.localize(obj.value)  # FIXME Forcing to UTC if unaware, maybe should raise?
    elif ntag == "ByteString":
        if val_el.text is None:
            mytext = b""
        else:
            mytext = val_el.text.encode()
            mytext = base64.b64decode(mytext)
        obj.value = mytext
    elif ntag == "String":
        mytext = val_el.text
        if mytext is None:
            # Support importing null strings.
            mytext = ""
        # mytext = mytext.replace('\n', '').replace('\r', '')
        obj.value = mytext
    elif ntag == "Guid":
        self._parse_contained_value(val_el, obj)
        # Override parsed string type to guid.
        obj.valuetype = ntag
    elif ntag == "NodeId":
        id_el = val_el.find("uax:Identifier", self.ns)
        if id_el is not None:
            obj.value = id_el.text
    elif ntag == "ExtensionObject":
        obj.value = self._parse_ext_obj(val_el)
    elif ntag == "LocalizedText":
        obj.value = self._parse_body(val_el)
    elif ntag == "ListOfLocalizedText":
        obj.value = self._parse_list_of_localized_text(val_el)
    elif ntag == "ListOfExtensionObject":
        obj.value = self._parse_list_of_extension_object(val_el)
    elif ntag.startswith("ListOf"):
        # Default case for "ListOf" types.
        # Should stay after particular cases (e.g.: "ListOfLocalizedText").
        obj.value = []
        for child_el in val_el:
            tmp = NodeData()
            self._parse_value(child_el, tmp)
            obj.value.append(tmp.value)
    else:
        # Missing according to string_to_val: XmlElement, ExpandedNodeId,
        # QualifiedName, StatusCode.
        # Missing according to ua.VariantType (also missing in string_to_val):
        # DataValue, Variant, DiagnosticInfo.
        self.logger.warning("Parsing value of type '%s' not implemented", ntag)
def admin_data(edition):
    d_tshirt = dict(TSHIRT_SIZES)
    d_referrer = dict(REFERRERS)
    d_category = dict(USER_CATEGORIES)
    tz = timezone(app.config['TIMEZONE'])
    headers = [
        ('no', u'Sl No'),
        ('regdate', u'Date'),
        ('name', u'Name'),
        ('email', u'Email'),
        ('company', u'Company'),
        ('jobtitle', u'Job Title'),
        ('twitter', u'Twitter'),
        ('tshirt', u'T-shirt Size'),
        ('referrer', u'Referrer'),
        ('category', u'Category'),
        ('ipaddr', u'IP Address'),
        ('approved', u'Approved'),
        ('rsvp', u'RSVP'),
        ('agent', u'User Agent'),
        ('reason', u'Reason'),
    ]
    data = ({
        'no': i + 1,
        'regdate': utc.localize(p.regdate).astimezone(tz).strftime('%Y-%m-%d %H:%M'),
        'name': p.fullname,
        'email': p.email,
        'company': p.company,
        'jobtitle': p.jobtitle,
        'twitter': p.twitter,
        'tshirt': d_tshirt.get(str(p.tshirtsize), p.tshirtsize),
        'referrer': d_referrer.get(str(p.referrer), p.referrer),
        'category': d_category.get(str(p.category), p.category),
        'ipaddr': p.ipaddr,
        'approved': {True: 'Yes', False: 'No'}[p.approved],
        'rsvp': {'A': u'', 'Y': u'Yes', 'M': u'Maybe', 'N': u'No'}[p.rsvp],
        'agent': p.useragent,
        'reason': p.reason,
    } for i, p in enumerate(Participant.query.filter_by(edition=edition)))
    return render_template('datatable.html', headers=headers, data=data,
                           title=u'Participant data')
def parse_date(date_string):
    """
    Converts an isoformat string into a python datetime object.
    Localizes that datetime object to UTC.
    """
    return utc.localize(datetime.strptime(date_string, "%Y-%m-%d"))
def _create_db_vj(navitia_vj):
    return VehicleJourney(
        navitia_vj,
        utc.localize(datetime.datetime(2015, 9, 8, 7, 10, 0)),
        utc.localize(datetime.datetime(2015, 9, 8, 11, 5, 0)))
def utc_to_market_time(self, timestamp):
    """Converts a UTC timestamp to local market time."""
    utc_time = utc.localize(timestamp)
    market_time = utc_time.astimezone(MARKET_TIMEZONE)
    return market_time
def _add_global_test_data(session):
    global_passwords['contributor'] = 'super pass'
    global_passwords['contributor2'] = 'better pass'
    global_passwords['moderator'] = 'even better pass'
    global_passwords['robot'] = 'bombproof pass'

    contributor_profile = UserProfile(
        categories=['amateur'],
        locales=[
            DocumentLocale(title='', description='Me', lang='en'),
            DocumentLocale(title='', description='Moi', lang='fr')
        ],
        geometry=DocumentGeometry(geom='SRID=3857;POINT(635956 5723604)'))
    contributor = User(name='Contributor', username='******',
                       email='*****@*****.**', forum_username='******',
                       password='******', email_validated=True,
                       profile=contributor_profile)

    contributor2_profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(title='...', lang='en')])
    contributor2 = User(name='Contributor 2', username='******',
                        email='*****@*****.**', forum_username='******',
                        password='******', email_validated=True,
                        profile=contributor2_profile)

    contributor3_profile = UserProfile(
        categories=['amateur'],
        locales=[DocumentLocale(title='...', lang='en')])
    contributor3 = User(name='Contributor 3', username='******',
                        email='*****@*****.**', forum_username='******',
                        password='******', email_validated=True,
                        profile=contributor3_profile)

    moderator_profile = UserProfile(
        categories=['mountain_guide'],
        locales=[DocumentLocale(title='', lang='en')])
    moderator = User(name='Moderator', username='******',
                     email='*****@*****.**', forum_username='******',
                     moderator=True, password='******', email_validated=True,
                     profile=moderator_profile)

    robot_profile = UserProfile(locales=[DocumentLocale(title='', lang='en')])
    robot = User(name='Robot', username='******',
                 email='*****@*****.**', forum_username='******',
                 robot=True, password='******', email_validated=True,
                 profile=robot_profile)

    users = [robot, moderator, contributor, contributor2, contributor3]
    session.add_all(users)
    session.flush()

    domain = 'www.somewhere.com'
    sso_key = SsoKey(domain=domain, key=domain)
    session.add(sso_key)

    sso_external_id = SsoExternalId(
        domain=domain,
        external_id='1',
        user=contributor,
        token='token',
        expire=utc.localize(datetime.datetime.utcnow()),
    )
    session.add(sso_external_id)
    session.flush()

    key = settings['jwtauth.master_secret']
    algorithm = 'HS256'
    now = datetime.datetime.utcnow()
    exp = now + datetime.timedelta(weeks=10)

    for user in [robot, moderator, contributor, contributor2, contributor3]:
        claims = create_claims(user, exp)
        token = jwt.encode(claims, key=key, algorithm=algorithm). \
            decode('utf-8')
        add_or_retrieve_token(token, exp, user.id)
        global_userids[user.username] = user.id
        global_tokens[user.username] = token
def date_filter(timestamp):
    date_time = utc.localize(timestamp).astimezone(tz)
    return date_time.strftime("%d-%b")
res = ''
for s in BeautifulSoup(html).stripped_strings:
    res = '%s %s' % (res, s)
    if len(res) > 100:
        break
return res.strip()


def get_vs2008(args):  # pragma: no cover
    vs2008 = {}
    for row in reader(args.data_file('datapoints_2008.csv'), delimiter=','):
        vs2008[(row[0], '%sA' % row[1])] = int(row[2])
    return vs2008


E2008 = utc.localize(datetime(2008, 4, 21))
E2011 = utc.localize(datetime(2011, 4, 28))
E2013 = utc.localize(datetime(2013, 11, 15))
data = Data(created=E2008, updated=E2008)


def migrate(from_, to_, converter):  # pragma: no cover
    for row in DB.execute("select * from %s" % from_):
        res = converter(row)
        if not res:
            continue
        if isinstance(res, dict):
            DBSession.add(to_(**res))
        else:
            data.add(to_, res[0], **res[1])
    DBSession.flush()
def get_local_datetime():
    # Thanks Stack Overflow https://stackoverflow.com/a/25265611/424301
    return utc.localize(datetime.now(), is_dst=None).astimezone(config.TIMEZONE)
def custom_time_converter(*args):
    # https://stackoverflow.com/a/45805464/974287
    utc_dt = utc.localize(datetime.utcnow())
    my_tz = timezone("Europe/Rome")
    converted = utc_dt.astimezone(my_tz)
    return converted.timetuple()
def local_tz_filter(timestamp):
    jst = timezone('Asia/Tokyo')
    jst_timestamp = utc.localize(timestamp).astimezone(jst)
    return jst_timestamp.strftime("%Y/%m/%d %H:%M:%S")
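# A minimal sketch of how filters like local_tz_filter above are typically
# registered as template filters; "app" here is a hypothetical Flask instance,
# not one defined in the snippet.
from flask import Flask

app = Flask(__name__)
app.add_template_filter(local_tz_filter, name='local_tz')
# then usable in templates as {{ some_utc_timestamp | local_tz }}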
def string_to_datetime(value):
    """ Convert the given string value to a datetime in UTC. """
    return utc.localize(fields.Datetime.from_string(value))
def shortdate(date):
    return utc.localize(date).astimezone(get_timezone()).strftime('%b %e')
async def info(self, ctx: Context, karma: clean_content):
    await ctx.trigger_typing()
    t_start = current_milli_time()

    # Strip any leading @s and get the item from the DB
    karma_stripped = karma.lstrip("@")
    karma_item = (db_session.query(KarmaModel)
                  .filter(func.lower(KarmaModel.name) == func.lower(karma_stripped))
                  .first())

    # If the item doesn't exist then raise an error
    if not karma_item:
        raise KarmaError(
            message=f"\"{karma_stripped}\" hasn't been karma'd yet. :cry:")

    # Get the changes and plot the graph
    filename, path = await plot_karma({karma_stripped: karma_item.changes})

    # Get the user with the most karma
    # I'd use a group_by sql statement here but it seems to not terminate
    all_changes = (db_session.query(KarmaChange)
                   .filter(KarmaChange.karma_id == karma_item.id)
                   .order_by(KarmaChange.created_at.asc())
                   .all())
    user_changes = defaultdict(list)
    for change in all_changes:
        user_changes[change.user].append(change)
    most_karma = max(user_changes.items(), key=lambda item: len(item[1]))

    # Calculate the approval rating of the karma
    approval = 100 * ((karma_item.pluses - karma_item.minuses) /
                      (karma_item.pluses + karma_item.minuses))
    mins_per_karma = (all_changes[-1].local_time - all_changes[0].local_time).total_seconds() / (60 * len(all_changes))

    time_taken = (current_milli_time() - t_start) / 1000

    if CONFIG.DEBUG:
        # Attach the file as an image for dev purposes
        plot_image = open(path, mode="rb")
        plot = File(plot_image)
        await ctx.send(
            f'Here\'s the karma trend for "{karma_stripped}" over time',
            file=plot)
    else:
        # Construct the embed
        generated_at = datetime.strftime(
            utc.localize(datetime.utcnow()).astimezone(timezone("Europe/London")),
            "%H:%M %d %b %Y",
        )
        embed_colour = Color.from_rgb(61, 83, 255)
        embed_title = f'Statistics for "{karma_stripped}"'
        embed_description = f'"{karma_stripped}" has been karma\'d {len(all_changes)} {pluralise(all_changes, "time")} by {len(user_changes.keys())} {pluralise(user_changes.keys(), "user")}.'
        embed = Embed(title=embed_title, description=embed_description, color=embed_colour)
        embed.add_field(
            name="Most karma'd",
            value=f'"{karma_stripped}" has been karma\'d the most by <@{most_karma[0].user_uid}> with a total of {len(most_karma[1])} {pluralise(most_karma[1], "change")}.',
        )
        embed.add_field(
            name="Approval rating",
            value=f'The approval rating of "{karma_stripped}" is {approval:.1f}% ({karma_item.pluses} positive to {karma_item.minuses} negative karma and {karma_item.neutrals} neutral karma).',
        )
        embed.add_field(
            name="Karma timeline",
            value=f'"{karma_stripped}" was first karma\'d on {datetime.strftime(all_changes[0].local_time, "%d %b %Y at %H:%M")} and has been karma\'d approximately every {mins_per_karma:.1f} minutes.',
        )
        embed.set_footer(
            text=f"Statistics generated at {generated_at} in {time_taken:.3f} seconds.")
        embed.set_image(url="{host}/{filename}".format(
            host=CONFIG.FIG_HOST_URL, filename=filename))

        display_name = get_name_string(ctx.message)
        await ctx.send(f"Here you go, {display_name}! :page_facing_up:", embed=embed)
def custom_time(*args):
    utc_dt = utc.localize(datetime.datetime.utcnow())
    my_tz = timezone("Asia/Shanghai")
    converted = utc_dt.astimezone(my_tz)
    return converted.timetuple()
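# A minimal sketch of how converters like custom_time above are usually hooked
# into the stdlib logging module: Formatter.converter must produce a
# time.struct_time, which is why these functions end with timetuple().
import logging

logging.Formatter.converter = custom_time  # applies to every formatter
logging.basicConfig(format="%(asctime)s %(message)s")
logging.warning("timestamps now rendered in Asia/Shanghai")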
async def plot(self, ctx: Context, *args: clean_content):
    await ctx.trigger_typing()
    t_start = current_milli_time()

    # If there are no arguments
    if not args:
        raise KarmaError(message="I can't")

    karma_dict = dict()
    failed = []

    # Iterate over the karma item(s)
    for karma in args:
        karma_stripped = karma.lstrip("@")
        karma_item = (db_session.query(KarmaModel)
                      .filter(func.lower(KarmaModel.name) == func.lower(karma_stripped))
                      .first())

        # Bucket the karma item(s) based on existence in the database
        if not karma_item:
            failed.append((karma_stripped, "hasn't been karma'd"))
            continue

        # Check if the topic has been karma'd at least 5 times
        if len(karma_item.changes) < 5:
            failed.append((
                karma_stripped,
                f"must have been karma'd at least 5 times before a plot can be made (currently karma'd {len(karma_item.changes)} {pluralise(karma_item.changes, 'time')})",
            ))
            continue

        # Add the karma changes to the dict
        karma_dict[karma_stripped] = karma_item.changes

    # Plot the graph and save it to a png
    filename, path = await plot_karma(karma_dict)
    t_end = current_milli_time()

    if CONFIG.DEBUG:
        # Attach the file as an image for dev purposes
        plot_image = open(path, mode="rb")
        plot = File(plot_image)
        await ctx.send(f'Here\'s the karma trend for "{karma}" over time', file=plot)
    else:
        # Construct the embed
        generated_at = datetime.strftime(
            utc.localize(datetime.utcnow()).astimezone(timezone("Europe/London")),
            "%H:%M %d %b %Y",
        )
        time_taken = (t_end - t_start) / 1000
        total_changes = reduce(
            lambda count, size: count + size,
            map(lambda t: len(t[1]), karma_dict.items()),
            0,
        )

        # Construct the embed strings
        if karma_dict.keys():
            embed_colour = Color.from_rgb(61, 83, 255)
            embed_description = f'Tracked {len(karma_dict.keys())} {pluralise(karma_dict.keys(), "topic")} with a total of {total_changes} changes'
            embed_title = (
                f"Karma trend over time for {comma_separate(list(karma_dict.keys()))}"
                if len(karma_dict.keys()) == 1
                else f"Karma trends over time for {comma_separate(list(karma_dict.keys()))}"
            )
        else:
            embed_colour = Color.from_rgb(255, 23, 68)
            embed_description = f'The following {pluralise(failed, "problem")} occurred whilst plotting:'
            embed_title = f"Could not plot karma for {comma_separate(list(map(lambda i: i[0], failed)))}"
        embed = Embed(color=embed_colour, title=embed_title, description=embed_description)

        # If there were any errors then add them
        for karma, reason in failed:
            embed.add_field(name=f'Failed to plot "{karma}"', value=f" • {reason}")

        # There was something plotted so attach the graph
        if karma_dict.keys():
            embed.set_footer(
                text=f"Graph generated at {generated_at} in {time_taken:.3f} seconds")
            embed.set_image(url="{host}/{filename}".format(
                host=CONFIG.FIG_HOST_URL, filename=filename))

        display_name = get_name_string(ctx.message)
        await ctx.send(f"Here you go, {display_name}! :chart_with_upwards_trend:", embed=embed)
S3_LOGS_BUCKET = "fdio-logs-s3-cloudfront-index"
S3_DOCS_BUCKET = "fdio-docs-s3-cloudfront-index"
PATH = f"s3://{S3_LOGS_BUCKET}/vex-yul-rot-jenkins-1/csit-*-perf-*"
SUFFIX = "info.json.gz"
IGNORE_SUFFIX = [
    "suite.info.json.gz",
    "setup.info.json.gz",
    "teardown.info.json.gz",
    "suite.output.info.json.gz",
    "setup.output.info.json.gz",
    "teardown.output.info.json.gz"
]
LAST_MODIFIED_END = utc.localize(
    datetime.strptime(
        f"{datetime.now().year}-{datetime.now().month}-{datetime.now().day}",
        "%Y-%m-%d"
    )
)
LAST_MODIFIED_BEGIN = LAST_MODIFIED_END - timedelta(1)


def flatten_frame(nested_sdf):
    """Unnest a Spark DataFrame in case there are nested structured columns.

    :param nested_sdf: Spark DataFrame.
    :type nested_sdf: DataFrame
    :returns: Unnested DataFrame.
    :rtype: DataFrame
    """
    stack = [((), nested_sdf)]
    columns = []