def prepara_certificado_txt(self, cert_txt):
    #
    # For xmlsec to read the certificate correctly, it must be split
    # into lines of 64 characters each...
    #
    cert_txt = cert_txt.replace('\n', '')
    cert_txt = cert_txt.replace('-----BEGIN CERTIFICATE-----', '')
    cert_txt = cert_txt.replace('-----END CERTIFICATE-----', '')

    linhas_certificado = ['-----BEGIN CERTIFICATE-----\n']
    for i in range(0, len(cert_txt), 64):
        linhas_certificado.append(cert_txt[i:i+64] + '\n')
    linhas_certificado.append('-----END CERTIFICATE-----\n')

    self.certificado = ''.join(linhas_certificado)

    cert_openssl = crypto.load_certificate(crypto.FILETYPE_PEM, self.certificado)
    self.cert_openssl = cert_openssl

    self._emissor = dict(cert_openssl.get_issuer().get_components())
    self._proprietario = dict(cert_openssl.get_subject().get_components())
    self._numero_serie = cert_openssl.get_serial_number()

    self._data_inicio_validade = datetime.strptime(
        cert_openssl.get_notBefore(), '%Y%m%d%H%M%SZ')
    self._data_inicio_validade = UTC.localize(self._data_inicio_validade)
    self._data_fim_validade = datetime.strptime(
        cert_openssl.get_notAfter(), '%Y%m%d%H%M%SZ')
    self._data_fim_validade = UTC.localize(self._data_fim_validade)

    for i in range(cert_openssl.get_extension_count()):
        extensao = cert_openssl.get_extension(i)
        self._extensoes[extensao.get_short_name()] = extensao.get_data()
def unixtimestamp(datetime):
    """Get unix time stamp from the given datetime.

    If datetime is not tz-aware then it's assumed that it is UTC.
    """
    epoch = UTC.localize(datetime.utcfromtimestamp(0))
    if not datetime.tzinfo:
        dt = UTC.localize(datetime)
    else:
        dt = UTC.normalize(datetime)
    delta = dt - epoch
    return total_seconds(delta)
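# A minimal usage sketch for unixtimestamp() above, assuming pytz and a
# total_seconds() helper are importable alongside it; the values below are
# illustrative, not part of the original source.
from datetime import datetime as _dt_example
import pytz

_naive = _dt_example(2020, 1, 1, 12, 0, 0)             # treated as UTC
_aware = pytz.timezone('US/Eastern').localize(_naive)  # normalized to UTC
# unixtimestamp(_naive) -> 1577880000.0
# unixtimestamp(_aware) -> 1577898000.0   (12:00 EST is 17:00 UTC)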
def build_feed(self):
    "Build the feed given our existing URL"
    # Get all the episodes
    page_content = str(requests.get(self.url).content)
    parser = BassdriveParser()
    parser.feed(page_content)
    links = parser.get_links()

    # And turn them into something usable
    fg = FeedGenerator()
    fg.id(self.url)
    fg.title(self.title)
    fg.description(self.title)
    fg.author({'name': self.dj})
    fg.language('en')
    fg.link({'href': self.url, 'rel': 'alternate'})
    fg.logo(self.logo)

    for link in links:
        fe = fg.add_entry()
        fe.author({'name': self.dj})
        fe.title(link[0])
        fe.description(link[0])
        fe.enclosure(self.url + link[1], 0, 'audio/mpeg')

        # Bassdrive always uses date strings of [yyyy.mm.dd] with
        # 0 padding on days and months, so that makes our lives easy
        date_start = link[0].find('[')
        date_str = link[0][date_start:date_start+12]
        published = datetime.strptime(date_str, '[%Y.%m.%d]')
        fe.pubdate(UTC.localize(published))
        fe.guid(link[0])

    return fg
def list_events():
    """List events for ajax-supplied start and end."""
    start = request.args.get('start', 0, type=int)
    end = request.args.get('end', 0, type=int)

    # make a datetime from the timestamp
    start = datetime.datetime.fromtimestamp(start)
    end = datetime.datetime.fromtimestamp(end)

    # Fetch the events from MongoDB
    _events = Event.query.filter(Event.timestamp >= start,
                                 Event.timestamp <= end)

    # Build the json so FullCalendar will swallow it
    events = []
    for event in _events:
        # Localize the time and get rid of the microseconds
        event_start = UTC.localize(event.timestamp) \
            .astimezone(timezone('US/Central')).replace(microsecond=0)
        # Make the event 5 minutes
        event_end = (event_start + datetime.timedelta(minutes=5)).replace(microsecond=0)
        event_dict = {'title': event.context.description,
                      'id': event.id,
                      'allDay': False,
                      'start': event_start.isoformat(),
                      'end': event_end.isoformat(),
                      # Url for the event detail
                      'url': '/event/{0}'.format(event.id)}
        if event.context.status != 0:
            event_dict.update({'color': 'red'})
        events.append(event_dict)
    return json.dumps(events)
def format_date(date, tzinfo=None):
    ### TODO: calling this function with the Plone default datetime (year=1000)
    ### will generate exceptions. This case must be handled.
    ### This can be aggravated by requesting an article that does not exist,
    ### through the articles.xml view, among other ways.

    # Are we a datetime or a DateTime?
    if hasattr(date, 'utcdatetime'):
        # we are a Zope DateTime!
        date = date.utcdatetime()
    # else: we are a python datetime!

    # calling this function 'naive' will attach a UTC timezone
    if date.tzinfo is None and tzinfo is None:
        date = UTC.localize(date)
    # calling this function with a tzinfo arg will:
    elif tzinfo is not None:
        if date.tzinfo is None:
            # localize an existing naive datetime
            date = tzinfo.localize(date)
        else:
            # convert a non-naive datetime into the provided timezone
            date = date.astimezone(tzinfo)

    if date.tzinfo.tzname(date) is not None:
        return date.strftime("%a, %d %b %Y %H:%M:%S %Z")
    else:
        # we only have a UTC offset
        return date.strftime("%a, %d %b %Y %H:%M:%S %z")
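# A brief usage sketch for format_date() above, assuming pytz; the sample
# values are illustrative only.
from datetime import datetime as _dt_fd
from pytz import timezone as _tz_fd

# naive datetime, no tzinfo arg -> rendered as UTC:
# format_date(_dt_fd(2020, 6, 1, 15, 30))
#     -> 'Mon, 01 Jun 2020 15:30:00 UTC'
# naive datetime plus a tzinfo arg -> localized to that zone:
# format_date(_dt_fd(2020, 6, 1, 15, 30), _tz_fd('US/Eastern'))
#     -> 'Mon, 01 Jun 2020 15:30:00 EDT'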
def on_topicinfo(self, c, e):
    model.Session.remove()
    args = e.arguments()
    log.debug(args)
    channel = args[0].lstrip(''.join(CHANNEL_PREFIXES))
    changed_by = args[1]
    changed_on = UTC.localize(datetime.utcfromtimestamp(float(args[2])))
    channel_participation_id = self.channels[channel]
    channel_participation = model.Session.query(model.ChannelParticipation) \
        .get(channel_participation_id)
    channel_info = model.Session.query(model.ChannelTopicInfo) \
        .get(channel_participation_id)
    if not channel_info:
        log.debug('on_topicinfo ChannelTopicInfo for %s non-existent' % channel)
        channel_info = model.ChannelTopicInfo(channel_participation,
                                              changed_by, changed_on)
    else:
        if channel_info.changed_by != changed_by or \
                channel_info.changed_on != changed_on:
            log.debug(self.__class__.__name__ +
                      ' updating topic info for channel %s...' % channel)
            if channel_info.changed_by != changed_by:
                channel_info.changed_by = changed_by
            if channel_info.changed_on != changed_on:
                channel_info.changed_on = changed_on
    model.Session.save(channel_info)
    model.Session.commit()
def handle(self, *args, **options):
    if Vaccine.objects.exists() or Pet.objects.exists():
        print('Pet data already loaded...exiting.')
        print(ALREADY_LOADED_ERROR_MESSAGE)
        return
    print("Creating vaccine data")
    for vaccine_name in VACCINES_NAMES:
        vac = Vaccine(name=vaccine_name)
        vac.save()
    print("Loading pet data for pets available for adoption")
    for row in DictReader(open('./pet_data.csv')):
        pet = Pet()
        pet.name = row['Pet']
        pet.submitter = row['Submitter']
        pet.species = row['Species']
        pet.breed = row['Breed']
        pet.description = row['Pet Description']
        pet.sex = row['Sex']
        pet.age = row['Age']
        raw_submission_date = row['submission date']
        submission_date = UTC.localize(
            datetime.strptime(raw_submission_date, DATETIME_FORMAT))
        pet.submission_date = submission_date
        pet.save()
        raw_vaccination_names = row['vaccinations']
        vaccination_names = [name for name
                             in raw_vaccination_names.split('| ') if name]
        for vac_name in vaccination_names:
            vac = Vaccine.objects.get(name=vac_name)
            pet.vaccinations.add(vac)
        pet.save()
def load_resp(self, resp, is_download):
    """
    Loads json response from API.

    :param resp: Response from API
    :type resp: dictionary
    :param is_download: Calculates time taken based on 'updated' field in
        response if upload, and based on stop time if download
    :type is_download: boolean
    """
    assert isinstance(resp, dict)
    setattr(self, 'resp', resp)
    setattr(self, 'size', humanize.naturalsize(int(resp['size'])))
    if is_download:
        updated_at = datetime.now(UTC)
    else:
        updated_at = UTC.localize(
            datetime.strptime(resp['updated'], '%Y-%m-%dT%H:%M:%S.%fZ'))
    setattr(self, 'time_taken', dict(zip(
        ('m', 's'),
        divmod((updated_at - getattr(self, 'start_time')).seconds
               if updated_at > getattr(self, 'start_time') else 0, 60)
    )))
    setattr(self, 'full_path', 'gs://%s/%s' % (resp['bucket'], resp['name']))
def to_mobile_date(self, cr, uid, model_date):
    user = self.pool.get('res.users').browse(cr, uid, uid)
    local_tz = pytz.timezone(user.partner_id.tz)
    fecha = datetime.strptime(model_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
    fecha = UTC.localize(fecha, is_dst=False)
    fecha = fecha.astimezone(local_tz)
    return fecha.strftime('%d/%m/%Y %H:%M:%S')
def activities_and_calories(gauge_factory, config, logger):
    activity_gauge = gauge_factory('runkeeper.activities')
    calorie_gauge = gauge_factory('runkeeper.calories_burned')
    local_tz = timezone(config['runkeeper.local_tz'])
    user = healthgraph.User(session=healthgraph.
                            Session(config['runkeeper.access_token']))
    activities_iter = user.get_fitness_activity_iter()
    today = today_utc().date()
    today_activities = []
    for a in activities_iter:  # breaking early prevents loading all results
        # use localize() rather than replace(tzinfo=...): pytz zones attach
        # the wrong base offset when passed through replace()
        activity_time = local_tz.localize(a['start_time'])
        activity_time_utc = UTC.normalize(activity_time)
        day = activity_time_utc.date()
        if day == today:
            today_activities.append(a)
        elif (today - day).days > 2:
            break

    total_activities = len(today_activities)
    total_calories = int(sum([a['total_calories'] for a in today_activities]))
    activity_gauge.save(today_utc(), total_activities)
    calorie_gauge.save(today_utc(), total_calories)
    logger.info('Saved {0} activities ({1} cal) for {2}'
                .format(total_activities, total_calories, today_utc()))
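# A standalone sketch of the pytz pitfall avoided above: replace(tzinfo=...)
# attaches the zone's base (LMT) entry, while localize() picks the offset
# actually in force at that instant. Only pytz and the stdlib are assumed;
# the zone and date are illustrative.
from datetime import datetime as _dt_tz
from pytz import timezone as _tz_tz

_amsterdam = _tz_tz('Europe/Amsterdam')
_wall = _dt_tz(2014, 7, 1, 12, 0)
# _wall.replace(tzinfo=_amsterdam) -> tagged with the zone's base LMT offset
# _amsterdam.localize(_wall)       -> tagged with the correct CEST (+02:00) offset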
def load(self, timestamp):
    """Return (stored_datetime, loaded_datetime).

    The stored_datetime is the timestamp actually stored in Postgres,
    which may or may not be the timestamp we saved. The loaded_datetime
    is the timestamp we end up with using this method.
    """
    select = {
        'timestamp_explicit': "timestamp at time zone '{}'".format(self.tz.zone),
        'timestamp_stored': "timestamp at time zone 'UTC'",
    }
    loaded_attr = ('timestamp' if self.conversion == 'implicit'
                   else 'timestamp_explicit')
    qs = Timestamp.objects.extra(select=select)
    timestamp = qs.get(pk=timestamp.pk)
    stored_datetime = UTC.localize(timestamp.timestamp_stored)
    loaded_datetime = self.tz.localize(getattr(timestamp, loaded_attr))
    return stored_datetime, loaded_datetime
def test_timezone(self):
    dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
    utc = UTC.localize(dt)
    berlin = timezone('Europe/Berlin').localize(dt)
    eastern = berlin.astimezone(timezone('US/Eastern'))
    data = {
        "points": [
            {"measurement": "A", "fields": {"val": 1}, "time": 0},
            {"measurement": "A", "fields": {"val": 1},
             "time": "2009-11-10T23:00:00.123456Z"},
            {"measurement": "A", "fields": {"val": 1}, "time": dt},
            {"measurement": "A", "fields": {"val": 1}, "time": utc},
            {"measurement": "A", "fields": {"val": 1}, "time": berlin},
            {"measurement": "A", "fields": {"val": 1}, "time": eastern},
        ]
    }
    self.assertEqual(
        line_protocol.make_lines(data),
        '\n'.join([
            'A val=1i 0',
            'A val=1i 1257894000123456000',
            'A val=1i 1257894000123456000',
            'A val=1i 1257894000123456000',
            'A val=1i 1257890400123456000',
            'A val=1i 1257890400123456000',
        ]) + '\n'
    )
def to_utc(dt):
    if not isinstance(dt, datetime):
        dt = datetime(dt.year, dt.month, dt.day)
    if dt.tzinfo is None:
        return UTC.localize(dt)
    else:
        return dt.astimezone(UTC)
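# Usage sketch for to_utc() above, covering the three input shapes it
# accepts (date, naive datetime, aware datetime); values are illustrative.
from datetime import date as _date_u, datetime as _dt_u
from pytz import timezone as _tz_u

# to_utc(_date_u(2021, 3, 1))        -> 2021-03-01 00:00:00+00:00
# to_utc(_dt_u(2021, 3, 1, 8, 30))   -> 2021-03-01 08:30:00+00:00
# to_utc(_tz_u('US/Pacific').localize(_dt_u(2021, 3, 1, 8, 30)))
#                                    -> 2021-03-01 16:30:00+00:00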
def _convert_timestamp(timestamp, precision=None):
    if isinstance(timestamp, Integral):
        return timestamp  # assume precision is correct if timestamp is int

    if isinstance(_get_unicode(timestamp), text_type):
        timestamp = parse(timestamp)

    if isinstance(timestamp, datetime):
        if not timestamp.tzinfo:
            timestamp = UTC.localize(timestamp)

        ns = (timestamp - EPOCH).total_seconds() * 1e9
        if precision is None or precision == 'n':
            return ns
        elif precision == 'u':
            return ns / 1e3
        elif precision == 'ms':
            return ns / 1e6
        elif precision == 's':
            return ns / 1e9
        elif precision == 'm':
            return ns / 1e9 / 60
        elif precision == 'h':
            return ns / 1e9 / 3600

    raise ValueError(timestamp)
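# Sketch of _convert_timestamp() above for one instant at each supported
# precision flag, assuming EPOCH is the UTC Unix epoch; values are
# illustrative.
from datetime import datetime as _dt_p

# _t = _dt_p(1970, 1, 1, 1, 0)   # one hour past the epoch, naive -> UTC
# _convert_timestamp(_t)         -> 3.6e12   (nanoseconds, the default)
# _convert_timestamp(_t, 'u')    -> 3.6e9    (microseconds)
# _convert_timestamp(_t, 'ms')   -> 3.6e6    (milliseconds)
# _convert_timestamp(_t, 's')    -> 3600.0   (seconds)
# _convert_timestamp(_t, 'm')    -> 60.0     (minutes)
# _convert_timestamp(_t, 'h')    -> 1.0      (hours)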
def login_patch(auth, username=None, password=None):
    if ((username is None and password is None) or
            (username == 'jane' and password == 'secret')):
        auth._token = '1a2b3c'
        auth._lifespan = 3600
        auth._expires = UTC.localize(datetime.now() + timedelta(hours=1))
        return True
    raise AuthenticationFailed()
def localize_timezone(datetime, tz=None):
    if not datetime.tzinfo:
        datetime = UTC.localize(datetime)
    if not tz:
        tz = get_timezone()
    if isinstance(tz, basestring):
        tz = timezone(tz)
    return datetime.astimezone(tz)
def apply_tzdatabase_timezone(date_time, pytz_string):
    date_time = UTC.localize(date_time)
    usr_timezone = timezone(pytz_string)

    if date_time.tzinfo != usr_timezone:
        date_time = date_time.astimezone(usr_timezone)

    return date_time.replace(tzinfo=None)
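# Sketch of apply_tzdatabase_timezone() above: the naive input is read as
# UTC and a naive datetime in the target zone comes back; the values are
# illustrative.
from datetime import datetime as _dt_a

# apply_tzdatabase_timezone(_dt_a(2021, 7, 4, 12, 0), 'Asia/Tokyo')
#     -> datetime(2021, 7, 4, 21, 0)   # 12:00 UTC is 21:00 JST, tzinfo stripped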
def _assert_release_by_creator(creator, spr):
    release_records = store.find(
        LatestPersonSourcePackageReleaseCache,
        LatestPersonSourcePackageReleaseCache.creator_id == creator.id)
    [record] = list(release_records)
    self.assertEqual(spr.creator, record.creator)
    self.assertIsNone(record.maintainer_id)
    self.assertEqual(
        spr.dateuploaded, UTC.localize(record.dateuploaded))
def apply_timezone(date_time, tz_string):
    if not date_time.tzinfo:
        date_time = UTC.localize(date_time)
    new_datetime = apply_dateparser_timezone(date_time, tz_string)

    if not new_datetime:
        new_datetime = apply_tzdatabase_timezone(date_time, tz_string)

    return new_datetime
def __init__(self, channel_participation, type, source, message, subtype=None):
    self.channel_participation = channel_participation
    self.channel_participation_id = channel_participation.id
    self.type = type
    self.subtype = subtype
    self.source = source
    self.msg = message
    self.stamp = UTC.localize(datetime.datetime.utcnow())
    self.channel_participation.channel.update_last_entry(self.stamp)
    Session.save(self.channel_participation.channel)
def update_index(self):
    last_index_dt = UTC.localize(datetime(*gmtime(self.last_index)[:6]))
    now = UTC.localize(datetime.utcnow())
    idx = self.get_index()
    writer = idx.writer()
    try:
        chmgr = IRCChannelManager(self.env)
        for channel in chmgr.channels():
            for line in channel.events_in_range(last_index_dt, now):
                if line['type'] == 'comment':
                    content = "<%s> %s" % (line['nick'], line['comment'])
                    writer.add_document(
                        channel=channel.name(),
                        timestamp=line['timestamp'].strftime(
                            self.TIMESTAMP_FORMAT),
                        content=content
                    )
                if line['type'] == 'action':
                    content = "* %s %s" % (line['nick'], line['action'])
                    writer.add_document(
                        channel=channel.name(),
                        timestamp=line['timestamp'].strftime(
                            self.TIMESTAMP_FORMAT),
                        content=content
                    )
        # START BULLSHIT
        # Python can't turn a nonlocal datetime to an epoch time AFAICT.
        # This pisses me off to no end. Who knows what kind of f****d
        # up side effects this has.
        os.environ['TZ'] = 'UTC'
        tzset()
        # END BULLSHIT
        epoch_now = int(mktime(now.timetuple()))
        self.config['irclogs'].set('last_index', epoch_now)
        self.config.save()
        writer.commit()
        idx.close()
    except Exception, e:
        writer.cancel()
        idx.close()
        raise e
def to_utc_date(dt):
    """Convert a localized datetime object to a UTC one.

    This method will adjust the time data, to make sure the conversion
    from the localized timezone to UTC is correct.

    :param dt: a localized datetime object.
    :type dt: :class:`datetime.datetime`
    :return: a UTC datetime object.
    """
    return UTC.normalize(dt.astimezone(UTC))
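# Sketch of to_utc_date() above across a DST boundary, assuming pytz; the
# values are illustrative.
from datetime import datetime as _dt_d
from pytz import timezone as _tz_d

# _est = _tz_d('US/Eastern').localize(_dt_d(2021, 3, 14, 1, 30))
# to_utc_date(_est).isoformat()  -> '2021-03-14T06:30:00+00:00'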
def search(self, terms):
    chmgr = IRCChannelManager(self.env)
    ix = self.get_index()
    searcher = ix.searcher()
    parsed_terms = self.PARSER.parse(' or '.join(terms))
    if terms:
        for f in searcher.search(parsed_terms):
            timestamp = strptime(f['timestamp'], self.TIMESTAMP_FORMAT)
            f['timestamp'] = \
                UTC.localize(datetime(*timestamp[:6]))
            yield f
def on_topic(self, c, e):
    args = e.arguments()
    if len(args) < 2:
        channel = e.target().lstrip(''.join(CHANNEL_PREFIXES))
        topic = args[0]
    else:
        channel = args[0].lstrip(''.join(CHANNEL_PREFIXES))
        topic = args[1]
    changed_by = irclib.nm_to_n(e.source())
    channel_participation_id = self.channels[channel]
    channel_participation = model.Session.query(model.ChannelParticipation) \
        .get(channel_participation_id)
    channel_topic = model.Session.query(model.ChannelTopic) \
        .get(channel_participation_id)
    channel = channel_participation.channel
    if not channel_topic:
        log.debug("Setting topic for channel %s%s by %s." %
                  (channel.channel_prefix, channel.channel_name, changed_by))
        channel_topic = model.ChannelTopic(channel_participation, topic)
        model.Session.save(channel_topic)
        model.Session.commit()
    elif channel_topic.topic != topic:
        log.debug('Topic for channel %s%s changed by %s, updating DB' %
                  (channel.channel_prefix, channel.channel_name, changed_by))
        channel_topic.topic = topic
        model.Session.save(channel_topic)
        model.Session.commit()
    channel_topic_info = model.Session.query(model.ChannelTopicInfo) \
        .get(channel_participation_id)
    changed_on = UTC.localize(datetime.utcnow())
    if not channel_topic_info:
        log.debug('on_topic ChannelTopicInfo for %s non-existent' % channel)
        channel_topic_info = model.ChannelTopicInfo(channel_participation,
                                                    changed_by, changed_on)
    else:
        if channel_topic_info.changed_by != changed_by or \
                channel_topic_info.changed_on != changed_on:
            log.debug(self.__class__.__name__ +
                      ' updating topic info for channel %s...' % channel)
            if channel_topic_info.changed_by != changed_by:
                channel_topic_info.changed_by = changed_by
            if channel_topic_info.changed_on != changed_on:
                channel_topic_info.changed_on = changed_on
    model.Session.save(channel_topic_info)
    model.Session.commit()
    message = "%s changed the topic to '%s'" % (changed_by, topic)
    self.write_event(channel.channel_name, 'topic', '*****', message)
    model.Session.remove()
def get_tzinfo(user):
    if user.is_authenticated():
        from pytz import UnknownTimeZoneError, UTC
        try:
            tz = user.get_profile().timezone
            # delt = tz.utcoffset(datetime.now())
            # NOTICE: Not to use utcoffset, value could be wrong on server side
            now = datetime.now()
            delt = UTC.localize(now) - tz.localize(now)
            # timedelta attrs: days, seconds, microseconds
            offset = delt.days * 86400 + delt.seconds
            return {'tzname': str(tz), 'offset': offset}
        except (UserProfile.DoesNotExist, UnknownTimeZoneError):
            return None
def addNewsItem(self, aNewsItem):
    self.checkNewsTable()
    if aNewsItem.isSuccessful():
        ticker = aNewsItem.fullfeed.ticker
        service = aNewsItem.service
        fullsource = aNewsItem.fullsource
        title = aNewsItem.feedentry.title
        desc = aNewsItem.feedentry.description
        pub = UTC.localize(datetime.strptime(
            aNewsItem.feedentry.published,
            "%a, %d %b %Y %H:%M:%S %Z")).isoformat()
        newsid = aNewsItem.feedentry.id
        link = aNewsItem.url
        self.cursor.execute(
            "INSERT INTO News VALUES (?,?,?,?,?,?,?,?)",
            (ticker, service, title, pub, desc, link, newsid, fullsource,))
        self.databaseconnection.commit()
def get_legs(result):
    legs = result['Legs']
    segments = result['Segments']
    for leg in legs:
        l, created = Leg.objects.get_or_create(
            id=leg['Id'],
            departure_place=Place.objects.get(pk=leg['OriginStation']),
            arrival_place=Place.objects.get(pk=leg['DestinationStation']),
            departure=UTC.localize(parse_datetime(leg['Departure']), is_dst=True),
            arrival=UTC.localize(parse_datetime(leg['Arrival']), is_dst=True),
            duration=leg['Duration'],
            directionality=leg['Directionality'],
            journey_mode=JourneyMode.objects.get_or_create(
                name=leg['JourneyMode'])[0])
        carriers = leg['Carriers']
        ocarriers = leg['OperatingCarriers']
        stops = leg['Stops']
        lsegments = leg['SegmentIds']
        for carrier in carriers:
            l.carriers.add(Carrier.objects.get(pk=carrier))
        for ocarrier in ocarriers:
            l.operating_carriers.add(Carrier.objects.get(pk=ocarrier))
        for stop in stops:
            if stop != 0:
                l.stops.add(Place.objects.get(pk=stop))
        for lsegment in lsegments:
            l.segments.add(update_segment(lsegment, segments))
        l.save()
def datetime_filter(value, format='medium', locale=None, usertz=True):  # NOQA: A002
    if isinstance(value, datetime) and usertz:
        if value.tzinfo is None:
            dt = UTC.localize(value).astimezone(get_timezone())
        else:
            dt = value.astimezone(get_timezone())
    else:
        dt = value
    return format_datetime(
        dt, format=format, locale=locale if locale else get_locale())
def name_get(self, cr, uid, ids, context=None):
    result = {}
    local_tz = pytz.timezone('America/Guayaquil')
    if context and "tz" in context.keys():
        local_tz = pytz.timezone(context["tz"])
    for route in self.browse(cr, uid, ids, context=context):
        if route.customer_partner_id and route.request_date:
            fecha_ruta = datetime.strptime(route.request_date,
                                           '%Y-%m-%d %H:%M:%S')
            fecha_ruta = UTC.localize(fecha_ruta, is_dst=False)
            result[route.id] = (route.customer_partner_id.name_get()[0][1] +
                                "[" +
                                fecha_ruta.astimezone(local_tz)
                                .strftime('%Y-%m-%d %H:%M:%S') + "]")
        else:
            result[route.id] = "Ruta:" + str(route.id)
    return result.items()
def iter_text(_statuses, _filter):
    for s in _statuses:
        if _filter is not None:
            if 'created_at' not in s:
                continue
            else:
                created_at = s.get('created_at')
                created = (parser.parse(created_at, fuzzy=True)
                           if isinstance(created_at, basestring)
                           else utc.localize(created_at))
                if created > _filter:
                    continue
        text = s.get('text').encode('UTF-8')
        yield text
def valid_payout_user_direct(sponsor_id, member, last_date, next_date, sponsor_pkg):
    doj = UTC.normalize(member.child_id.date_joined)
    # check if member is active
    pkg = get_package(member.child_id)
    # check if member falls within this cycle.
    # check if is a direct sponsor
    try:
        if pkg:
            return ((last_date <= doj < next_date) and
                    (member.child_id.profile.sponser_id.profile.user_auto_id == sponsor_id) and
                    pkg >= sponsor_pkg)
        return False
    except Exception as e:
        print e.message
        return False
def valid_payout_user(sponsor_id, member, last_date, next_date, dry=True):
    """Filter users that have their Date of Joining between the last payout
    and the next payout day.

    - params:
        dry: For a dry run, no side effects if True. If False, generates a
            Direct Type Transaction. Defaults to True.
    """
    doj = UTC.normalize(member.child_id.date_joined)
    # check if member is active
    pkg = get_package(member.child_id)
    # check if member falls within this cycle.
    # check if is a direct sponsor
    try:
        return ((last_date <= doj < next_date) and
                (member.child_id.profile.sponser_id.profile.user_auto_id == sponsor_id) and
                pkg)
    except:
        return False
def property_data_hora_setter(self, valor):
    if isinstance(valor, datetime.datetime):
        if not valor.tzinfo:
            valor = self.fuso_horario.localize(valor)
        return UTC.normalize(valor)
    elif isinstance(valor, datetime_sem_fuso):
        valor = self.fuso_horario.localize(valor)
        return property_data_hora_setter(self, valor)
    elif isinstance(valor, basestring):
        valor = parse_datetime(valor)
        return property_data_hora_setter(self, valor)
def handle(self, *args, **options):
    for ws in WeatherStation.objects.all():
        try:
            required_days = [
                i.strftime('%Y-%m-%d')
                for i in pd.date_range(start=datetime(2009, 1, 1, tzinfo=UTC),
                                       end=timezone.now())]
            stored_days = ws.get_list_of_days()
            days_to_load = [i for i in required_days if i not in stored_days]
            for j in days_to_load:
                result = ws.load_weather_day(
                    UTC.localize(datetime.strptime(j, '%Y-%m-%d')))
        except:
            print 'Failed to completely update WeatherStation %s.' % ws.name
        else:
            print 'Updated WeatherStation %s.' % ws.name
def test_retry_lockout_active(self):
    APIRateLimitTimestamp.objects.create(
        api_type="untappd",
        rate_limit_expires_at=UTC.localize(
            datetime.datetime(2019, 11, 15, 3, 0, 0)),
    )
    responses.add(
        responses.GET,
        self.untappd_url,
        status=200,
        # will trigger an exception if this gets called
        body="{}",
    )
    self.assertFalse(UntappdMetadata.objects.exists())
    with self.assertRaises(Retry):
        look_up_beer(self.beer.id)
    self.assertFalse(
        UntappdMetadata.objects.filter(beer=self.beer).exists())
    untappd_timestamp = APIRateLimitTimestamp.objects.get()
    self.assertEqual(
        untappd_timestamp.rate_limit_expires_at,
        UTC.localize(datetime.datetime(2019, 11, 15, 3, 0, 0)),
    )
def serialize(self, request):
    href = request.urlgen(
        "mediagoblin.api.object",
        object_type=self.object_type,
        id=self.id,
        qualified=True
    )
    published = UTC.localize(self.published)
    updated = UTC.localize(self.updated)
    obj = {
        "id": href,
        "actor": self.get_actor.serialize(request),
        "verb": self.verb,
        "published": published.isoformat(),
        "updated": updated.isoformat(),
        "content": self.content,
        "url": self.get_url(request),
        "object": self.get_object.serialize(request),
        "objectType": self.object_type,
        "links": {
            "self": {
                "href": href,
            },
        },
    }

    if self.generator:
        obj["generator"] = self.get_generator.serialize(request)

    if self.title:
        obj["title"] = self.title

    target = self.get_target
    if target is not None:
        obj["target"] = target.serialize(request)

    return obj
def test_display(self):
    instance = Event.objects.create(
        title='test',
        venue=self.venue,
        start_time=UTC.localize(datetime.datetime(2018, 11, 20, 12)),
        end_time=UTC.localize(datetime.datetime(2018, 11, 20, 16)),
    )
    serializer = EventSerializer(instance=instance)
    for field, value in serializer.data.items():
        if field == 'venue':
            self.assertEqual(
                value,
                VenueSerializer(self.venue).data,
                field,
            )
        elif field.endswith('_time'):
            self.assertEqual(
                value,
                DateTimeField().to_representation(
                    getattr(instance, field),
                ),
                field,
            )
        else:
            self.assertEqual(value, getattr(instance, field), field)
def parse_tap(self, tap):
    """Parse tap info from JSON entry."""
    ret = {
        "added": dateutil.parser.parse(tap["DatePutOn"]),
        "updated": now(),
        "tap_number": tap["MenuItemDisplayDetail"]["DisplayOrder"],
        "percent_full": tap["MenuItemProductDetail"]["PercentFull"],
        "gas_type": (tap["MenuItemProductDetail"]["KegType"] or "").lower(),
    }
    if refresh_ts := tap.get("LastRefreshDateTime"):
        ret["updated"] = dateutil.parser.parse(refresh_ts)
        if not ret["updated"].tzinfo:
            ret["updated"] = UTC.localize(ret["updated"])
        LOG.debug("Tap time updated set to %s", ret["updated"])
    return ret
def serialize(self, request):
    href = request.urlgen(
        "mediagoblin.api.object",
        object_type=self.object_type,
        id=self.id,
        qualified=True
    )
    published = UTC.localize(self.published)
    updated = UTC.localize(self.updated)
    obj = {
        "id": href,
        "actor": self.get_actor.serialize(request),
        "verb": self.verb,
        "published": published.isoformat(),
        "updated": updated.isoformat(),
        "content": self.content,
        "url": self.get_url(request),
        "object": self.object().serialize(request),
        "objectType": self.object_type,
        "links": {
            "self": {
                "href": href,
            },
        },
    }

    if self.generator:
        obj["generator"] = self.get_generator.serialize(request)

    if self.title:
        obj["title"] = self.title

    if self.target_id is not None:
        obj["target"] = self.target().serialize(request)

    return obj
def test_timestamp(self):
    """Test timezone in TestLineProtocol object."""
    dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
    utc = UTC.localize(dt)
    berlin = timezone('Europe/Berlin').localize(dt)
    eastern = berlin.astimezone(timezone('US/Eastern'))

    exp_utc = 'A val=1i 1257894000123456000'
    exp_est = 'A val=1i 1257890400123456000'

    point = Point.measurement("A").field("val", 1).time(dt)
    self.assertEqual(point.to_line_protocol(), exp_utc)
    self.assertEqual(point.time(utc).to_line_protocol(), exp_utc)
    self.assertEqual(point.time(berlin).to_line_protocol(), exp_est)
    self.assertEqual(point.time(eastern).to_line_protocol(), exp_est)
def _log_janitor(self, days_to_keep: int) -> None:
    """Prunes our on-disk logs"""
    first_key = next(iter(self["channel_action_log"]))
    if UTC.localize(datetime.utcnow()) - parse(first_key) > timedelta(
            days=days_to_keep):
        with synchronized(CAL_LOCK):
            cal_log = self["channel_action_log"]
            cal_log.pop(first_key, None)
            self["channel_action_log"] = cal_log
    cal_log = self["channel_action_log"]
    today = datetime.now().strftime("%Y-%m-%d")
    # iterate over a copy of the keys so entries can be popped mid-loop
    for key in list(cal_log.keys()):
        if len(cal_log[key]) == 0 and key != today:
            cal_log.pop(key)
    self["channel_action_log"] = cal_log
def handle(self, *args, **options):
    dt = options['date'] + ' ' + options['time']
    measurement = SQTVaisalaMeasurement(
        time=UTC.localize(datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')),
        wind_speed=options['wind_speed'],
        wind_direction=options['wind_direction'],
        air_temperature=options['air_temperature'],
        air_humidity=options['air_humidity'],
        air_pressure=options['air_pressure'],
        rain_amount=options['rain_amount'],
        heater_temperature=options['heater_temperature'],
        heater_voltage=options['heater_voltage']
    )
    measurement.save()
def post(self, request, verify_token):
    try:
        verification_token = EmailVerificationToken.objects.get(
            verification_token=verify_token
        )
    except EmailVerificationToken.DoesNotExist:
        return Response(
            {"message": "error.emailVerificationFailed"},
            status=status.HTTP_401_UNAUTHORIZED,
        )
    if utc.localize(
        datetime.datetime.now()
    ) < verification_token.creation_date + datetime.timedelta(hours=24):
        verification_token.user.email_verified = True
        verification_token.user.save()
        # auto log the user in after verifying their email
        login(request, verification_token.user)
        # delete the verification token so it can't be used again
        verification_token.delete()
        return Response()
    else:
        new_token = EmailVerificationToken.objects.create(
            user=verification_token.user
        )
        url = f"{config.SITE_URL}/profile/email/{new_token.verification_token}/verify/"
        new_token.user.email_link(
            "Action Required: Verify Email",
            "Verify Email",
            "Verify Email",
            "Please verify your email address to activate your account.",
            url,
            "Verify Now",
        )
        verification_token.delete()
        return Response(
            {"message": "error.emailVerificationExpired"},
            status=status.HTTP_403_FORBIDDEN,
        )
def test_retry_lockout_expired(self):
    APIRateLimitTimestamp.objects.create(
        api_type="untappd",
        rate_limit_expires_at=UTC.localize(
            datetime.datetime(2019, 11, 15, 3, 0, 0)),
    )
    responses.add(
        responses.GET,
        self.untappd_url,
        status=200,
        body=self.json_data,
    )
    self.assertFalse(UntappdMetadata.objects.exists())
    look_up_beer(self.beer.id)
    self.assertTrue(
        UntappdMetadata.objects.filter(beer=self.beer).exists())
    self.assertFalse(APIRateLimitTimestamp.objects.exists())
class Migration(migrations.Migration):

    dependencies = [
        ('pretixbase', '0197_auto_20210914_0814'),
    ]

    operations = [
        migrations.AddField(
            model_name='invoice',
            name='sent_to_customer',
            field=models.DateTimeField(
                blank=True, null=True,
                default=UTC.localize(datetime(1970, 1, 1, 0, 0, 0, 0))),
            preserve_default=False,
        ),
    ]
def test_retry_no_existing_data(self):
    responses.add(
        responses.GET,
        self.untappd_url,
        status=429,
        headers=self.limit_headers,
    )
    self.assertFalse(UntappdMetadata.objects.exists())
    with self.assertRaises(Retry):
        look_up_beer(self.beer.id)
    self.assertFalse(
        UntappdMetadata.objects.filter(beer=self.beer).exists())
    untappd_timestamp = APIRateLimitTimestamp.objects.get()
    self.assertEqual(
        untappd_timestamp.rate_limit_expires_at,
        UTC.localize(datetime.datetime(2019, 11, 15, 3, 0, 0)),
    )
def create_fake_users(count, verbosity=1):
    randomuser_me = 'https://randomuser.me/api/'
    user_count = count
    params = {
        'inc': 'email,name,registered,login',
        'password': '******',
        'results': user_count
    }
    success = 0
    msg = 0
    response = requests.get(randomuser_me, params)
    data = response.json()
    if data.get('error'):
        error_msg = data.get('error')
        success = 1
        msg = error_msg
    else:
        results = data.get('results')
        if results:
            for item in results:
                email = item.get('email')
                first_name = item.get('name').get('first')
                last_name = item.get('name').get('last')
                date_joined = item.get('registered')
                username = item.get('login').get('username')
                password = item.get('login').get('password')
                # e.g. 2014-04-16 16:18:56
                date_joined = datetime.strptime(date_joined,
                                                '%Y-%m-%d %H:%M:%S')
                date_joined = UTC.localize(date_joined)
                # todo: use bulk insert
                user_created, created = User.objects.get_or_create(
                    email=email,
                    first_name=first_name,
                    last_name=last_name,
                    username=username,
                    date_joined=date_joined)
                if created:
                    user_created.set_password(password)
                if verbosity > 1:
                    print('user: {}, created: {}'.format(user_created, created))
    return success, msg
def get(cls, date=None):
    if date is None:
        date = datetime.now(UTC)
    else:
        date = date.astimezone(UTC) if date.tzinfo else UTC.localize(date)

    pdt_start = date.replace(month=3, day=15, hour=9, minute=0, second=0)
    pdt_start -= timedelta(days=pdt_start.weekday() + 1)
    pdt_finish = date.replace(month=11, day=8, hour=8, minute=59, second=59)
    pdt_finish -= timedelta(days=pdt_finish.weekday() + 1)

    shift = -7 if pdt_start <= date <= pdt_finish else -8
    return shift
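# Sketch of get() above: it approximates the US Pacific UTC offset by
# bracketing daylight-saving time (the last Sunday strictly before Mar 15
# and Nov 8, i.e. the second Sunday in March and the first Sunday in
# November); the sample dates are illustrative.
from datetime import datetime as _dt_g

# get(_dt_g(2021, 7, 1, 12, 0))   -> -7   # PDT in effect
# get(_dt_g(2021, 12, 1, 12, 0))  -> -8   # PST in effect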
def shortdate(value):
    if isinstance(value, datetime):
        tz = get_timezone()
        if value.tzinfo is None:
            dt = UTC.localize(value).astimezone(tz)
        else:
            dt = value.astimezone(tz)
        utc_now = request_timestamp().astimezone(tz)
    else:
        dt = value
        utc_now = request_timestamp().date()
    if dt > (utc_now - timedelta(
            days=int(current_app.config.get('SHORTDATE_THRESHOLD_DAYS', 0)))):
        return dt.strftime('%e %b')
    else:
        # The string replace hack is to deal with inconsistencies in the
        # underlying implementation of strftime.
        # See https://bugs.python.org/issue8304
        return six.text_type(dt.strftime("%e %b '%y")).replace("'", "’")
def handle(self, *args, **options):
    if Note.objects.exists() or Stock.objects.exists():
        print('Stock data already loaded...exiting.')
        print(ALREADY_LOADED_ERROR_MESSAGE)
        return
    print("Creating note data")
    exchange_object = Exchange(ticker='NASDAQ')
    exchange_object.save()
    print("Loading stock data")
    for row in DictReader(open('./stocks_db_initial.csv')):
        stock = Stock()
        stock.ticker = row['Ticker']
        stock.company = row['Company']
        stock.updater = row['Updater']
        stock.price = row['Price']
        stock.exchange = exchange_object
        stock.save()
        raw_stocks_notes = row['Notes']
        all_notes_array = raw_stocks_notes.split(' & ')
        print(all_notes_array)
        for anote in all_notes_array:
            note_details = anote.split(' | ')
            note_title = note_details[0]
            note_body = note_details[1]
            note_body_array = note_body.split(' $ ')
            note_real_body = note_body_array[0]
            raw_note_time = note_body_array[1]
            note_time = UTC.localize(
                datetime.strptime(raw_note_time, DATETIME_FORMAT))
            newnote = Note(title=note_title, note_text=note_real_body,
                           submition_date=note_time)
            newnote.save()
            stock.notes.add(newnote)
        stock.save()
def format_timestamp(date):
    """Format timestamp as required by the InfluxDB line protocol.

    Parameters
    ----------
    date : `str`
        Timestamp string

    Returns
    -------
    timestamp : `int`
        Timestamp in nanosecond-precision Unix time.
        See https://docs.influxdata.com/influxdb/v1.6/write_protocols/
    """
    epoch = UTC.localize(datetime.utcfromtimestamp(0))
    timestamp = int((parse(date) - epoch).total_seconds() * 1e9)
    return timestamp
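# A quick check of format_timestamp() above, assuming dateutil's parse()
# and pytz; the expected values are illustrative.
# format_timestamp('1970-01-01T00:00:01Z')  -> 1000000000  (1 s in ns)
# format_timestamp('2009-11-10T23:00:00Z')  -> 1257894000000000000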
def default(self, obj):  # lint-amnesty, pylint: disable=arguments-differ, method-hidden
    """
    Serialize datetime and date objects to iso format.
    datetime objects are converted to UTC.
    """
    if isinstance(obj, datetime):
        if obj.tzinfo is None:
            # Localize naive datetime objects to UTC
            obj = UTC.localize(obj)  # lint-amnesty, pylint: disable=no-value-for-parameter
        else:
            # Convert datetime objects from other timezones to UTC
            obj = obj.astimezone(UTC)
        return obj.isoformat()
    elif isinstance(obj, date):
        return obj.isoformat()
    return super(DateTimeJSONEncoder, self).default(obj)  # lint-amnesty, pylint: disable=super-with-arguments
def default(self, obj):  # pylint: disable=method-hidden
    """
    Serialize datetime and date objects to iso format.
    datetime objects are converted to UTC.
    """
    if isinstance(obj, datetime):
        if obj.tzinfo is None:
            # Localize naive datetime objects to UTC
            obj = UTC.localize(obj)
        else:
            # Convert datetime objects from other timezones to UTC
            obj = obj.astimezone(UTC)
        return obj.isoformat()
    elif isinstance(obj, date):
        return obj.isoformat()
    return super(DateTimeJSONEncoder, self).default(obj)
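# Usage sketch for the encoder above, assuming DateTimeJSONEncoder
# subclasses json.JSONEncoder as its super() call implies; the sample
# value is illustrative.
import json
from datetime import datetime as _dt_j

# json.dumps({'at': _dt_j(2020, 5, 1, 9, 30)}, cls=DateTimeJSONEncoder)
#     -> '{"at": "2020-05-01T09:30:00+00:00"}'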
def setup_logging(loglevel, logpath):
    logname = logpath + "-" + UTC.localize(
        datetime.utcnow()).strftime("%Y%m%d_%H%M%S_%f")
    if loglevel == "debug":
        level = logging.DEBUG
    elif loglevel == "error":
        level = logging.ERROR
    elif loglevel == "warning":
        level = logging.WARNING
    else:
        level = logging.INFO
    logging.basicConfig(
        filename=logname,
        # format="%(asctime)s:%(levelname)s:%(module)s:%(message)s",
        format="%(asctime)s_%(msecs)03d:%(levelname)s:%(message)s",
        datefmt="%Y%m%d_%H%M%S",
        level=level,
    )
    logging.Formatter.converter = time.gmtime
def test_localize(self):
    dt = datetime(1992, 10, 30, 12, 0, 0, 0, tzinfo=None)

    # plain to UTC
    dt2 = UTC.localize(dt)
    self.assertEqual(
        (dt2.month, dt2.day, dt2.hour, dt2.minute),
        (10, 30, 12, 0))

    # plain to Shanghai
    tz = timezone('Asia/Shanghai')
    dt3 = tz.localize(dt)
    self.assertEqual(
        (dt3.month, dt3.day, dt3.hour, dt3.minute),
        (10, 30, 12, 0))

    # UTC to Shanghai
    dt4 = dt2.astimezone(tz)
    self.assertEqual(
        (dt4.month, dt4.day, dt4.hour, dt4.minute),
        (10, 30, 20, 0))

    # Shanghai to UTC
    dt5 = dt3.astimezone(UTC)
    self.assertEqual(
        (dt5.month, dt5.day, dt5.hour, dt5.minute),
        (10, 30, 4, 0))
def updated_configs(self, bq_project: str, bq_dataset: str) -> List[ExternalConfig]:
    """
    Return external configs that have been updated/added and with
    associated BigQuery tables being out of date.
    """
    client = bigquery.Client(bq_project)
    job = client.query(
        fr"""
        SELECT table_name, REGEXP_EXTRACT_ALL(
            option_value,
            '.*STRUCT\\(\"last_updated\", \"(.+)\"\\).*'
        ) AS last_updated
        FROM {bq_dataset}.INFORMATION_SCHEMA.TABLE_OPTIONS
        WHERE option_name = 'labels'
        """
    )
    result = list(job.result())

    updated_configs = []

    for config in self.configs:
        seen = False
        table_prefix = bq_normalize_name(config.slug)
        for row in result:
            if not row.table_name.startswith(table_prefix):
                continue
            seen = True
            if not len(row.last_updated):
                continue
            table_last_updated = UTC.localize(
                dt.datetime.utcfromtimestamp(int(row.last_updated[0]))
            )
            if table_last_updated < config.last_modified:
                updated_configs.append(config)
                break
        if not seen:
            updated_configs.append(config)

    return updated_configs
def data_hora_horario_brasilia(data):
    if isinstance(data, basestring):
        data = parse_datetime(data)

    if not isinstance(data, (datetime.datetime, datetime_sem_fuso, date, time)):
        return None

    if isinstance(data, datetime.datetime):
        if not data.tzinfo:
            data = fuso_horario_sistema().localize(data)
    elif isinstance(data, date):
        #
        # Promote the date to a datetime at noon, so the conversion
        # doesn't push the date back to the previous day;
        # afterwards, set the time back to midnight
        #
        data = datetime_sem_fuso(data.year, data.month, data.day, 12, 0, 0, 0)
        data = data_hora_horario_brasilia(data)
        data = data + relativedelta(hour=0, minute=0, second=0, microsecond=0)
        return data
    elif isinstance(data, time):
        #
        # Time without a date: assume today's date
        #
        hora = data
        data = datetime.datetime.now()
        data = data_hora_horario_brasilia(data)
        data = data + relativedelta(hour=hora.hour, minute=hora.minute,
                                    second=hora.second,
                                    microsecond=hora.microsecond)
        return data
    elif isinstance(data, datetime_sem_fuso):
        data = fuso_horario_sistema().localize(data)

    data = UTC.normalize(data)
    data = HB.normalize(data)

    return data
def _copy_department_shifts(service, to_department, from_department, dept_role_map):
    from_config = service.config.info()
    FROM_EPOCH = c.EVENT_TIMEZONE.localize(
        datetime.strptime(from_config['EPOCH'], '%Y-%m-%d %H:%M:%S.%f'))
    EPOCH_DELTA = c.EPOCH - FROM_EPOCH

    for from_job in from_department['jobs']:
        to_job = Job(
            name=from_job['name'],
            description=from_job['description'],
            duration=from_job['duration'],
            type=from_job['type'],
            extra15=from_job['extra15'],
            slots=from_job['slots'],
            start_time=UTC.localize(
                dateparser.parse(from_job['start_time'])) + EPOCH_DELTA,
            visibility=from_job['visibility'],
            weight=from_job['weight'],
            department_id=to_department.id)
        for from_required_role in from_job['required_roles']:
            to_job.required_roles.append(dept_role_map[from_required_role['id']])
        to_department.jobs.append(to_job)
def handle(self, *args, **options):
    if DashboardData.objects.exists():
        print('Dashboard data already loaded...exiting.')
        print(ALREADY_LOADED_ERROR_MESSAGE)
        return
    print("Loading dashboard data for manufacturing process")
    for row in DictReader(open('./dashBoardData.csv')):
        data = DashboardData()
        data.pulsometer_readout = row['Pulsometer_readout']
        data.engine_efficiency = float(row['Engine_efficiency'])
        data.red_value = row['red_Value']
        data.blue_value = row['blue_Value']
        data.green_value = row['green_Value']
        raw_timestamp = row['time_stamp']
        timestamp = UTC.localize(
            datetime.strptime(raw_timestamp, DATETIME_FORMAT))
        data.timestamp = timestamp
        data.save()
    print("Dashboard data finished loading")
def addServices():
    # CSV columns: serviceId, serviceName, spId, sendingMode,
    # sendingWeekDays, sendTime, status, notifyStatus
    for row in DictReader(open('./serviceinfo.csv')):
        s = ServiceInfo()
        s.serviceId = row['serviceId']
        s.serviceName = row['serviceName']
        s.spId = row['spId']
        s.sendingMode = row['sendingMode']
        s.sendingWeekDays = row['sendingWeekDays']
        s.status = row['status']
        s.notifyStatus = row['notifyStatus']
        rawSendTime = row['validTime']
        send = UTC.localize(datetime.strptime(rawSendTime, DATETIME_FORMAT))
        s.validTime = send
        s.sendTime = row['sendTime']
        s.addService()
    c.commit()