def setUp(self):
    """Create the client, an aidant, a usager, two connections and three
    mandats (two 6-day, one 3-day) shared by the tests in this case."""
    self.client = Client()
    self.aidant_thierry = factories.UserFactory()
    self.usager = Usager.objects.create(
        given_name="Joséphine",
        family_name="ST-PIERRE",
        preferred_username="******",
        birthdate="1969-12-15",
        gender="female",
        birthplace="70447",
        birthcountry="99100",
        sub="123",
        email="*****@*****.**",
    )
    self.connection = Connection.objects.create(
        state="test_state", code="test_code", nonce="test_nonce",
        usager=self.usager)
    date_further_away_minus_one_hour = datetime(
        2019, 1, 9, 8, tzinfo=pytz_timezone("Europe/Paris"))
    # NOTE(review): self.connection is rebound here, so the first Connection
    # created above is only reachable through the database afterwards.
    self.connection = Connection.objects.create(
        state="test_expiration_date_triggered",
        code="test_code",
        nonce="test_nonce",
        usager=self.usager,
        expiresOn=date_further_away_minus_one_hour,
    )
    mandat_creation_date = datetime(2019, 1, 5, 3, 20, 34, 0,
                                    tzinfo=pytz_timezone("Europe/Paris"))
    self.mandat = Mandat.objects.create(
        aidant=self.aidant_thierry,
        usager=self.usager,
        demarche="transports",
        expiration_date=mandat_creation_date + timedelta(days=6),
        creation_date=mandat_creation_date,
    )
    self.mandat_2 = Mandat.objects.create(
        aidant=self.aidant_thierry,
        usager=self.usager,
        demarche="famille",
        expiration_date=mandat_creation_date + timedelta(days=6),
        creation_date=mandat_creation_date,
    )
    # Shorter-lived mandat: expires three days after creation.
    self.mandat_3 = Mandat.objects.create(
        aidant=self.aidant_thierry,
        usager=self.usager,
        demarche="logement",
        expiration_date=mandat_creation_date + timedelta(days=3),
        creation_date=mandat_creation_date,
    )
def test_write_submitty_date(self, get_timezone):
    """Check write_submitty_date() output for aware and naive datetimes.

    ``get_timezone`` is injected by a ``@patch`` decorator (not visible in
    this chunk) so the naive cases resolve against a fixed local zone.
    """
    testcases = (
        (datetime(2020, 6, 12, 3, 21, 30, tzinfo=pytz_timezone('UTC')),
         '2020-06-12 03:21:30+0000'),
        (datetime(2020, 12, 25, 3, 21, 30, tzinfo=pytz_timezone('UTC')),
         '2020-12-25 03:21:30+0000'),
        # Microseconds are expected to be dropped from the formatted output.
        (datetime(2020, 6, 12, 3, 21, 30, 123, tzinfo=pytz_timezone('UTC')),
         '2020-06-12 03:21:30+0000'),
        # Naive datetimes: expected offset differs with daylight saving.
        (datetime(2020, 6, 12, 3, 21, 30), '2020-06-12 03:21:30-0400'),
        (datetime(2020, 12, 12, 3, 21, 30), '2020-12-12 03:21:30-0500'))
    for testcase in testcases:
        with self.subTest(i=testcase[0]):
            self.assertEqual(testcase[1],
                             dateutils.write_submitty_date(testcase[0]))
def handle_datetime(self):
    '''
    When Django accepts datetime input, it creates a datetime aware object
    relative to the current timezone (the user's). When the user submits a
    ticket we must instead store a datetime that is aware in the timezone
    where the ticket event is located; this method does that conversion.

    Reads ``start_date``, ``start_time`` and ``location`` from
    ``self.cleaned_data``, stores the UTC-normalised result under
    ``cleaned_data['start_datetime']`` and raises ``forms.ValidationError``
    (code ``expired_date``) when the event has already started.
    '''
    inputted_datetime = datetime.datetime.combine(
        self.cleaned_data.get('start_date'),
        self.cleaned_data.get('start_time'))
    local_tz = pytz_timezone(self.cleaned_data.get('location').timezone)
    local_tz_aware_datetime = local_tz.localize(inputted_datetime)
    utc_tz = pytz_timezone('UTC')
    utc_tz_aware_datetime = utc_tz.normalize(
        local_tz_aware_datetime.astimezone(utc_tz))
    today = dj_timezone.now()  # Keep seconds/microseconds so a time equal to "now" is accepted.
    if utc_tz_aware_datetime < today:
        logging.error('Event start date before current date')
        raise forms.ValidationError(
            'Cannot submit ticket for event that has already started',
            code='expired_date')
    # BUG FIX: datetime.replace() returns a NEW object; the original code
    # discarded the result, so seconds/microseconds were never zeroed and
    # tickets could still sort by submission time accidentally.
    utc_tz_aware_datetime = utc_tz_aware_datetime.replace(second=0,
                                                          microsecond=0)
    self.cleaned_data['start_datetime'] = utc_tz_aware_datetime
    return
def test_gig_time_daylight_savings(self):
    """ check to see that gig times are rendered correctly when going from
    UTC in the database to something else. """
    self.band.timezone = "UTC"
    self.band.save()
    Assoc.objects.create(member=self.joeuser, band=self.band,
                         status=AssocStatusChoices.CONFIRMED)
    # One winter date and one summer date so the DST offset differs.
    date1 = timezone.datetime(2037, 1, 2, 12, tzinfo=pytz_timezone("UTC"))
    date2 = timezone.datetime(2037, 7, 2, 12, tzinfo=pytz_timezone("UTC"))
    self.create_gig_form(call_date=self._dateformat(date1),
                         call_time=self._timeformat(date1))
    self.create_gig_form(call_date=self._dateformat(date2),
                         call_time=self._timeformat(date2))
    # get the gigs we just made
    gigs = Gig.objects.order_by("id")
    first, second = gigs
    c = Client()
    c.force_login(self.joeuser)
    # With the band in UTC, 12:00 renders as "noon".
    response = c.get(f"/gig/{first.id}/")
    self.assertIn("noon", response.content.decode("ascii"))
    # now change the band's timezone and render again
    self.band.timezone = "America/New_York"
    self.band.save()
    # 12:00 UTC is 7 a.m. EST (winter) but 8 a.m. EDT (summer).
    response = c.get(f"/gig/{first.id}/")
    self.assertIn("7 a.m.", response.content.decode("ascii"))
    response = c.get(f"/gig/{second.id}/")
    self.assertIn("8 a.m.", response.content.decode("ascii"))
def handle_datetime(self):
    '''
    When Django accepts datetime input, it creates a datetime aware object
    relative to the current timezone (the user's). When the user submits a
    ticket we must instead store a datetime that is aware in the timezone
    where the ticket event is located; this method does that conversion.

    Reads ``start_date``, ``start_time`` and ``location`` from
    ``self.cleaned_data``, stores the UTC-normalised result under
    ``cleaned_data['start_datetime']`` and raises ``forms.ValidationError``
    (code ``expired_date``) when the event has already started.
    '''
    inputted_datetime = datetime.datetime.combine(
        self.cleaned_data.get('start_date'),
        self.cleaned_data.get('start_time'))
    local_tz = pytz_timezone(self.cleaned_data.get('location').timezone)
    local_tz_aware_datetime = local_tz.localize(inputted_datetime)
    utc_tz = pytz_timezone('UTC')
    utc_tz_aware_datetime = utc_tz.normalize(
        local_tz_aware_datetime.astimezone(utc_tz))
    today = dj_timezone.now()  # Keep seconds/microseconds so a time equal to "now" is accepted.
    if utc_tz_aware_datetime < today:
        logging.error('Event start date before current date')
        raise forms.ValidationError(
            'Cannot submit ticket for event that has already started',
            code='expired_date')
    # BUG FIX: datetime.replace() returns a NEW object; the original code
    # discarded the result, so seconds/microseconds were never zeroed and
    # tickets could still sort by submission time accidentally.
    utc_tz_aware_datetime = utc_tz_aware_datetime.replace(second=0,
                                                          microsecond=0)
    self.cleaned_data['start_datetime'] = utc_tz_aware_datetime
    return
class TestDateUtils(TestCase):
    """Tests for submitty_utils.dateutils.parse_datetime with a mocked clock
    (2016-10-14 22:11:32 America/New_York) and mocked timezone."""

    @patch("submitty_utils.dateutils.get_current_time",
           return_value=pytz_timezone('America/New_York').localize(
               datetime(2016, 10, 14, 22, 11, 32, 0)))
    @patch("submitty_utils.dateutils.get_timezone",
           return_value=pytz_timezone('America/New_York'))
    def test_parse_datetime(self, current_time, get_timezone):
        """Exercise absolute strings, relative '+N days' strings and
        passthrough of datetime objects.

        NOTE(review): @patch decorators inject mocks bottom-up, so the first
        argument corresponds to the get_timezone patch; the parameter names
        here appear swapped — confirm (harmless since neither is used).
        """
        testcases = (
            ('2016-10-14 22:11:32+0200',
             datetime(2016, 10, 14, 22, 11, 32, 0,
                      timezone(timedelta(hours=2)))),
            # No offset given: local timezone (UTC-4) is assumed.
            ('2016-10-14 22:11:32',
             datetime(2016, 10, 14, 22, 11, 32, 0,
                      timezone(timedelta(hours=-4)))),
            # Date only: end of that day.
            ('2016-10-14',
             datetime(2016, 10, 14, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            # Relative offsets from the mocked "current time".
            ('+1 days',
             datetime(2016, 10, 15, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            ('+3 day',
             datetime(2016, 10, 17, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            ('+0 days',
             datetime(2016, 10, 14, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            ('-1 days',
             datetime(2016, 10, 13, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            ('-10 day',
             datetime(2016, 10, 4, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))),
            ('+1 day at 10:30:00',
             datetime(2016, 10, 15, 10, 30, 0, 0,
                      timezone(timedelta(hours=-4)))),
            # Aware datetimes pass through unchanged; naive ones get the
            # local offset attached.
            (datetime(2016, 10, 4, 23, 59, 59, 0,
                      timezone(timedelta(hours=+1))),
             datetime(2016, 10, 4, 23, 59, 59, 0,
                      timezone(timedelta(hours=+1)))),
            (datetime(2016, 10, 4, 23, 59, 59, 0),
             datetime(2016, 10, 4, 23, 59, 59, 0,
                      timezone(timedelta(hours=-4)))))
        for testcase in testcases:
            with self.subTest(str(testcase[0])):
                self.assertEqual(testcase[1],
                                 dateutils.parse_datetime(testcase[0]))

    def test_parse_datetime_invalid_type(self):
        """Non-str, non-datetime input must raise TypeError."""
        with self.assertRaises(TypeError) as cm:
            dateutils.parse_datetime(10)
        self.assertEqual("Invalid type, expected str, got <class 'int'>",
                         str(cm.exception))

    def test_parse_datetime_invalid_format(self):
        """Unparseable strings must raise ValueError."""
        with self.assertRaises(ValueError) as cm:
            dateutils.parse_datetime('invalid datetime')
        self.assertEqual('Invalid string for date parsing: invalid datetime',
                         str(cm.exception))
def get_timezone(self, zone):
    """Resolve *zone* to a tzinfo object.

    A string is looked up through pytz; anything else is assumed to already
    be a tzinfo-like object and is returned untouched. Both paths log what
    is being returned.
    """
    if not isinstance(zone, string_t):
        octolog.info("<=Time _Zone=> GET_TIMEZONE(): %s", str(zone))
        return zone
    resolved = pytz_timezone(zone)
    # Something tries to get UTC instead of settings London!
    octolog.info("<=Time _Zone=> GET_TIMEZONE(): isinstance %s", str(resolved))
    return resolved
def localize_date(date, from_tz=utc, to_tz=utc):
    """Reinterpret a naive datetime from `from_tz` into `to_tz`, naive.

    Python 2 legacy code: timezone arguments may be `basestring` names,
    which are resolved through pytz. The converted value has its tzinfo
    stripped again so callers keep working with naive datetimes. If either
    timezone is falsy the input is returned unchanged.
    """
    if from_tz and to_tz:
        if isinstance(from_tz, basestring):
            from_tz = pytz_timezone(from_tz)
        if isinstance(to_tz, basestring):
            to_tz = pytz_timezone(to_tz)
        return from_tz.localize(date).astimezone(to_tz).replace(tzinfo=None)
    return date
def add_timezone(dt: datetime) -> datetime:
    """Return *dt* expressed in the server-configured timezone.

    A naive datetime is stamped with the server timezone as-is; an aware
    datetime is treated as UTC (django stores UTC) and converted.
    """
    server_tz = pytz_timezone(get_server_config(setting='timezone'))
    aware = dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
    if not aware:
        return dt.replace(tzinfo=server_tz)
    # utc is used in storage by django
    return dt.replace(tzinfo=pytz_utc).astimezone(server_tz)
def localize_date(date, from_tz=utc, to_tz=utc):
    """Convert *date* from `from_tz` to `to_tz` (aware result).

    Timezone arguments given as names are resolved through pytz. A naive
    input is first localized to `from_tz`; an already-aware input keeps its
    own zone and is only converted. If either timezone is falsy the input
    is returned unchanged.
    """
    if not (from_tz and to_tz):
        return date
    if isinstance(from_tz, str):
        from_tz = pytz_timezone(from_tz)
    if isinstance(to_tz, str):
        to_tz = pytz_timezone(to_tz)
    localized = from_tz.localize(date) if date.tzinfo is None else date
    return localized.astimezone(to_tz)
def setUp(self):
    """Create two aidants in the same organisation, a usager, two
    connections and two mandats with their autorisations."""
    self.client = Client()
    self.aidant_thierry = AidantFactory()
    self.aidant_yasmina = AidantFactory(
        username="******",
        organisation=self.aidant_thierry.organisation,
    )
    self.usager = UsagerFactory(given_name="Joséphine")
    self.connection = Connection.objects.create(
        state="avalidstate123",
        nonce="avalidnonce456",
        usager=self.usager,
    )
    # Fixed past date used to exercise connection-expiry handling.
    date_further_away_minus_one_hour = datetime(
        2019, 1, 9, 8, tzinfo=pytz_timezone("Europe/Paris"))
    self.connection_2 = Connection.objects.create(
        state="test_expiration_date_triggered",
        nonce="test_nonce",
        usager=self.usager,
        expires_on=date_further_away_minus_one_hour,
    )
    mandat_creation_date = datetime(2019, 1, 5, 3, 20, 34, 0,
                                    tzinfo=pytz_timezone("Europe/Paris"))
    # First mandat: 6-day validity, two autorisations.
    self.mandat_thierry_usager_1 = MandatFactory(
        organisation=self.aidant_thierry.organisation,
        usager=self.usager,
        expiration_date=mandat_creation_date + timedelta(days=6),
        creation_date=mandat_creation_date,
    )
    AutorisationFactory(
        mandat=self.mandat_thierry_usager_1,
        demarche="transports",
    )
    AutorisationFactory(
        mandat=self.mandat_thierry_usager_1,
        demarche="famille",
    )
    # Second mandat: shorter 3-day validity, single autorisation.
    self.mandat_thierry_usager_2 = MandatFactory(
        organisation=self.aidant_thierry.organisation,
        usager=self.usager,
        expiration_date=mandat_creation_date + timedelta(days=3),
        creation_date=mandat_creation_date,
    )
    AutorisationFactory(
        mandat=self.mandat_thierry_usager_2,
        demarche="logement",
    )
class DfsDateTimeUtil(object):
    """
    This class provides methods for translating UTC datetimes to EST,
    and vice versa. This is useful, because the "day's games" for many
    sports appear to span multiple days when looked at in UTC.

    From the perspective of Daily Fantasy, this class considers a "day"
    to be the 24 hours in the EST timezone.
    (In our implementation we use pytz.timezone('US/Eastern'))

    All methods of this class expect UTC timezone aware datetime objects,
    and will raise NotUtcTimezoneException in all other cases.
    """

    class NotUtcTimezoneException(Exception):
        """Raised when a supplied datetime is not UTC-aware."""
        pass

    UTC_TIMEZONE = pytz_timezone('UTC')
    DFS_TIMEZONE = pytz_timezone('US/Eastern')

    # datetime.weekday() constants
    MON = 0
    TUE = 1
    WED = 2
    THR = 3
    FRI = 4
    SAT = 5
    SUN = 6

    def __init__(self, dfs_timezone=DFS_TIMEZONE):
        """
        All major U.S. sports post their game start times in EST.

        :param dfs_timezone: the pytz.timezone object of the Daily Fantasy
            site (default: EST)
        """
        self.dfs_timezone = dfs_timezone

    def __valid_utc(self, dt):
        # NOTE(review): identity comparison relies on pytz returning a
        # singleton object for 'UTC'; a datetime carrying an equal but
        # distinct UTC tzinfo would be rejected — confirm this is intended.
        if dt.tzinfo is not self.UTC_TIMEZONE:
            raise self.NotUtcTimezoneException(
                'the supplied datetime object must be in utc')

    @staticmethod
    def create(date, time, tzinfo=UTC_TIMEZONE):
        """Combine *date* and *time* (hour/minute only) into a datetime
        carrying *tzinfo* (default UTC)."""
        dt = datetime(date.year, date.month, date.day, time.hour, time.minute,
                      tzinfo=tzinfo)
        return dt
def test_handler_time_format_timezone(s3_bucket, single_calendar_mock, with_frozen_time): start_time_us = datetime(2019, 6, 15, 4, 0) # GMT - 4 us_time_zone = pytz_timezone('US/Eastern') oslo_time_zone = pytz_timezone('Europe/Oslo') start_time_us = us_time_zone.localize(start_time_us) start_time_us_rcf3339 = pyrfc3339.generate(start_time_us, utc=False) start_time_oslo = datetime(2019, 6, 15, 10, 0) end_time_oslo = datetime(2019, 6, 15, 13, 30) # GM + 2 end_time_oslo = oslo_time_zone.localize(end_time_oslo) end_time_1_rfc3339 = pyrfc3339.generate(end_time_oslo, utc=False) single_calendar_mock( { 'items': [{ 'id': '1234', 'start': { 'dateTime': start_time_us_rcf3339 }, 'end': { 'dateTime': end_time_1_rfc3339 }, 'location': 'Enheter-box1' }] }, {}) handler(None, None) expected = [{ 'event_id': '1234', 'calendar_id': '1', 'timestamp_from': int(start_time_oslo.timestamp()), 'timestamp_to': int(end_time_oslo.timestamp()), 'event_summary': '', 'event_button_names': ['box1'], 'creator': '' }, { 'event_id': '1234', 'calendar_id': '2', 'timestamp_from': int(start_time_oslo.timestamp()), 'timestamp_to': int(end_time_oslo.timestamp()), 'event_summary': '', 'event_button_names': ['box1'], 'creator': '' }] response = s3_bucket.Object(next(iter(s3_bucket.objects.all())).key).get() data = loads(response['Body'].read()) assert [i for i in data['data'] if i not in expected] == []
def test_mandat_expiration_date_setting(self):
    """A mandat created "now" gets creation/expiration dates anchored at
    the (frozen) test date of 2019-01-14.

    NOTE(review): the expected values pass a pytz zone directly to the
    datetime constructor, which yields the zone's LMT offset rather than
    CET; this is self-consistent only if the frozen clock was built the
    same way — confirm against the decorator (not visible here).
    """
    mandat_1 = Mandat.objects.create(
        aidant=self.aidant_marge,
        usager=self.usager_homer,
        demarche="Carte grise",
        expiration_date=timezone.now() + timedelta(days=3),
    )
    self.assertEqual(
        mandat_1.creation_date,
        datetime(2019, 1, 14, tzinfo=pytz_timezone("Europe/Paris")),
    )
    self.assertEqual(
        mandat_1.expiration_date,
        datetime(2019, 1, 17, tzinfo=pytz_timezone("Europe/Paris")),
    )
class Person(db.Model, BaseMixin, SerializerMixin):
    """SQLAlchemy model for a person/user account."""

    first_name = db.Column(db.String(80), nullable=False)
    last_name = db.Column(db.String(80), nullable=False)
    email = db.Column(EmailType, unique=True)
    phone = db.Column(db.String(30))
    # Inactive accounts keep their rows but are flagged off.
    active = db.Column(db.Boolean(), default=True)
    last_presence = db.Column(db.Date())
    # bcrypt hash stored as raw bytes.
    password = db.Column(db.Binary(60))
    # External id for Shotgun synchronisation.
    shotgun_id = db.Column(db.Integer, unique=True)
    timezone = db.Column(TimezoneType(backend="pytz"),
                         default=pytz_timezone("Europe/Paris"))
    locale = db.Column(LocaleType, default=Locale("en", "US"))
    data = db.Column(JSONB)

    skills = db.relationship("Department", secondary=department_link)

    def __repr__(self):
        return "<Person %s>" % self.full_name()

    def full_name(self):
        """Return "first last"; encodes to UTF-8 bytes under Python 2."""
        if sys.version_info[0] < 3:
            return "%s %s" % (self.first_name.encode("utf-8"),
                              self.last_name.encode("utf-8"))
        else:
            return "%s %s" % (self.first_name, self.last_name)
def check_captured_on(self):
    """Rebuild self.captured_on from the per-field year/month/day values.

    No-op unless year, month and day are all set. The optional
    captured_on_timezone overrides the default timezone when localising;
    hour/minute/second are applied only when all three are present.
    """
    if (self.captured_on_year is None
            or self.captured_on_month is None
            or self.captured_on_day is None):
        return
    if self.captured_on_timezone:
        tz = pytz_timezone(self.captured_on_timezone)
    else:
        tz = timezone.get_default_timezone()
    if self.captured_on is None:
        captured_on = timezone.localtime(timezone=tz)
    else:
        captured_on = timezone.localtime(self.captured_on, timezone=tz)
    captured_on = captured_on.replace(
        year=self.captured_on_year,
        month=self.captured_on_month,
        day=self.captured_on_day,
    )
    have_time = (self.captured_on_hour is not None
                 and self.captured_on_minute is not None
                 and self.captured_on_second is not None)
    if have_time:
        captured_on = captured_on.replace(
            hour=self.captured_on_hour,
            minute=self.captured_on_minute,
            second=self.captured_on_second,
        )
    self.captured_on = captured_on
def send_instance_email(username, instance_id, instance_name, ip, launched_at,
                        linuxusername):
    """
    Sends an email to the user providing information about the
    new instance. Returns a boolean.
    """
    format_string = '%b, %d %Y %H:%M:%S'
    username, user_email, user_name = user_email_info(username)
    # launched_at arrives as a UTC wall-clock value; strip any tzinfo and
    # re-attach UTC explicitly so localtime() converts it correctly.
    launched_at = launched_at.replace(tzinfo=None)
    utc_date = django_timezone.make_aware(launched_at,
                                          timezone=pytz_timezone('UTC'))
    local_launched_at = django_timezone.localtime(utc_date)
    context = {
        "user": user_name,
        "id": instance_id,
        "name": instance_name,
        "ip": ip,
        "sshuser": linuxusername,
        "launched_at": launched_at.strftime(format_string),
        "local_launched_at": local_launched_at.strftime(format_string)
    }
    body = render_to_string("core/email/instance_ready.html",
                            context=Context(context))
    subject = 'Your Atmosphere Instance is Available'
    return email_from_admin(username, subject, body)
def get_timezone(
        self, postgres_format: bool = False
) -> Union[timezone_native, pytz_timezone, str]:
    """Get the network timezone.

    Resolution order: a fixed minute offset on the network, then a named
    timezone on the network, then the current system timezone. When
    ``postgres_format`` is true the result is truncated to a 3-character
    string (NOTE(review): ``str(tz)[:3]`` only abbreviates; confirm this
    matches what Postgres actually expects).

    @TODO define crawl timezones vs network timezone
    """
    # BUG FIX: tz was previously only assigned inside the first branch, so
    # `if not tz` raised NameError whenever self.offset was falsy.
    tz = None
    # If a fixed offset is defined for the network use that
    if self.offset:
        tz = pytz.FixedOffset(self.offset)
    # If the network alternatively defines a timezone
    if not tz and self.timezone:
        tz = pytz_timezone(self.timezone)
    # Default to current system timezone
    if not tz:
        tz = get_current_timezone()
    if postgres_format:
        tz = str(tz)[:3]
    return tz
def send_instance_email(user, instance_id, instance_name, ip, launched_at,
                        linuxusername):
    """
    Sends an email to the user providing information about the
    new instance. Returns a boolean.
    """
    # launched_at is treated as a UTC wall-clock value: tzinfo is stripped,
    # then UTC is re-attached via make_aware before converting to localtime.
    launched_at = launched_at.replace(tzinfo=None)
    body = """
The atmosphere instance <%s> is running and ready for use.

Your Instance Information:
* Name: %s
* IP Address: %s
* SSH Username: %s
* Launched at: %s UTC (%s Arizona time)

Please terminate instances when they are no longer needed.
This e-mail notification was auto-generated after instance launch.

Helpful links:
  Atmosphere Manual: Using Instances
  * https://pods.iplantcollaborative.org/wiki/display/atmman/Using+Instances

  Atmosphere E-mail Support
  * [email protected]
""" % (instance_id,
       instance_name,
       ip,
       linuxusername,
       launched_at.strftime('%b, %d %Y %H:%M:%S'),
       django_timezone.localtime(
           django_timezone.make_aware(
               launched_at,
               timezone=pytz_timezone('UTC'))).strftime('%b, %d %Y %H:%M:%S'))
    subject = 'Your Atmosphere Instance is Available'
    return email_from_admin(user, subject, body)
class AutorisationModelTests(TestCase):
    """Model tests for Autorisation creation and date handling."""

    @classmethod
    def setUpTestData(cls):
        # Two aidants in distinct organisations, two usagers, one mandat per
        # aidant/usager pair (6-day validity each).
        cls.aidant_marge = AidantFactory(username="******")
        cls.aidant_patricia = AidantFactory(username="******")
        cls.usager_homer = UsagerFactory()
        cls.usager_ned = UsagerFactory(family_name="Flanders",
                                       sub="nedflanders")
        cls.mandat_marge_homer_6 = MandatFactory(
            organisation=cls.aidant_marge.organisation,
            usager=cls.usager_homer,
            expiration_date=timezone.now() + timedelta(days=6),
        )
        cls.mandat_patricia_ned_6 = MandatFactory(
            organisation=cls.aidant_patricia.organisation,
            usager=cls.usager_ned,
            expiration_date=timezone.now() + timedelta(days=6),
        )

    def test_saving_and_retrieving_autorisation(self):
        """Autorisations persist and expose their mandat's relations."""
        first_autorisation = AutorisationFactory(
            mandat=self.mandat_marge_homer_6,
            demarche="Carte grise",
        )
        second_autorisation = AutorisationFactory(
            mandat=self.mandat_patricia_ned_6,
            demarche="Revenus",
        )
        self.assertEqual(Autorisation.objects.count(), 2)
        self.assertEqual(
            first_autorisation.mandat.organisation,
            self.mandat_marge_homer_6.organisation,
        )
        self.assertEqual(first_autorisation.demarche, "Carte grise")
        self.assertEqual(second_autorisation.mandat.usager.family_name,
                         "Flanders")

    # NOTE(review): the pytz zone is passed directly to the datetime
    # constructor (LMT offset, not CET); the frozen clock and the expected
    # values below are built the same way, so the assertions stay
    # self-consistent.
    fake_date = datetime(2019, 1, 14, tzinfo=pytz_timezone("Europe/Paris"))

    @freeze_time(fake_date)
    def test_autorisation_expiration_date_setting(self):
        """With the clock frozen at 2019-01-14, creation and expiration
        dates land on 2019-01-14 and 2019-01-17 respectively."""
        mandat = MandatFactory(
            organisation=self.aidant_marge.organisation,
            usager=self.usager_homer,
            expiration_date=timezone.now() + timedelta(days=3),
        )
        autorisation = AutorisationFactory(
            mandat=mandat,
            demarche="Carte grise",
        )
        self.assertEqual(
            autorisation.creation_date,
            datetime(2019, 1, 14, tzinfo=pytz_timezone("Europe/Paris")),
        )
        self.assertEqual(
            autorisation.mandat.expiration_date,
            datetime(2019, 1, 17, tzinfo=pytz_timezone("Europe/Paris")),
        )
def setUp(self):
    """Build a saved Connection (with usager) that is already expired, plus
    the FranceConnect-style token request payload used by the tests."""
    self.connection = Connection()
    self.connection.state = "test_state"
    self.connection.code = "test_code"
    self.connection.nonce = "test_nonce"
    self.connection.usager = Usager.objects.create(
        given_name="Joséphine",
        family_name="ST-PIERRE",
        preferred_username="******",
        birthdate="1969-12-15",
        gender="female",
        birthplace="70447",
        birthcountry="99100",
        sub="test_sub",
        email="*****@*****.**",
    )
    # Expiry fixed in the past (2012) so expiration paths are exercised.
    self.connection.expiresOn = datetime(
        2012, 1, 14, 3, 21, 34, tzinfo=pytz_timezone("Europe/Paris"))
    self.connection.save()
    self.fc_request = {
        "grant_type": "authorization_code",
        "redirect_uri": "test_url.test_url",
        "client_id": "test_client_id",
        "client_secret": "test_client_secret",
        "code": "test_code",
    }
def send_instance_email(user, instance_id, instance_name, ip, launched_at,
                        linuxusername):
    """
    Sends an email to the user providing information about the
    new instance. Returns a boolean.
    """
    # launched_at is treated as a UTC wall-clock value: tzinfo is stripped,
    # then UTC is re-attached via make_aware before converting to localtime.
    launched_at = launched_at.replace(tzinfo=None)
    body = """
The atmosphere instance <%s> is running and ready for use.

Your Instance Information:
* Name: %s
* IP Address: %s
* SSH Username: %s
* Launched at: %s UTC (%s Arizona time)

Please terminate instances when they are no longer needed.
This e-mail notification was auto-generated after instance launch.

Helpful links:
  Atmosphere Manual: Using Instances
  * https://pods.iplantcollaborative.org/wiki/display/atmman/Using+Instances

  Atmosphere E-mail Support
  * [email protected]
""" % (instance_id,
       instance_name,
       ip,
       linuxusername,
       launched_at.strftime('%b, %d %Y %H:%M:%S'),
       django_timezone.localtime(
           django_timezone.make_aware(
               launched_at,
               timezone=pytz_timezone('UTC')))
       .strftime('%b, %d %Y %H:%M:%S'))
    subject = 'Your Atmosphere Instance is Available'
    return email_from_admin(user, subject, body)
def set_up(self):
    """Create a superuser, a Condor scheduler and two TethysJobs — one bare
    and one with execute/completion times — for the job tests.

    NOTE(review): passing the pytz 'America/Denver' zone directly to the
    datetime constructor yields its LMT offset rather than MST/MDT —
    confirm the tests only compare values built the same way.
    """
    self.tz = pytz_timezone('America/Denver')
    self.user = User.objects.create_user('tethys_super',
                                         '*****@*****.**',
                                         'pass')
    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
    )
    self.scheduler.save()
    self.tethysjob = TethysJob(
        name='test_tethysjob',
        description='test_description',
        user=self.user,
        label='test_label',
    )
    self.tethysjob.save()
    self.tethysjob_execute_time = TethysJob(
        name='test_tethysjob_execute_time',
        description='test_description',
        user=self.user,
        label='test_label',
        execute_time=datetime(year=2018, month=1, day=1, tzinfo=self.tz),
        completion_time=datetime(year=2018, month=1, day=1, hour=1,
                                 tzinfo=self.tz),
        _status='VAR',
        _process_results_function=test_function
    )
    self.tethysjob_execute_time.save()
def send_instance_email(user, instance_id, instance_name, ip, launched_at,
                        linuxusername):
    """
    Sends an email to the user providing information about the
    new instance. Returns a boolean.
    """
    format_string = '%b, %d %Y %H:%M:%S'
    username, user_email, user_name = user_email_info(user)
    # launched_at is treated as a UTC wall-clock value; strip tzinfo and
    # re-attach UTC explicitly so localtime() converts it correctly.
    launched_at = launched_at.replace(tzinfo=None)
    utc_date = django_timezone.make_aware(launched_at,
                                          timezone=pytz_timezone('UTC'))
    local_launched_at = django_timezone.localtime(utc_date)
    context = {
        "user": user_name,
        "id": instance_id,
        "name": instance_name,
        "ip": ip,
        "sshuser": linuxusername,
        "launched_at": launched_at.strftime(format_string),
        "local_launched_at": local_launched_at.strftime(format_string)
    }
    body = render_to_string("core/email/instance_ready.html",
                            context=Context(context))
    subject = 'Your Atmosphere Instance is Available'
    return email_from_admin(user, subject, body)
def make_tz_aware(dattetime, tz=None):
    """
    Localize a naive datetime.

    If no tz is passed, the timezone from Django settings is used.
    """
    zone = tz if tz else pytz_timezone(settings.TIME_ZONE)
    return zone.localize(dattetime)
def check_captured_on(self):
    """Populate self.captured_on when it is unset.

    Starts from "now" (in captured_on_timezone when given, otherwise the
    default timezone) and overrides the date and, optionally, the time with
    the per-field values when they are all truthy.
    """
    if self.captured_on is not None:
        return
    if self.captured_on_timezone:
        tz = pytz_timezone(self.captured_on_timezone)
        captured_on = datetime.datetime.now(tz=tz)
    else:
        captured_on = timezone.now()
    # BUG FIX: datetime.replace() returns a NEW object; the original code
    # discarded both results, so the year/month/day and hour/minute/second
    # overrides were silently lost.
    if (self.captured_on_year and self.captured_on_month
            and self.captured_on_day):
        captured_on = captured_on.replace(year=self.captured_on_year,
                                          month=self.captured_on_month,
                                          day=self.captured_on_day)
    if (self.captured_on_hour and self.captured_on_minute
            and self.captured_on_second):
        captured_on = captured_on.replace(hour=self.captured_on_hour,
                                          minute=self.captured_on_minute,
                                          second=self.captured_on_second)
    self.captured_on = captured_on
def set_up(self):
    """Create a superuser, a Condor scheduler and two TethysJobs — one bare
    and one with execute/completion times — for the job tests.

    NOTE(review): passing the pytz 'America/Denver' zone directly to the
    datetime constructor yields its LMT offset rather than MST/MDT —
    confirm the tests only compare values built the same way.
    """
    self.tz = pytz_timezone('America/Denver')
    self.user = User.objects.create_user('tethys_super',
                                         '*****@*****.**',
                                         'pass')
    self.scheduler = CondorScheduler(
        name='test_scheduler',
        host='localhost',
    )
    self.scheduler.save()
    self.tethysjob = TethysJob(
        name='test_tethysjob',
        description='test_description',
        user=self.user,
        label='test_label',
    )
    self.tethysjob.save()
    self.tethysjob_execute_time = TethysJob(
        name='test_tethysjob_execute_time',
        description='test_description',
        user=self.user,
        label='test_label',
        execute_time=datetime(year=2018, month=1, day=1, tzinfo=self.tz),
        completion_time=datetime(year=2018, month=1, day=1, hour=1,
                                 tzinfo=self.tz),
        _status='VAR',
        _process_results_function=test_function)
    self.tethysjob_execute_time.save()
def timezone(self, value):
    """Set the timezone name and derive self.tz / self.tzname from it.

    A falsy value resets to UTC; otherwise the zone is resolved through
    pytz and tzname is computed for the current instant (to pick the
    correct DST abbreviation).
    """
    self._timezone = value
    if not value:
        self.tz = utc
        self.tzname = 'UTC'
        return
    self.tz = pytz_timezone(value)
    now_utc = utc.localize(datetime.utcnow())
    self.tzname = now_utc.astimezone(self.tz).tzname()
def get_localized_time(self, utc_timestamp):
    """Return localized datetime from a timestamp"""
    # Interpret the epoch timestamp as UTC, convert to the configured
    # timezone, then render it for display.
    target_tz = pytz_timezone(self.conf.timezone)
    aware_utc = pytz_utc.localize(datetime.utcfromtimestamp(utc_timestamp))
    return aware_utc.astimezone(target_tz).strftime('%Y-%m-%d at %H:%M %Z')
def text(self):
    """One-line summary: date (America/Lima) / name / station / room / beds."""
    lima_start = self.fecha.astimezone(pytz_timezone('America/Lima'))
    return "%s / %s / %s / %s / Camas: %s" % (
        lima_start.strftime("%Y-%m-%d"),
        self.nombre,
        self.estacion,
        self.habitacion,
        str(self.ncamas),
    )
def parse_timezone(timezone_string):
    """Validate a timezone name and return the matching pytz timezone.

    Raises TypeError for non-string input and ValueError for a string that
    is not a known timezone name.
    """
    if not isinstance(timezone_string, str):
        raise TypeError('Timezone must be a string')
    if timezone_string in all_timezones:
        return pytz_timezone(timezone_string)
    raise ValueError(f'{timezone_string} not a valid timezone')
def get_localized_time(self, utc_timestamp):
    """Return localized datetime from a timestamp"""
    tz_name = self.conf.timezone
    destination = pytz_timezone(tz_name)
    # Treat the epoch value as UTC, then shift it into the configured zone.
    moment = pytz_utc.localize(datetime.utcfromtimestamp(utc_timestamp))
    moment = moment.astimezone(destination)
    return moment.strftime('%Y-%m-%d at %H:%M %Z')
def send_instance_email(username, instance_id, instance_name, ip, launched_at,
                        linuxusername, user_failure=False,
                        user_failure_message=""):
    """
    Sends an email to the user providing information about the
    new instance. Returns a boolean.
    """
    format_string = '%b, %d %Y %H:%M:%S'
    email_template = get_email_template()
    try:
        instance = Instance.objects.get(provider_alias=instance_id)
        author = instance.created_by
        provider_location = instance.provider.location
        ssh_keys = author.sshkey_set.all()
        use_ssh_keys = author.userprofile.use_ssh_keys and ssh_keys.count() > 0
    except:
        raise
        # NOTE(review): the lines below are unreachable — `raise` above
        # re-raises before these fallback defaults can apply. Confirm
        # whether `raise` was meant to be removed so the fallbacks take
        # effect instead.
        provider_location = "N/A"
        ssh_keys = []
        use_ssh_keys = False
    username, user_email, user_name = user_email_info(username)
    # launched_at is treated as a UTC wall-clock value; strip tzinfo and
    # re-attach UTC explicitly so localtime() converts it correctly.
    launched_at = launched_at.replace(tzinfo=None)
    utc_date = django_timezone.make_aware(launched_at,
                                          timezone=pytz_timezone('UTC'))
    local_launched_at = django_timezone.localtime(utc_date)
    getting_started_link = email_template.get_link('getting-started')
    faq_link = email_template.get_link('faq')
    support_email = settings.SUPPORT_EMAIL
    context = {
        "getting_started_instances_link": getting_started_link.href,
        "getting_started_instances_name": getting_started_link.topic,
        "faq_link": faq_link.href,
        "faq_link_name": faq_link.topic,
        "use_ssh_keys": use_ssh_keys,
        "ssh_keys": ssh_keys,
        "provider_location": provider_location,
        "support_email": support_email,
        "support_email_header": email_template.email_header,
        "support_email_footer": email_template.email_footer,
        "user": user_name,
        "site_name": settings.SITE_NAME,
        "instance_id": instance_id,
        "instance_name": instance_name,
        "instance_ip": ip,
        "sshuser": linuxusername,
        "user_failure": user_failure,
        "user_failure_message": user_failure_message,
        "launched_at": launched_at.strftime(format_string),
        "local_launched_at": local_launched_at.strftime(format_string)
    }
    body = render_to_string(
        "core/email/instance_ready.html",
        context=Context(context))
    subject = 'Your Atmosphere Instance is Available'
    email_args = (username, subject, body)
    return email_from_admin(*email_args)
def test_autorisation_expiration_date_setting(self):
    """An autorisation created "now" gets its creation date at the frozen
    test date (2019-01-14) and its mandat expires three days later.

    NOTE(review): relies on a freeze_time decorator/fixture (not visible
    here) pinning the clock to 2019-01-14 built with the same
    pytz-in-constructor convention as the expected values.
    """
    mandat = MandatFactory(
        organisation=self.aidant_marge.organisation,
        usager=self.usager_homer,
        expiration_date=timezone.now() + timedelta(days=3),
    )
    autorisation = AutorisationFactory(
        mandat=mandat,
        demarche="Carte grise",
    )
    self.assertEqual(
        autorisation.creation_date,
        datetime(2019, 1, 14, tzinfo=pytz_timezone("Europe/Paris")),
    )
    self.assertEqual(
        autorisation.mandat.expiration_date,
        datetime(2019, 1, 17, tzinfo=pytz_timezone("Europe/Paris")),
    )
def __init__(self, label=None, validators=None, format='%Y-%m-%d %I:%M%p',
             timezone=None, **kwargs):
    """Initialise the field; an optional timezone name is resolved through
    pytz, defaulting to UTC when absent."""
    super(DateTimeField, self).__init__(label, validators, **kwargs)
    self.format = format
    self.timezone = timezone
    self.tz = pytz_timezone(timezone) if timezone else utc
def test_datetime_in_current_timezone():
    """datetime_in_current_timezone() must combine a date and a time into an
    aware datetime in Django's current timezone (mocked to Bratislava)."""
    tz = pytz_timezone("Europe/Bratislava")
    # Replace Django's get_current_timezone for the duration of this test.
    flexmock(timezone, get_current_timezone=lambda: tz)
    date = datetime.date(2016, 1, 1)
    time = datetime.time(10, 0, 0)
    dt = datetime_in_current_timezone(date, time)
    fmt = '%Y-%m-%d %H:%M:%S %Z%z'
    # January in Bratislava is CET (+01:00), no DST.
    assert dt.strftime(fmt) == "2016-01-01 10:00:00 CET+0100"
def new_clinvar_available(self):
    """Return True when a ClinVar VCF newer than our last run exists.

    Parses the YYYYMMDD stamp out of the latest b37 VCF filename and treats
    the file as "current" until the end of that day (start of the next day,
    US/Eastern). Returns False only when self.last_processed is set and is
    later than that cutoff.
    """
    cv_year, cv_month, cv_day = [int(x) for x in re.search(
        r'_(20[0-9][0-9])([01][0-9])([0-3][0-9])\.vcf',
        clinvar_update.latest_vcf_filename('b37')).groups()]
    # BUG FIX: the original built datetime(..., cv_day + 1, ...), which
    # raises ValueError whenever the stamp falls on the last day of a month
    # (e.g. day 31 -> invalid day 32). Use timedelta arithmetic instead.
    cv_latest = datetime.datetime(
        cv_year, cv_month, cv_day, 0, 0, 0,
        tzinfo=pytz_timezone('US/Eastern')) + datetime.timedelta(days=1)
    if self.last_processed and self.last_processed > cv_latest:
        return False
    return True
def order_detail_step_changed(request, order_pk, local_timezone, step_pk,
                              checked):
    """Toggle one workflow step on an order and record an audit comment.

    Returns a JSON string: either an error payload (when the step is
    disabled) or the updated step state plus the comment, with the change
    time rendered in the caller-supplied local timezone.
    """
    #import ipdb; ipdb.set_trace()
    order = Order.objects.get(pk=order_pk)
    disabled_steps = order.disabled_steps()
    if step_pk in disabled_steps:
        messages.error(
            request,
            "Something went wrong! You weren't suppose to be able to {} "
            "that step!".format('check' if checked else 'uncheck'))
        return simplejson.dumps({'error': True})
    # timezone.now() is in UTC.
    current_time = timezone.now()
    # The first step is STEPS[0].
    attr, code, task = order.STEPS[step_pk - 1]
    # Checking stamps the step with "now"; unchecking clears it.
    setattr(order, attr, current_time if checked else None)
    order.save()
    new_comment = Comment(order=order, action_type=code,
                          created=current_time)
    new_comment.comment = new_comment.get_action_type_description(code).format(
        user=request.user.get_full_name(),
        action="checked" if checked else "unchecked")
    new_comment.save()
    # Convert current_time to the local time depending on the local_timezone.
    local_current_time = current_time.astimezone(
        pytz_timezone(local_timezone))
    # dateformat.format() formats the time as Django would format it in the
    # template. Displaying time format like 'midnight' and '6 pm' (as oppose
    # to '6:00 pm').
    formatted_local_current_time = dateformat.format(local_current_time,
                                                     'F j, Y, P')
    return simplejson.dumps({
        'error': False,
        'checked': checked,
        'disabled_steps': order.disabled_steps(),
        'total_steps': order.total_steps(),
        'step_pk': step_pk,
        'datetime': formatted_local_current_time,
        'user': request.user.get_full_name(),
        'comment_pk': new_comment.pk,
        'comment': new_comment.comment
    })
def get(self, request, format=None):
    """
    Get all the recent courses.
    """
    # Pagination window from the query string.
    param = {
        'offset': int(request.GET.get('offset', 0)),
        'limit': int(request.GET.get('limit', 10)),
    }
    screen = get_screen_size(request)
    thumbnail_size = get_thumbnail_size(screen, position='detail')
    # "Recent" = starting after local midnight today, expressed in UTC for
    # the database comparison.
    local_tz = pytz_timezone(settings.TIME_ZONE)
    now = datetime.now(local_tz)
    recent_time = datetime(now.year, now.month, now.day, tzinfo=local_tz)
    recent_utc_time = recent_time.astimezone(UTC)
    # Verified-track courses are listed elsewhere, so exclude them here.
    verified_courses_id = CourseMode.get_verified_option_courses()
    query = Course.exclude_vpc().filter(
        status__gte=0,
        start__gt=recent_utc_time,
        course_type=0).exclude(
            course_id__in=verified_courses_id).order_by('start')
    total = query.count()
    courses = query[param['offset']:param['offset'] + param['limit']]
    result = {
        "courses": CourseSerializer(thumbnail_size, courses, many=True).data,
        "total": total,
    }
    return Response(result)
def get_tide_stage(self, begin_date, end_date, station, datum='MLLW', units='feet', time_zone='GMT', write_tide_data=False):
    """Determine the tide stage at ``end_date`` for a NOAA station.

    Downloads six-minute water level data, locates the tide inflection
    points in a 20-hour window around ``end_date`` with a peak detector,
    then encodes the stage as flood (2000) or ebb (4000) plus a quarter
    offset (0/100/200/300).

    Returns the encoded stage, or -9999 when it could not be determined.
    """
    tide_data = {'LL': None, 'HH': None, 'L': None, 'H': None,
                 'PeakValue': None, 'ValleyValue': None}
    tide_stage = -9999
    try:
        if self.use_raw:
            wlData = self.getWaterLevelRawSixMinuteData(begin_date.strftime('%Y%m%d'),
                                                        end_date.strftime('%Y%m%d'),
                                                        station, datum, units, time_zone)
        else:
            wlData = self.getWaterLevelVerifiedSixMinuteData(begin_date.strftime('%Y%m%d'),
                                                             end_date.strftime('%Y%m%d'),
                                                             station, datum, units, time_zone)
    except (WebFault, Exception) as e:
        if self.logger:
            self.logger.exception(e)
    tz_obj = None
    if time_zone == 'GMT':
        tz_obj = pytz_timezone('UTC')
    try:
        # Restrict processing to +/- 10 hours around the query time.
        start_time_ndx = end_date - timedelta(hours=10)
        end_time_ndx = end_date + timedelta(hours=10)
        start_ndx = None
        end_ndx = None
        for ndx in range(0, len(wlData.item)):
            wl_time = tz_obj.localize(datetime.strptime(wlData.item[ndx]['timeStamp'],
                                                        '%Y-%m-%d %H:%M:%S.0'))
            if start_ndx is None and wl_time >= start_time_ndx:
                start_ndx = ndx
            if end_ndx is None and wl_time > end_time_ndx:
                end_ndx = ndx - 1
        tide_recs = wlData.item[start_ndx:end_ndx]
        recs = [tide_recs[ndx]['WL'] for ndx, data in enumerate(tide_recs)]
        pda_maxtab, pda_mintab = pda_peakdetect(recs, None, 10, 0, False)
        # pda_peakdetect returns (index, value) pairs; the last max/min pair
        # becomes HH/LL, the one before it H/L.
        max_len = len(pda_maxtab) - 1
        tide_data['HH'] = {'value': tide_recs[int(pda_maxtab[max_len][0])]['WL'],
                           'date': tide_recs[int(pda_maxtab[max_len][0])]['timeStamp']}
        if max_len > 0:
            tide_data['H'] = {'value': tide_recs[int(pda_maxtab[max_len - 1][0])]['WL'],
                              'date': tide_recs[int(pda_maxtab[max_len - 1][0])]['timeStamp']}
        max_len = len(pda_mintab) - 1
        tide_data['LL'] = {'value': tide_recs[int(pda_mintab[max_len][0])]['WL'],
                           'date': tide_recs[int(pda_mintab[max_len][0])]['timeStamp']}
        if max_len > 0:
            tide_data['L'] = {'value': tide_recs[int(pda_mintab[max_len - 1][0])]['WL'],
                              'date': tide_recs[int(pda_mintab[max_len - 1][0])]['timeStamp']}
        tide_levels = ['H', 'HH', 'L', 'LL']
        tide_changes = [tide_data[tide_level] for tide_level in tide_levels
                        if tide_level in tide_data and tide_data[tide_level] is not None]
        tide_changes = sorted(tide_changes, key=lambda k: k['date'])
        # 0 is full stage (ebb or flood), 100 is 1/4, 200 is 1/2, 300 is 3/4.
        # Below we add 2000 for flood or 4000 for ebb.
        tide_stages = [0, 100, 200, 300]
        prev_tide_data_rec = None
        tolerance = timedelta(hours=1)
        for tide_sample in tide_changes:
            if prev_tide_data_rec is not None:
                prev_date_time = tz_obj.localize(datetime.strptime(prev_tide_data_rec['date'],
                                                                   '%Y-%m-%d %H:%M:%S.0'))
                cur_date_time = tz_obj.localize(datetime.strptime(tide_sample['date'],
                                                                  '%Y-%m-%d %H:%M:%S.0'))
                # NOTE(review): with 'or', each side is satisfied whenever end_date
                # is past the widened lower bound; 'and' may have been intended —
                # preserved as-is, confirm against known-good results.
                if (end_date >= prev_date_time - tolerance or end_date >= prev_date_time + tolerance)\
                        and (end_date < cur_date_time - tolerance or end_date < cur_date_time + tolerance):
                    prev_level = prev_tide_data_rec['value']
                    cur_level = tide_sample['value']
                    if prev_level < cur_level:
                        tide_state = 2000  # flood: rising from prev to current
                    else:
                        tide_state = 4000  # ebb: falling
                    # Divide the time between the two tide changes into 4 pieces
                    # and figure out which quarter the query time falls in.
                    time_delta = cur_date_time - prev_date_time
                    qtr_time = time_delta.total_seconds() / 4.0
                    prev_time = prev_date_time
                    for i in range(0, 4):
                        if end_date >= prev_time and end_date < (prev_time + timedelta(seconds=qtr_time)):
                            tide_stage = tide_state + tide_stages[i]
                            break
                        prev_time = prev_time + timedelta(seconds=qtr_time)
            if tide_stage != -9999:
                break
            prev_tide_data_rec = tide_sample
    # Was 'except Exception, e:' — Python 2-only syntax, a SyntaxError on
    # Python 3 and inconsistent with the 'as e' form used above.
    except Exception as e:
        if self.logger:
            self.logger.exception(e)
    # The original computed tide_stage but never returned it.
    return tide_stage
def flight_info_extended(faFlightID, departure_date=None, arrival_date=None):
    """Extended flight info based on a FlightAware flight ID.

    Calls the FlightAware "FlightInfoEx" endpoint and optionally filters the
    returned flights to those departing/arriving on the given local dates
    (dates are interpreted in the origin/destination airport's timezone).

    Example: flight_info_extended("QTR1", departure_date=datetime.date(2016, 6, 14))
    Example: flight_info_extended("DAL2824-1463808481-airline-0168")
    Response example:
    {'FlightInfoExResult': {'flights': [{
    'actualarrivaltime': 0, 'actualdeparturetime': 0, 'aircrafttype': 'A320',
    'destination': 'MDSD', 'destinationCity': 'Punta Caucedo',
    'destinationName': 'Las Americas', 'diverted': '',
    'estimatedarrivaltime': 1464025020,
    'faFlightID': 'JBU509-1463808444-airline-0037',
    'filed_airspeed_kts': 460, 'filed_airspeed_mach': '',
    'filed_altitude': 350, 'filed_departuretime': 1464011700,
    'filed_ete': '03:32:00', 'filed_time': 1463808444, 'ident': 'JBU509',
    'origin': 'KJFK', 'originCity': 'New York, NY',
    'originName': 'John F Kennedy Intl',
    'route': 'SHIPP LINND ROLLE ATUGI L454 GOUGH L454 LUCTI L454 MNDEZ M594 CERDA L453 ASIVO W37 KOBET G446 KERSO'}],
    'next_offset': -1}}
    """
    params = dict(ident=faFlightID)
    result = flight_aware("FlightInfoEx", params)
    if departure_date or arrival_date:
        flight_info_result = result.get('FlightInfoExResult')
        if flight_info_result and isinstance(flight_info_result, dict):
            flights = flight_info_result.get('flights')
            if flights and isinstance(flights, list):
                if departure_date:
                    filtered_flights = []
                    for flight in flights:
                        # origin tz: map ICAO -> IATA, then look up the airport's zone.
                        orig_airport_icao_code = flight.get('origin')
                        orig_airport_code = AIRPORTS_ICAO_TO_IATA.get(orig_airport_icao_code, orig_airport_icao_code)
                        if orig_airport_code in FA_AIRPORTS:
                            origin_tz = pytz_timezone(FA_AIRPORTS[orig_airport_code]['timezone'])
                        else:
                            # Unknown airport: fall back to the server's local time.
                            origin_tz = None
                        # filed_departuretime is a Unix epoch; compare its local date.
                        if datetime.datetime.fromtimestamp(flight.get('filed_departuretime'), origin_tz).date() == departure_date:
                            filtered_flights.append(flight)
                    flights = filtered_flights
                if arrival_date:
                    filtered_flights = []
                    for flight in flights:
                        # destination_tz: same ICAO -> IATA -> timezone lookup.
                        destination_icao_code = flight.get('destination')
                        dest_airport_code = AIRPORTS_ICAO_TO_IATA.get(destination_icao_code, destination_icao_code)
                        if dest_airport_code in FA_AIRPORTS:
                            destination_tz = pytz_timezone(FA_AIRPORTS[dest_airport_code]['timezone'])
                        else:
                            destination_tz = None
                        if datetime.datetime.fromtimestamp(flight.get('estimatedarrivaltime'), destination_tz).date() == arrival_date:
                            filtered_flights.append(flight)
                    flights = filtered_flights
                # Replace the flight list in the response with the filtered one.
                result["FlightInfoExResult"]["flights"] = flights
    return result
def tz_obj(self):
    """Return the pytz timezone object named by this record's description."""
    zone_name = self.description
    return pytz_timezone(zone_name)
def send_notification(self, receiver_id, method, *events):
    """Send a notification to the given user via the given method about
    one or more events (all events are expected to share the same app)."""
    if not events:
        return
    if method != "email":
        print('Warning: notify method "%s" is not implemented.' % method)
        return
    app = events[0].app
    dst = Destination.objects.get(app=app)
    profile_cache_key = dst.cas_profile_url % receiver_id
    cached_profile = cache.get(profile_cache_key)
    # Resolve the profile URL up front: the warning messages below reference
    # cas_url, which previously was unbound (NameError) on the cached path.
    cas_url = profile_cache_key
    try:
        if not cached_profile:
            url = furl(cas_url)
            url.args["api_key"] = dst.cas_api_key
            cas_url = url.url
            profile_response = requests.get(cas_url)
            if profile_response.status_code != 200:
                print('Warning: "%s" returns %d.' % (cas_url, profile_response.status_code))
                return
            profile = json.loads(profile_response.text)
            cache.set(profile_cache_key, profile, 10)  # cache the fetched profile for 10s
        else:
            profile = cached_profile
        # Render the notification in the receiver's language and timezone.
        language = profile.get("language")
        tz = pytz_timezone(profile.get("tz_name"))
        translation.activate(language)
        timezone.activate(tz)
        if method == "email":
            email = profile.get("notification_email")
            if not email:
                print('Warning: "notification_email" in "%s" not present.' % cas_url)
            # '==', not 'is': identity comparison on small ints is fragile and
            # emits a SyntaxWarning on Python 3.8+.
            single = len(events) == 1
            slug = self.template_slug if single else self.template_slug_mult
            slug %= language
            if single:
                context = LocalizedEventSerializer(events[0]).data
            else:
                # Put at most 5 events into the template context.
                context = LocalizedEventSerializer(events[:5], many=True).data
            mailer = DBMailerAPI(dst.mailer_api_key, dst.mailer_api_url)
            mailer.send(slug, email, context)
    except ValueError:
        print('Warning: "%s" returns non-json data.' % cas_url)
    except EmailSendError:
        print('Warning: email to "%s" was not send.' % email)
    finally:
        translation.deactivate()
        timezone.deactivate()
def weekdate(date, timezone=None):
    """Format a UTC datetime as e.g. 'Mon, Jan  5' in the requested timezone
    (falls back to the app-configured zone)."""
    tz = pytz_timezone(timezone) if timezone else app.config['tz']
    localized = utc.localize(date).astimezone(tz)
    return tz.normalize(localized).strftime("%a, %b %e")
def timezone(self, value):
    """Store the zone name and derive the matching tzinfo (UTC when falsy)."""
    self._timezone = value
    self.tz = pytz_timezone(value) if value else utc
def tzinfo_from_zone(self, zone):
    """Resolve an Olson zone name (e.g. 'Europe/Paris') to a tzinfo object."""
    tzinfo = pytz_timezone(zone)
    return tzinfo
def shortdate(date, timezone=None):
    """Format a UTC datetime as 'Month DD, YYYY' in the requested timezone
    (falls back to the app-configured zone)."""
    tz = pytz_timezone(timezone) if timezone else app.config['tz']
    localized = utc.localize(date).astimezone(tz)
    return tz.normalize(localized).strftime("%B %d, %Y")
def local_time(dt, fmt=None, tz_name='US/Pacific'):
    """Convert *dt* to the named timezone; return the datetime itself, or a
    formatted string when *fmt* is given."""
    converted = dt.astimezone(pytz_timezone(tz_name))
    return converted.strftime(fmt) if fmt else converted
def save(self, *args, **kwargs):
    """Populate date_taken (from EXIF, else now), assign the next sort_number
    within the gallery, then save and warm the photo cache."""
    if self.date_taken is None:
        try:
            # EXIF stores 'YYYY:MM:DD HH:MM:SS' — split on whitespace, then ':'.
            exif_date = self.EXIF.get('EXIF DateTimeOriginal', None)
            if exif_date is not None:
                d, t = str.split(exif_date.values)
                year, month, day = d.split(':')
                hour, minute, second = t.split(':')
                # NOTE(review): assumes the camera clock is US/Eastern — confirm.
                self.date_taken = datetime(int(year), int(month), int(day),
                                           int(hour), int(minute), int(second),
                                           tzinfo=pytz_timezone('US/Eastern'))
        except Exception:
            # Best-effort EXIF parse: fall through to timezone.now() below.
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            pass
    if self.date_taken is None:
        self.date_taken = timezone.now()
    if self._get_pk_val():
        self.clear_cache()
    # Next sort_number is (current gallery max) + 1, or 1 for an empty gallery.
    self.sort_number = Photo.objects.filter(gallery=self.gallery).aggregate(Max('sort_number'))['sort_number__max']
    logger.debug('Current max sort number: {0}'.format(self.sort_number))
    if self.sort_number is None:
        self.sort_number = 1
    else:
        self.sort_number += 1
    logger.debug('Assigning sort number {0}'.format(self.sort_number))
    super(Photo, self).save(*args, **kwargs)
    self.pre_cache()
def calcTideRange(self, beginDate, endDate, station, datum='MLLW', units='feet', timezone='GMT', smoothData=False, tideFileDir=None):
    """Compute tide extrema for a station over [beginDate, endDate].

    Returns a dict whose keys are the tide indicators: LL is Lowest Low
    Tide, L is Low Tide, HH is Highest High Tide, H is High Tide, plus
    PeakValue/ValleyValue/tide_stage (left None when not determined).
    Optionally exponentially smooths the data (alpha=0.5) and/or writes a
    debug CSV into tideFileDir.
    """
    tideData = {}
    tideData['LL'] = None
    tideData['HH'] = None
    tideData['L'] = None
    tideData['H'] = None
    tideData['PeakValue'] = None
    tideData['ValleyValue'] = None
    tideData['tide_stage'] = None
    try:
        if self.use_raw:
            wlData = self.getWaterLevelRawSixMinuteData(beginDate.strftime('%Y%m%d'),
                                                        endDate.strftime('%Y%m%d'),
                                                        station, datum, units, timezone)
        else:
            wlData = self.getWaterLevelVerifiedSixMinuteData(beginDate.strftime('%Y%m%d'),
                                                             endDate.strftime('%Y%m%d'),
                                                             station, datum, units, timezone)
    except (WebFault, Exception) as e:
        if self.logger:
            self.logger.exception(e)
    else:
        chordLen = 10
        smoothDataROC = array.array('d')
        rawDataROC = array.array('d')
        expSmoothedData = array.array('d')
        dataLen = len(wlData.item)
        ndx = 0
        alpha = 0.5  # exponential-smoothing constant
        utc_tz = pytz_timezone('UTC')
        start_ndx = None
        end_ndx = None
        # Trim the record list to the [beginDate, endDate] window.
        for ndx in range(0, dataLen):
            wl_time = utc_tz.localize(datetime.strptime(wlData.item[ndx]['timeStamp'], '%Y-%m-%d %H:%M:%S.0'))
            if start_ndx is None and wl_time >= beginDate:
                start_ndx = ndx
            if end_ndx is None and wl_time > endDate:
                end_ndx = ndx - 1
        wlData.item = wlData.item[start_ndx:end_ndx]
        dataLen = len(wlData.item)
        # Exponential smoothing: Yn = alpha*Xn + (1-alpha)*Yn-1, plus
        # rate-of-change series for both raw and smoothed data.
        for ndx in range(0, dataLen):
            valN = wlData.item[ndx]['WL']
            if ndx == 0:
                expSmoothedData.append(valN)
            else:
                timeStruct = utc_tz.localize(datetime.strptime(wlData.item[ndx]['timeStamp'], '%Y-%m-%d %H:%M:%S.0'))
                timeN = int(get_utc_epoch(timeStruct))
                timeStruct = utc_tz.localize(datetime.strptime(wlData.item[ndx - 1]['timeStamp'], '%Y-%m-%d %H:%M:%S.0'))
                timeN1 = int(get_utc_epoch(timeStruct))
                Yn = (alpha * wlData.item[ndx]['WL']) + ((1 - alpha) * expSmoothedData[ndx - 1])
                expSmoothedData.append(Yn)
                smoothDataROC.append((expSmoothedData[ndx] - expSmoothedData[ndx - 1]) / (timeN - timeN1))
                valN1 = wlData.item[ndx - 1]['WL']
                rawDataROC.append((valN - valN1) / (timeN - timeN1))
        a = None
        b = None
        c = None
        # Integer division: midPt is used as an array index below; '/' would
        # produce a float and raise TypeError under Python 3.
        midPt = chordLen // 2
        if self.logger:
            self.logger.info("Checking Raw data.")
        self.find_tide_change_points(wlData.item, chordLen, tideData)
        if smoothData:
            print("Checking smoothed data.")
            # Chord-walk the smoothed curve; a local max/min at the chord's
            # midpoint marks a tide change.
            dataLen = len(expSmoothedData)
            ndx = 0
            ptFound = False
            stopProc = False
            while ndx < dataLen:
                a = expSmoothedData[ndx]
                if ndx + midPt < dataLen - 1:
                    b = expSmoothedData[ndx + midPt]
                else:
                    stopProc = True
                if ndx + chordLen < dataLen - 1:
                    c = expSmoothedData[ndx + chordLen]
                else:
                    stopProc = True
                if stopProc == False:
                    if c - a > 0:
                        if b > a and b > c:
                            if self.logger != None:
                                self.logger.debug("Tide change at Ndx: %d Val: %f" % (ndx + midPt, b))
                            ptFound = True
                    elif c - a < 0:
                        if b < a and b < c:
                            if self.logger:
                                self.logger.debug("Tide change at Ndx: %d Val: %f" % (ndx + midPt, b))
                            ptFound = True
                if ptFound == False:
                    ndx += 1
                else:
                    # Skip ahead: no new change can occur within the chord.
                    ndx = ndx + midPt
                    ptFound = False
        if tideFileDir != None:
            # Debug dump: epoch seconds, raw WL, raw ROC, smoothed WL, smoothed ROC.
            filename = "%s\\%s-%s.csv" % (tideFileDir, beginDate, endDate)
            tideFile = open(filename, "w")
            ndx = 0
            dataLen = len(wlData.item)
            while ndx < dataLen:
                timeStruct = time.strptime(wlData.item[ndx]['timeStamp'], '%Y-%m-%d %H:%M:%S.0')
                seconds = time.mktime(timeStruct)
                rawROC = ''
                smoothedROC = ''  # was unbound when ndx >= len(rawDataROC)
                smoothedData = ''
                if ndx < len(rawDataROC):
                    rawROC = rawDataROC[ndx]
                    smoothedROC = smoothDataROC[ndx]
                    smoothedData = expSmoothedData[ndx]
                outbuf = "%s,%s,%s,%s,%s\n" % (seconds, wlData.item[ndx]['WL'], rawROC, smoothedData, smoothedROC)
                ndx += 1
                tideFile.write(outbuf)
            tideFile.close()
    # If we didn't have all the inflection points, the caller can use the
    # peak/valley values for the missing one(s).
    return (tideData)
def __json_string_to_date_time(date_time_str):
    """Parse 'YYYY-MM-DD HH:MM' as Europe/Berlin local time and return it in UTC."""
    naive = datetime.strptime(date_time_str, "%Y-%m-%d %H:%M")
    berlin = pytz_timezone('Europe/Berlin')
    localized = berlin.localize(naive)
    return localized.astimezone(pytz.utc)
def calcTideRangeExt(self, beginDate, endDate, station, datum='MLLW', units='feet', timezone='GMT', smoothData=False, tideFileDir=None, write_tide_data=False):
    """Compute tide extrema from the Ext (raw-XML) six-minute endpoints.

    Returns a tuple (tideData, pda_tide_data): tideData comes from the
    chord-based find_tide_change_points pass, pda_tide_data from the
    pda_peakdetect pass. Keys in both: LL lowest low, L low, HH highest
    high, H high, plus PeakValue/ValleyValue/tide_stage.
    """
    tideData = None
    pda_tide_data = None
    try:
        if self.use_raw:
            wlData = self.getWaterLevelRawSixMinuteDataExt(beginDate.strftime('%Y%m%d'),
                                                           endDate.strftime('%Y%m%d'),
                                                           station, datum, units, timezone)
        else:
            wlData = self.getWaterLevelVerifiedSixMinuteDataExt(beginDate.strftime('%Y%m%d'),
                                                                endDate.strftime('%Y%m%d'),
                                                                station, datum, units, timezone)
    except (WebFault, Exception) as e:
        if self.logger:
            self.logger.exception(e)
    else:
        tideData = {}
        tideData['LL'] = None
        tideData['HH'] = None
        tideData['L'] = None
        tideData['H'] = None
        tideData['PeakValue'] = None
        tideData['ValleyValue'] = None
        tideData['tide_stage'] = None
        chordLen = 10
        smoothDataROC = array.array('d')
        rawDataROC = array.array('d')
        expSmoothedData = array.array('d')
        ndx = 0
        alpha = 0.5  # exponential-smoothing constant
        utc_tz = pytz_timezone('UTC')
        start_ndx = None
        end_ndx = None
        # It's seemingly impossible to use object notation to navigate to the
        # data, so walk the raw SOAP body instead.
        data_start_tag = wlData.Body.getchildren()[0].getchildren()[0].item
        dataLen = len(data_start_tag)
        # Trim the record list to the [beginDate, endDate] window.
        for ndx in range(0, dataLen):
            wl_time = utc_tz.localize(datetime.strptime(data_start_tag[ndx]['timeStamp'].text, '%Y-%m-%d %H:%M:%S.0'))
            if start_ndx is None and wl_time >= beginDate:
                start_ndx = ndx
            if end_ndx is None and wl_time > endDate:
                end_ndx = ndx - 1
        data_start_tag = data_start_tag[start_ndx:end_ndx]
        dataLen = len(data_start_tag)
        # Exponential smoothing: Yn = alpha*Xn + (1-alpha)*Yn-1, plus
        # rate-of-change series for both raw and smoothed data.
        for ndx in range(0, dataLen):
            valN = data_start_tag[ndx]['WL']
            if ndx == 0:
                expSmoothedData.append(valN)
            else:
                timeStruct = utc_tz.localize(datetime.strptime(data_start_tag[ndx]['timeStamp'].text, '%Y-%m-%d %H:%M:%S.0'))
                timeN = int(get_utc_epoch(timeStruct))
                timeStruct = utc_tz.localize(datetime.strptime(data_start_tag[ndx - 1]['timeStamp'].text, '%Y-%m-%d %H:%M:%S.0'))
                timeN1 = int(get_utc_epoch(timeStruct))
                Yn = (alpha * data_start_tag[ndx]['WL']) + ((1 - alpha) * expSmoothedData[ndx - 1])
                expSmoothedData.append(Yn)
                smoothDataROC.append((expSmoothedData[ndx] - expSmoothedData[ndx - 1]) / (timeN - timeN1))
                valN1 = data_start_tag[ndx - 1]['WL']
                rawDataROC.append((valN - valN1) / (timeN - timeN1))
        a = None
        b = None
        c = None
        # Integer division: midPt is used as an array index below; '/' would
        # produce a float and raise TypeError under Python 3.
        midPt = chordLen // 2
        if self.logger:
            self.logger.info("Checking Raw data.")
        self.find_tide_change_points(data_start_tag, chordLen, tideData)
        if smoothData:
            print("Checking smoothed data.")
            dataLen = len(expSmoothedData)
            ndx = 0
            ptFound = False
            stopProc = False
            while ndx < dataLen:
                a = expSmoothedData[ndx]
                if ndx + midPt < dataLen - 1:
                    b = expSmoothedData[ndx + midPt]
                else:
                    stopProc = True
                if ndx + chordLen < dataLen - 1:
                    c = expSmoothedData[ndx + chordLen]
                else:
                    stopProc = True
                if stopProc == False:
                    if c - a > 0:
                        if b > a and b > c:
                            if self.logger != None:
                                self.logger.debug("Tide change at Ndx: %d Val: %f" % (ndx + midPt, b))
                            ptFound = True
                    elif c - a < 0:
                        if b < a and b < c:
                            if self.logger:
                                self.logger.debug("Tide change at Ndx: %d Val: %f" % (ndx + midPt, b))
                            ptFound = True
                if ptFound == False:
                    ndx += 1
                else:
                    ndx = ndx + midPt
                    ptFound = False
        if tideFileDir != None:
            filename = "%s\\%s-%s.csv" % (tideFileDir, beginDate, endDate)
            tideFile = open(filename, "w")
            ndx = 0
            # Was len(wlData.item): this loop indexes the already-sliced
            # data_start_tag, so use its length.
            dataLen = len(data_start_tag)
            while ndx < dataLen:
                # NOTE(review): other strptime calls here use ['timeStamp'].text;
                # this one doesn't — confirm the element coerces to str.
                timeStruct = time.strptime(data_start_tag[ndx]['timeStamp'], '%Y-%m-%d %H:%M:%S.0')
                seconds = time.mktime(timeStruct)
                rawROC = ''
                smoothedROC = ''  # was unbound when ndx >= len(rawDataROC)
                smoothedData = ''
                if ndx < len(rawDataROC):
                    rawROC = rawDataROC[ndx]
                    smoothedROC = smoothDataROC[ndx]
                    smoothedData = expSmoothedData[ndx]
                outbuf = "%s,%s,%s,%s,%s\n" % (seconds, data_start_tag[ndx]['WL'], rawROC, smoothedData, smoothedROC)
                ndx += 1
                tideFile.write(outbuf)
            tideFile.close()
        # If we didn't have all the inflection points, use the peak/valley
        # values for the missing one(s): second pass with pda_peakdetect.
        recs = [data_start_tag[ndx]['WL'] for ndx, data in enumerate(data_start_tag)]
        pda_maxtab, pda_mintab = pda_peakdetect(recs, None, 10, 0, False)
        pda_tide_data = {}
        pda_tide_data['LL'] = None
        pda_tide_data['HH'] = None
        pda_tide_data['L'] = None
        pda_tide_data['H'] = None
        pda_tide_data['PeakValue'] = None
        pda_tide_data['ValleyValue'] = None
        pda_tide_data['tide_stage'] = None
        try:
            if len(pda_maxtab) > 0:
                # Sort maxima ascending by value: last is HH, next-to-last is H.
                maxes = sorted(pda_maxtab, key=lambda rec: rec[1])
                max_len = len(pda_maxtab) - 1
                pda_tide_data['HH'] = {'value': data_start_tag[int(maxes[max_len][0])]['WL'],
                                       'date': data_start_tag[int(maxes[max_len][0])]['timeStamp']}
                if max_len > 0:
                    pda_tide_data['H'] = {'value': data_start_tag[int(maxes[max_len - 1][0])]['WL'],
                                          'date': data_start_tag[int(maxes[max_len - 1][0])]['timeStamp']}
            if len(pda_mintab):
                # Sort minima descending: last is LL, next-to-last is L.
                mins = sorted(pda_mintab, key=lambda rec: rec[1], reverse=True)
                max_len = len(pda_mintab) - 1
                pda_tide_data['LL'] = {'value': data_start_tag[int(mins[max_len][0])]['WL'],
                                       'date': data_start_tag[int(mins[max_len][0])]['timeStamp']}
                if max_len > 0:
                    pda_tide_data['L'] = {'value': data_start_tag[int(mins[max_len - 1][0])]['WL'],
                                          'date': data_start_tag[int(mins[max_len - 1][0])]['timeStamp']}
        except Exception as e:
            if self.logger:
                self.logger.exception(e)
        if write_tide_data:
            # NOTE(review): hard-coded developer path — parameterize if this
            # debug output is needed outside the original author's machine.
            with open('/Users/danramage/tmp/%s.csv' % (endDate.strftime('%Y-%m-%d_%H_%M')), 'w') as tide_data_out:
                for rec in data_start_tag:
                    tide_data_out.write("%s,%f\n" % (rec['timeStamp'], rec['WL']))
    return (tideData, pda_tide_data)
def calcTideRangePeakDetect(self, beginDate, endDate, station, datum='MLLW', units='feet', timezone='GMT', smoothData=False):
    """Compute tide extrema using only the pda_peakdetect pass.

    Fetches Ext (raw-XML) six-minute data, trims it to the requested window,
    and returns a dict keyed by the tide indicators: LL is Lowest Low Tide,
    L is Low Tide, HH Highest High Tide, H High Tide, plus
    PeakValue/ValleyValue/tide_stage. Returns None when the data fetch fails.
    """
    #This is the dictionary we return. Its keys are the tide indicators: LL is Lowest Low Tide, L is Low Tide, HH Highest High Tide, H High tide.
    tideData = None  # vestigial: never populated or returned here
    pda_tide_data = None
    try:
        if self.use_raw:
            wlData = self.getWaterLevelRawSixMinuteDataExt(beginDate.strftime('%Y%m%d'), endDate.strftime('%Y%m%d'), station, datum, units, timezone)
        else:
            wlData = self.getWaterLevelVerifiedSixMinuteDataExt(beginDate.strftime('%Y%m%d'), endDate.strftime('%Y%m%d'), station, datum, units, timezone)
    except (WebFault, Exception) as e:
        if self.logger:
            self.logger.exception(e)
    else:
        utc_tz = pytz_timezone('UTC')
        start_ndx = None
        end_ndx = None
        #for ndx in range(0, dataLen):
        #It's seemingly impossible to use object notation to navigate to the data.
        data_start_tag = wlData.Body.getchildren()[0].getchildren()[0].item
        dataLen = len(data_start_tag)
        #Get the previous 24 hours of data we are interested in.
        for ndx in range(0, dataLen):
            wl_time = utc_tz.localize(datetime.strptime(data_start_tag[ndx]['timeStamp'].text, '%Y-%m-%d %H:%M:%S.0'))
            if start_ndx is None and wl_time >= beginDate:
                start_ndx = ndx
            if end_ndx is None and wl_time > endDate:
                end_ndx = ndx-1
        data_start_tag = data_start_tag[start_ndx:end_ndx]
        recs = [data_start_tag[ndx]['WL'] for ndx, data in enumerate(data_start_tag)]
        pda_maxtab, pda_mintab = pda_peakdetect(recs, None, 10, 0, False)
        pda_tide_data = {}
        pda_tide_data['LL'] = None
        pda_tide_data['HH'] = None
        pda_tide_data['L'] = None
        pda_tide_data['H'] = None
        pda_tide_data['PeakValue'] = None
        pda_tide_data['ValleyValue'] = None
        pda_tide_data['tide_stage'] = None
        try:
            if len(pda_maxtab) > 0:
                # Maxima sorted ascending by value: last entry is HH, the one before it H.
                maxes = sorted(pda_maxtab, key=lambda rec: rec[1])
                max_len = len(pda_maxtab) - 1
                pda_tide_data['HH'] = { 'value': data_start_tag[int(maxes[max_len][0])]['WL'], 'date': data_start_tag[int(maxes[max_len][0])]['timeStamp'] }
                if max_len > 0:
                    pda_tide_data['H'] = { 'value': data_start_tag[int(maxes[max_len-1][0])]['WL'], 'date': data_start_tag[int(maxes[max_len-1][0])]['timeStamp'] }
            if len(pda_mintab):
                # Minima sorted descending by value: last entry is LL, the one before it L.
                mins = sorted(pda_mintab, key=lambda rec: rec[1], reverse=True)
                max_len = len(pda_mintab) - 1
                pda_tide_data['LL'] = { 'value': data_start_tag[int(mins[max_len][0])]['WL'], 'date': data_start_tag[int(mins[max_len][0])]['timeStamp'] }
                if max_len > 0:
                    pda_tide_data['L'] = { 'value': data_start_tag[int(mins[max_len-1][0])]['WL'], 'date': data_start_tag[int(mins[max_len-1][0])]['timeStamp'] }
            # Stage computed over the full (untrimmed) response.
            tide_stage = self.calc_tide_stage(wlData, beginDate, endDate, pytz_timezone('UTC'), 10, True)
            pda_tide_data['tide_stage'] = tide_stage
        except Exception as e:
            if self.logger:
                self.logger.exception(e)
    return pda_tide_data
def get_flight_status_data(body):
    """ Get data about flight from FlightAware API - and format the output in a FB Flight update format

    ``body`` must contain 'ICAO', 'Number', 'Name' and may contain ISO-ish
    'departure'/'arrival' strings ('YYYY-MM-DDT...'). Returns a dict shaped
    for a Facebook flight update, {} when no matching flight was found, or
    None on lookup failures.
    """
    flight_number = '{}{}'.format(body['ICAO'], body['Number'])
    # Dates arrive as 'YYYY-MM-DDT...' — keep only the date part.
    departure_date = body.get('departure', '').split('T')[0]
    if departure_date:
        departure_date = datetime.datetime.strptime(departure_date, "%Y-%m-%d").date()
    else:
        departure_date = None
    arrival_date = body.get('arrival', '').split('T')[0]
    if arrival_date:
        arrival_date = datetime.datetime.strptime(arrival_date, "%Y-%m-%d").date()
    else:
        arrival_date = None
    if not arrival_date and not departure_date:
        # No dates given: resolve the next scheduled instance of this flight.
        next_flight = find_next_flight(flight_number)
        if not isinstance(next_flight, dict):
            return
        faFlightID = next_flight.get('InFlightInfoResult', {}).get('faFlightID')
        if not faFlightID:
            return {}
    else:
        faFlightID = flight_number
    FlightInfoExResult = flight_info_extended(faFlightID, departure_date=departure_date, arrival_date=arrival_date)
    flights = FlightInfoExResult.get('FlightInfoExResult', {}).get('flights')
    extended_info = flights[0] if flights else None
    if not extended_info:
        return {}
    faFlightID = extended_info.get('faFlightID')
    AirlineFlightInfoResult = flight_airline_info(faFlightID)
    fa_airline_info = AirlineFlightInfoResult.get('AirlineFlightInfoResult', {})
    if not fa_airline_info:
        return
    # origin: map ICAO -> IATA, then look up the airport timezone.
    orig_airport_icao_code = extended_info.get('origin')
    orig_airport_code = AIRPORTS_ICAO_TO_IATA.get(orig_airport_icao_code, orig_airport_icao_code)
    if orig_airport_code in FA_AIRPORTS:
        origin_tz = pytz_timezone(FA_AIRPORTS[orig_airport_code]['timezone'])
    else:
        # Unknown airport: fall back to the server's local time.
        origin_tz = None
    # Epoch timestamps converted to airport-local datetimes.
    filed_departuretime = extended_info['filed_departuretime']
    actual_departuretime = extended_info.get('actualdeparturetime')
    depart_date = datetime.datetime.fromtimestamp(filed_departuretime, origin_tz)
    if actual_departuretime:
        actual_departuretime = datetime.datetime.fromtimestamp(actual_departuretime, origin_tz)
    # destination: same ICAO -> IATA -> timezone lookup.
    destination_icao_code = extended_info.get('destination')
    dest_airport_code = AIRPORTS_ICAO_TO_IATA.get(destination_icao_code, destination_icao_code)
    if dest_airport_code in FA_AIRPORTS:
        destination_tz = pytz_timezone(FA_AIRPORTS[dest_airport_code]['timezone'])
    else:
        destination_tz = None
    estimatedarrivaltime = extended_info['estimatedarrivaltime']
    arrival_date = datetime.datetime.fromtimestamp(estimatedarrivaltime, destination_tz)
    return dict(
        flight_number=flight_number,
        number=body['Number'],
        airline_name=body['Name'],
        departure_airport={
            "airport_code": orig_airport_code,
            "city": extended_info['originName'],
            "gate": fa_airline_info['gate_orig'],
            "terminal": fa_airline_info['terminal_orig']
        },
        arrival_airport={
            "airport_code": dest_airport_code,
            "city": extended_info.get('destinationName'),
            "gate": fa_airline_info['gate_dest'],
            "terminal": fa_airline_info['terminal_dest']
        },
        flight_schedule={
            "departure_time": depart_date,
            "departure_time_actual": actual_departuretime,
            "arrival_time": arrival_date,
            "boarding_time": "",
        }
    )
def enddate(date, timezone=None):
    """Format a UTC datetime as e.g. ' 5:30 PM  9 Jan 2019' in the requested
    timezone (falls back to the app-configured zone)."""
    tz = pytz_timezone(timezone) if timezone else app.config['tz']
    localized = utc.localize(date).astimezone(tz)
    return tz.normalize(localized).strftime("%l:%M %p %e %b %Y")