def test_timestamp(self):
    """Exercise soxtimestamp.timestamp() for aware and naive datetimes."""
    utc = tz.tzutc()
    jst = tz.tzoffset('JST', 3600 * 9)
    pst = tz.tzoffset('PST', 3600 * -8)

    def _total_second(td):
        # timedelta -> seconds without relying on timedelta.total_seconds()
        return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6

    # Naive datetimes get stamped with the machine's zone, so this test is
    # only meaningful on a JST-configured host.
    local = tz.tzlocal()
    local_offset = _total_second(local.utcoffset(datetime.datetime.now()))
    assert local_offset == 3600 * 9, 'run test in JST'

    eq_(soxtimestamp.timestamp(datetime.datetime(1999, 1, 23, 14, 44, 53, 33451, jst)),
        '1999-01-23T14:44:53.33451+09:00')
    eq_(soxtimestamp.timestamp(datetime.datetime(2014, 4, 1, 12, 34, 56, 54243, utc)),
        '2014-04-01T12:34:56.54243Z')
    eq_(soxtimestamp.timestamp(datetime.datetime(1985, 5, 12, 0, 0, 0, 111, pst)),
        '1985-05-12T00:00:00.111-08:00')
    # Naive values: the system timezone (JST, asserted above) is applied.
    eq_(soxtimestamp.timestamp(datetime.datetime(2001, 12, 31, 1, 2, 3)),
        '2001-12-31T01:02:03+09:00')
    eq_(soxtimestamp.timestamp(datetime.datetime(2002, 1, 2, 18, 22, 59, 1234)),
        '2002-01-02T18:22:59.1234+09:00')
def test_datetime(self):
    """DateTime() should accept a wide range of date/time string formats."""
    check = DateTime()
    full = datetime.datetime(2017, 9, 1, 23, 59, 59)
    minute = datetime.datetime(2017, 9, 1, 23, 59)
    day = datetime.datetime(2017, 9, 1)
    brt = tzoffset(None, -10800)  # -03:00

    self.assertEqual(check('2017-09-01 23:59'), minute)
    self.assertEqual(check('Fri Sep 1 23:59:59 UTC 2017'),
                     datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=tzutc()))
    self.assertEqual(check('Fri Sep 1 23:59:59 2017'), full)
    self.assertEqual(check('Fri, 1 Sep 2017 23:59:59 -0300'),
                     datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=brt))
    self.assertEqual(check('2017-09-01T23:59:59.5-03:00'),
                     datetime.datetime(2017, 9, 1, 23, 59, 59, 500000, tzinfo=brt))
    self.assertEqual(check('20170901T235959.5-0300'),
                     datetime.datetime(2017, 9, 1, 23, 59, 59, 500000, tzinfo=brt))
    self.assertEqual(check('20170901T235959-0300'),
                     datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=brt))
    # Second / minute / hour resolution, with and without separators.
    self.assertEqual(check('2017-09-01T23:59:59'), full)
    self.assertEqual(check('20170901T235959'), full)
    self.assertEqual(check('20170901235959'), full)
    self.assertEqual(check('2017-09-01T23:59'), minute)
    self.assertEqual(check('20170901T2359'), minute)
    self.assertEqual(check('2017-09-01T23'), datetime.datetime(2017, 9, 1, 23))
    self.assertEqual(check('20170901T23'), datetime.datetime(2017, 9, 1, 23))
    # Date-only spellings.
    self.assertEqual(check('2017-09-01'), day)
    self.assertEqual(check('20170901'), day)
    self.assertEqual(check('09-01-2017'), day)
    self.assertEqual(check('09-01-17'), day)
    self.assertEqual(check('2017.Sep.01'), day)
    self.assertEqual(check('2017/09/01'), day)
    self.assertEqual(check('2017 09 01'), day)
    self.assertEqual(check('1st of September 2017'), day)
    # Note: for equality here we would need to pass extra params to parse()
    self.assertNotEqual(check('01-09-2017'), day)
def isoparse(ds):
    """Best-effort ISO-8601-ish parser.

    Tries progressively simpler formats: datetime with microseconds,
    datetime without microseconds (each optionally followed by a numeric
    UTC offset such as "+09:00" or "+0900"), then bare times, and finally
    a bare date.  Returns a datetime.datetime; if nothing matches, the
    final strptime's ValueError propagates to the caller.
    """
    try:
        # Full datetime with fractional seconds; ' ' is accepted as the
        # date/time separator by normalizing it to 'T' first.
        dt = datetime.datetime.strptime(ds[:26].replace(' ', 'T'),
                                        '%Y-%m-%dT%H:%M:%S.%f')
        try:
            # Trailing "+HH:MM"/"+HHMM" offset, if any.
            offset = ds[26:].replace(':', '')
            delta = datetime.timedelta(hours=int(offset[:-2]),
                                       minutes=int(offset[-2:]))
            dt = dt.replace(tzinfo=tzoffset(None, int(delta.total_seconds())))
        except Exception:
            pass  # no (or malformed) offset: keep the naive datetime
    except Exception:
        try:
            # Datetime without fractional seconds.
            dt = datetime.datetime.strptime(ds[:19].replace(' ', 'T'),
                                            '%Y-%m-%dT%H:%M:%S')
            try:
                offset = ds[19:].replace(':', '')
                delta = datetime.timedelta(hours=int(offset[:-2]),
                                           minutes=int(offset[-2:]))
                dt = dt.replace(tzinfo=tzoffset(None, int(delta.total_seconds())))
            except Exception:
                pass  # no (or malformed) offset: keep the naive datetime
        except Exception:
            try:
                # Bare time of day (date defaults to 1900-01-01).
                dt = datetime.datetime.strptime(ds[:15], '%H:%M:%S.%f')
            except Exception:
                try:
                    dt = datetime.datetime.strptime(ds[:8], '%H:%M:%S')
                except Exception:
                    try:
                        dt = datetime.datetime.strptime(ds[:8], '%H:%M')
                    except Exception:
                        # Last resort: bare date; ValueError propagates here.
                        dt = datetime.datetime.strptime(ds[:10], '%Y-%m-%d')
    return dt
def test_YYYY_MM_DDTHH_mm_ss_SZ(self):
    """Fractional seconds are taken as a literal microsecond count here."""
    plus_one = tz.tzoffset(None, 3600)
    cases = [
        ("2013-02-03T04:05:06.7+01:00", 7),
        ("2013-02-03T04:05:06.78+01:00", 78),
        ("2013-02-03T04:05:06.789+01:00", 789),
        ("2013-02-03T04:05:06.7891+01:00", 7891),
        ("2013-02-03T04:05:06.78912+01:00", 78912),
    ]
    for text, micro in cases:
        assertEqual(
            self.parser.parse_iso(text),
            datetime(2013, 2, 3, 4, 5, 6, micro, tzinfo=plus_one),
        )
def test_YYYY_MM_DDTHH_mm_ss_SZ(self):
    """Fractional seconds are right-padded to microseconds (.7 -> 700000)."""
    plus_one = tz.tzoffset(None, 3600)
    for text, micro in [
        ('2013-02-03T04:05:06.7+01:00', 700000),
        ('2013-02-03T04:05:06.78+01:00', 780000),
        ('2013-02-03T04:05:06.789+01:00', 789000),
        ('2013-02-03T04:05:06.7891+01:00', 789100),
        ('2013-02-03T04:05:06.78912+01:00', 789120),
    ]:
        assertEqual(
            self.parser.parse_iso(text),
            datetime(2013, 2, 3, 4, 5, 6, micro, tzinfo=plus_one)
        )
    # Properly parse string with Z timezone (result is naive here).
    assertEqual(
        self.parser.parse_iso('2013-02-03T04:05:06.78912Z'),
        datetime(2013, 2, 3, 4, 5, 6, 789120)
    )
def test_setting_tz(self):
    """Encoding a naive datetime must match its har.TIMEZONE-aware twin."""
    naive = datetime.now()
    zero_offset = tz.tzoffset(None, 0)
    aware = naive.replace(tzinfo=zero_offset)
    har.TIMEZONE = zero_offset
    good_json = self.har_encoder.default(aware)
    test_json = self.har_encoder.default(naive)
    self.assertEqual(good_json, test_json)
def test_timezone_aware_parser(self):
    """ Test the timezone_aware_parser method with different string formats """
    # Case 1 & 2: explicit offsets in two different string formats.
    assert load_datetime_tz('2009/05/13 19:19:30 -0400').tzinfo == \
        tzoffset(None, -14400)
    assert load_datetime_tz('2004-04-09T21:39:00-08:00').tzinfo == \
        tzoffset(None, -28800)
    # Case 3: no timezone info at all -> falls back to tzlocal().
    assert load_datetime_tz(str(datetime.now())).tzinfo == tzlocal()
    # Case 4: an unrecognised timezone format -> also falls back to tzlocal().
    assert load_datetime_tz('16:08:12 05/08/03 AEST').tzinfo == tzlocal()
def test_multivalue_DA(self):
    """Round-trip single and multi-valued DA/DT/TM elements through a file."""
    minus_0600 = tzoffset('-0600', -21600)
    multi_DA_expected = (date(1961, 8, 4), date(1963, 11, 22))
    DA_expected = date(1961, 8, 4)
    multi_DT_expected = (datetime(1961, 8, 4),
                         datetime(1963, 11, 22, 12, 30, 0, 0, minus_0600))
    multi_TM_expected = (time(1, 23, 45), time(11, 11, 11))
    TM_expected = time(11, 11, 11, 1)

    # Write the date/time data elements out ...
    ds = read_file(datetime_name)
    ds.CalibrationDate = MultiValue(DA, multi_DA_expected)
    ds.DateOfLastCalibration = DA(DA_expected)
    ds.ReferencedDateTime = MultiValue(DT, multi_DT_expected)
    ds.CalibrationTime = MultiValue(TM, multi_TM_expected)
    ds.TimeOfLastCalibration = TM(TM_expected)
    ds.save_as(datetime_out)

    # ... then read them back and compare against the originals.
    ds = read_file(datetime_out)
    self.assertSequenceEqual(multi_DA_expected, ds.CalibrationDate,
                             "Multiple dates not written correctly (VR=DA)")
    self.assertEqual(DA_expected, ds.DateOfLastCalibration,
                     "Date not written correctly (VR=DA)")
    self.assertSequenceEqual(multi_DT_expected, ds.ReferencedDateTime,
                             "Multiple datetimes not written correctly (VR=DT)")
    self.assertSequenceEqual(multi_TM_expected, ds.CalibrationTime,
                             "Multiple times not written correctly (VR=TM)")
    self.assertEqual(TM_expected, ds.TimeOfLastCalibration,
                     "Time not written correctly (VR=DA)")
    if os.path.exists(datetime_out):
        os.remove(datetime_out)  # get rid of the temporary file
def write_revision(repo, writer, commit, path):
    """Index one page revision (a commit + blob path) into the search writer."""
    from posixpath import dirname

    tree_id = commit.tree
    tree = repo[tree_id]
    mode, blob_id = tree.lookup_path(repo.get_object, path)

    # Git stores epoch seconds plus a zone offset for both parties.
    commit_time = datetime.fromtimestamp(
        commit.commit_time,
        tzoffset(None, commit.commit_timezone),
    )
    author_time = datetime.fromtimestamp(
        commit.author_time,
        tzoffset(None, commit.author_timezone),
    )

    # Pull title/slug/docinfo out of the page's reST source.
    page_blob = repo[blob_id]
    doctree = read_page_rst(page_blob.data)
    title = get_title(doctree)
    slug = slugify(title)
    docinfo = get_docinfo_as_dict(doctree)
    date = parse_date(docinfo['date'])
    status = docinfo['status']

    # Attachments live in the tree containing the page.
    page_tree_path = dirname(path)
    page_tree_mode, page_tree_id = tree.lookup_path(
        repo.get_object,
        page_tree_path,
    )
    page_tree = repo[page_tree_id]
    attachments = git_storage.load_page_attachments(repo, page_tree)

    with writer.group():
        writer.add_document(
            kind=u'revision',
            revision_date=date,
            revision_slug=unicode(slug),
            revision_title=unicode(title),
            revision_status=unicode(status),
            revision_path=unicode(path),
            revision_blob_id=unicode(blob_id),
            revision_commit_id=unicode(commit.id),
            revision_tree_id=unicode(tree_id),
            revision_author=unicode(commit.author),
            revision_committer=unicode(commit.committer),
            revision_author_time=author_time,
            revision_commit_time=commit_time,
            revision_message=unicode(commit.message),
        )
        for attachment in attachments:
            write_revision_attachment(writer, attachment)
def dateutil_parse(timestr, default, ignoretz=False, tzinfos=None, **kwargs):
    """Parse ``timestr`` the way dateutil does, but also return the resolution.

    Lifted from dateutil so the finest component the parser actually saw
    ("year" ... "microsecond") can be reported back as ``reso``.
    Returns ``(datetime, reso)``; raises ValueError on unparseable input.
    """
    from dateutil import tz
    import time
    fobj = StringIO(str(timestr))
    res = DEFAULTPARSER._parse(fobj, **kwargs)
    # dateutil 2.2 compat: _parse started returning (result, skipped) tuples.
    if isinstance(res, tuple):
        res, _ = res
    if res is None:
        raise ValueError("unknown string format")
    repl = {}
    reso = None
    for attr in ["year", "month", "day", "hour",
                 "minute", "second", "microsecond"]:
        # Collect the parsed components; track the finest one found.
        value = getattr(res, attr)
        if value is not None:
            repl[attr] = value
            reso = attr
    if reso is None:
        raise ValueError("Cannot parse date.")
    if reso == "microsecond":
        # Collapse trailing zeros into a coarser resolution label.
        if repl["microsecond"] == 0:
            reso = "second"
        elif repl["microsecond"] % 1000 == 0:
            reso = "millisecond"
    ret = default.replace(**repl)
    if res.weekday is not None and not res.day:
        # Weekday-only inputs ("Tuesday") roll forward from the default date.
        ret = ret + relativedelta.relativedelta(weekday=res.weekday)
    if not ignoretz:
        if callable(tzinfos) or tzinfos and res.tzname in tzinfos:
            # A caller-supplied zone mapping wins over anything parsed.
            if callable(tzinfos):
                tzdata = tzinfos(res.tzname, res.tzoffset)
            else:
                tzdata = tzinfos.get(res.tzname)
            if isinstance(tzdata, datetime.tzinfo):
                tzinfo = tzdata
            elif isinstance(tzdata, compat.string_types):
                tzinfo = tz.tzstr(tzdata)
            elif isinstance(tzdata, int):
                tzinfo = tz.tzoffset(res.tzname, tzdata)
            else:
                raise ValueError("offset must be tzinfo subclass, "
                                 "tz string, or int offset")
            ret = ret.replace(tzinfo=tzinfo)
        elif res.tzname and res.tzname in time.tzname:
            # Name matches the local zone's abbreviation: use tzlocal.
            ret = ret.replace(tzinfo=tz.tzlocal())
        elif res.tzoffset == 0:
            ret = ret.replace(tzinfo=tz.tzutc())
        elif res.tzoffset:
            ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))
    return ret, reso
def test_get_adset(self):
    """Fetch a single ad set and verify the decoded field values."""
    with self.replay():
        fbads = FBAds(account_id='1378857852381224',
                      access_token=self.get_test_access_token())
        adset = fbads.set.get(
            '6016248730082',
            fields=['name', 'start_time', 'end_time',
                    'daily_budget', 'lifetime_budget'])
        minus_0700 = tzoffset(None, -25200)  # UTC-7
        self.assertEqual(adset.id, '6016248730082')
        self.assertEqual(adset.name, 'Test campaign')
        self.assertEqual(adset.start_time,
                         datetime(2014, 5, 1, 0, 0, 0, tzinfo=minus_0700))
        self.assertEqual(adset.end_time,
                         datetime(2014, 5, 5, 0, 0, tzinfo=minus_0700))
        self.assertEqual(adset.lifetime_budget, Decimal('10.00'))
def setUp(self):
    """ Run before each test method is run """
    plus_one = tzoffset(None, 3600)
    # Clear in place so other references to the list stay valid.
    del self.alert.locations[:]
    self.alert.add_all_locations([(00.00, 00.00), (11.11, 11.11)])
    self.alert.onsetDate = datetime(1997, 7, 16, 19, 20, tzinfo=plus_one)
    self.alert.expireDate = datetime(2000, 7, 16, 19, 20, tzinfo=plus_one)
    self.alert.isUpdate = False
def test_get_tz_str_iso(self):
    """ISO '+HH:MM' strings map to fixed-offset tzinfo instances."""
    for text, seconds in (("+01:02", 3720), ("-01:02", -3720)):
        self.assert_tzinfo_equal(self.time_zone._get_tzinfo(text),
                                 tz.tzoffset(None, seconds))
def test_parse_str_iso(self):
    """_parse() resolves ISO '+HH:MM' strings to fixed offsets."""
    for text, seconds in (('+01:02', 3720), ('-01:02', -3720)):
        self.assert_tzinfo_equal(self.time_zone._parse(text),
                                 tz.tzoffset(None, seconds))
def _parse_date(txt): """Parse an RFC 2822 timestamp, and converts it to a datetime object in utc.""" datetime_tuple = parsedate_tz(txt) if datetime_tuple[9] == 0: return datetime.datetime(*datetime_tuple[:6]) tzinfo = tz.tzoffset("nonsense", datetime_tuple[9]) return datetime.datetime(*datetime_tuple[:6], tzinfo=tzinfo).astimezone(tz.tzoffset("UTC", 0)).replace(tzinfo=None)
def test_init_expireDate_happy_path():
    """Alert() keeps the expireDate it is constructed with."""
    plus_one = tzoffset(None, 3600)
    alert = Alert({
        'id': None,
        'onsetDate': datetime(1997, 7, 16, 19, 20, tzinfo=plus_one),
        'expireDate': datetime(1997, 7, 16, 21, 20, tzinfo=plus_one),
        'isUpdate': False,
        'isCancel': False,
        'locations': [],
    })
    assert_not_equal(alert, None)
    assert_equal(alert.expireDate, parse("1997-07-16T21:20+01:00"))
def test_shipments():
    """shipments.list() decodes the stubbed dates with their +02:00 offset."""
    with HTTMock(shipments_stub):
        api = PlazaAPI('api_key', 'api_secret', test=True)
        shipments = api.shipments.list(1)
        assert len(shipments) == 2
        cest = tzoffset(None, 7200)
        first = shipments[0]
        assert first.ShipmentDate == datetime(
            2016, 9, 19, 18, 21, 59, 324000, tzinfo=cest)
        assert first.ExpectedDeliveryDate == datetime(
            2016, 9, 19, 0, 0, tzinfo=cest)
def setup_class(self):
    """ Setup code run before any tests are run """
    plus_one = tzoffset(None, 3600)
    self.alert = Alert({
        'id': 'testAlert',
        'onsetDate': datetime(1997, 7, 16, 19, 20, tzinfo=plus_one),
        'expireDate': datetime(2000, 7, 16, 19, 20, tzinfo=plus_one),
        'isUpdate': False,
        'isCancel': False,
        'locations': [],
    })
def test_date_default():
    """date_default keeps tz-aware values; naive values get the local zone."""
    naive = datetime.datetime.now()
    local = tzoffset('Local', -8 * 3600)
    other = tzoffset('Other', 2 * 3600)
    data = dict(naive=naive, utc=utcnow(), withtz=naive.replace(tzinfo=other))
    with mock.patch.object(jsonutil, 'tzlocal', lambda: local):
        jsondata = json.dumps(data, default=jsonutil.date_default)
        # Exactly one value (the UTC one) serializes with a trailing 'Z'.
        assert "Z" in jsondata
        assert jsondata.count("Z") == 1
        extracted = jsonutil.extract_dates(json.loads(jsondata))
        for dt in extracted.values():
            assert isinstance(dt, datetime.datetime)
            assert dt.tzinfo != None
def test_date_default():
    """date_default keeps tz-aware values; naive values get the local zone."""
    naive = datetime.datetime.now()
    local = tzoffset('Local', -8 * 3600)
    other = tzoffset('Other', 2 * 3600)
    data = dict(naive=naive, utc=utcnow(), withtz=naive.replace(tzinfo=other))
    with mock.patch.object(jsonutil, 'tzlocal', lambda: local):
        jsondata = json.dumps(data, default=jsonutil.date_default)
        nt.assert_in("Z", jsondata)
        # Only the explicit-UTC value serializes with a trailing 'Z'.
        nt.assert_equal(jsondata.count("Z"), 1)
        extracted = jsonutil.extract_dates(json.loads(jsondata))
        for dt in extracted.values():
            nt.assert_is_instance(dt, datetime.datetime)
            nt.assert_not_equal(dt.tzinfo, None)
def dateutil_parse(timestr, default, ignoretz=False, tzinfos=None, **kwargs):
    """Parse ``timestr`` the way dateutil does, but also return the resolution.

    Lifted from dateutil so the finest component the parser actually saw
    ("year" ... "microsecond") can be reported back as ``reso``.
    Returns ``(datetime, reso)``; raises ValueError on unparseable input.
    """
    from dateutil import tz
    import time
    res = DEFAULTPARSER._parse(StringIO(timestr), **kwargs)
    if res is None:
        raise ValueError("unknown string format")
    repl = {}
    # Initialize reso: it was previously unbound (NameError below) when the
    # parser returned a result with no date/time components at all.
    reso = None
    for attr in ["year", "month", "day", "hour",
                 "minute", "second", "microsecond"]:
        # Collect the parsed components; track the finest one found.
        value = getattr(res, attr)
        if value is not None:
            repl[attr] = value
            reso = attr
    if reso is None:
        raise ValueError("Cannot parse date.")
    if reso == 'microsecond' and repl['microsecond'] == 0:
        # ".000000" carries no real sub-second information.
        reso = 'second'
    ret = default.replace(**repl)
    if res.weekday is not None and not res.day:
        # Weekday-only inputs ("Tuesday") roll forward from the default date.
        ret = ret + relativedelta.relativedelta(weekday=res.weekday)
    if not ignoretz:
        if callable(tzinfos) or tzinfos and res.tzname in tzinfos:
            # A caller-supplied zone mapping wins over anything parsed.
            if callable(tzinfos):
                tzdata = tzinfos(res.tzname, res.tzoffset)
            else:
                tzdata = tzinfos.get(res.tzname)
            if isinstance(tzdata, datetime.tzinfo):
                tzinfo = tzdata
            elif isinstance(tzdata, basestring):
                tzinfo = tz.tzstr(tzdata)
            elif isinstance(tzdata, int):
                tzinfo = tz.tzoffset(res.tzname, tzdata)
            else:
                raise ValueError("offset must be tzinfo subclass, "
                                 "tz string, or int offset")
            ret = ret.replace(tzinfo=tzinfo)
        elif res.tzname and res.tzname in time.tzname:
            # Name matches the local zone's abbreviation: use tzlocal.
            ret = ret.replace(tzinfo=tz.tzlocal())
        elif res.tzoffset == 0:
            ret = ret.replace(tzinfo=tz.tzutc())
        elif res.tzoffset:
            ret = ret.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset))
    return ret, reso
def _parse_tzstr(self, tzstr, zero_as_utc=True):
    """Convert a bytes timezone suffix (b'Z' or signed b'HH[:]MM') to tzinfo.

    With zero_as_utc (the default), a +00:00/-00:00 offset is normalized
    to tzutc() before the range checks run.
    """
    if tzstr in (b'Z', b'z'):
        return tz.tzutc()
    if len(tzstr) not in {3, 5, 6}:
        raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters')
    # A numeric offset must carry an explicit sign.
    sign_byte = tzstr[0:1]
    if sign_byte == b'-':
        mult = -1
    elif sign_byte == b'+':
        mult = 1
    else:
        raise ValueError('Time zone offset requires sign')
    hours = int(tzstr[1:3])
    if len(tzstr) == 3:
        minutes = 0
    else:
        # Skip the separator byte (e.g. b':') when present.
        start = 4 if tzstr[3:4] == self._TIME_SEP else 3
        minutes = int(tzstr[start:])
    if zero_as_utc and hours == 0 and minutes == 0:
        return tz.tzutc()
    if minutes > 59:
        raise ValueError('Invalid minutes in time zone offset')
    if hours > 23:
        raise ValueError('Invalid hours in time zone offset')
    return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60)
def _parse_str(tz_expr):
    """Resolve a timezone expression to a tzinfo (None when unknown).

    Accepts 'local', ISO-style '+HH:MM' offsets, and named zones.
    """
    if tz_expr == 'local':
        return tz.gettz()
    iso_match = TimeZone.tz_re.match(tz_expr)
    if iso_match:
        # Fixed "+HH:MM"-style offset.
        sign, hours, minutes = iso_match.groups()
        seconds = int(hours) * 3600 + int(minutes) * 60
        if sign == '-':
            seconds = -seconds
        return tz.tzoffset(None, seconds)
    # Fall back to a named-zone lookup (may return None).
    return tz.gettz(tz_expr)
def testDateTime(self):
    """DT strings of increasing precision convert to datetime values."""
    def check(dicom_datetime, expected):
        dt = valuerep.DT(dicom_datetime)
        self.assertEqual(dt, expected,
                         "DT {0} not equal to datetime {1}".format(
                             dicom_datetime, expected))
        return dt

    check("1961", datetime(1961, 1, 1))
    check("19610804", datetime(1961, 8, 4))
    check("19610804192430.123", datetime(1961, 8, 4, 19, 24, 30, 123000))
    # A "-1000" suffix is a UTC offset of -10 hours.
    dt = check("196108041924-1000",
               datetime(1961, 8, 4, 19, 24, 0, 0, tzoffset(None, -10 * 3600)))
    self.assertEqual(dt.utcoffset(), timedelta(0, 0, 0, 0, 0, -10),
                     "DT offset did not compare correctly to timedelta")
def time(hour=None, min=None, sec=None, micro=None, offset=None):
    """ Create a time only timestamp for the given instant.

    Unspecified components default to their current counterparts.

    Arguments:
    hour -- Integer value of the hour.
    min -- Integer value of the number of minutes.
    sec -- Integer value of the number of seconds.
    micro -- Integer value of the number of microseconds.
    offset -- Either a positive or negative number of seconds to offset from
        UTC to match a desired timezone, or a tzinfo object.
    """
    now = dt.datetime.utcnow()
    # Fill any missing components from "now" (UTC).
    hour = now.hour if hour is None else hour
    min = now.minute if min is None else min
    sec = now.second if sec is None else sec
    micro = now.microsecond if micro is None else micro
    if offset is None:
        offset = tzutc()
    elif not isinstance(offset, dt.tzinfo):
        # A plain number of seconds becomes a fixed-offset tzinfo.
        offset = tzoffset(None, offset)
    return format_time(dt.time(hour, min, sec, micro, offset))
def test_timestamp_to_datetime_tzoffset(self):
    """A tz-aware Timestamp survives a round-trip through to_datetime()."""
    from dateutil.tz import tzoffset
    expected = Timestamp('3/11/2012 04:00', tz=tzoffset(None, 7200))
    result = Timestamp(expected.to_datetime())
    self.assertEquals(expected, result)
def test_timezone(self):
    """LogFile picks up the -04:00 offset from mongod_26.log.

    The log file handle is now closed via a context manager; the original
    opened it and never closed it (a ResourceWarning under CPython).
    """
    logfile_path = os.path.join(os.path.dirname(mtools.__file__),
                                'test/logfiles/', 'mongod_26.log')
    with open(logfile_path, 'r') as mongod_26:
        logfile = LogFile(mongod_26)
        # -14400 s == UTC-4
        assert logfile.timezone == tzoffset(None, -14400)
def generate():
    """Yield chunks of Prometheus exposition text for all Puppet nodes.

    Fixes: the HELP/TYPE header previously declared
    ``puppet_latest_report_timestamp`` while the samples below emit
    ``puppet_last_updated_timestamp`` — an invalid exposition (metadata for
    a metric with no samples, samples for an undeclared metric).  The
    header now matches the sample name, the "timestmap" typo is fixed, and
    the constant epoch datetime is hoisted out of the loop.
    """
    yield '# HELP puppet_node Puppet inventory\n'
    yield '# TYPE puppet_node gauge\n'
    for node in nodes:
        yield 'puppet_node{'
        yield 'status="%s",' % node.status
        for fact, value in facts[node.name].iteritems():
            yield fact
            yield '="'
            yield value.replace('\"', '\\"')  # escape embedded quotes
            yield '",'
        yield 'node="%s"' % node.name
        yield '} 1\n'
    # Header name must match the samples emitted below.
    yield '# HELP puppet_last_updated_timestamp '
    yield 'UNIX epoch timestamp of the latest report\n'
    yield '# TYPE puppet_last_updated_timestamp counter\n'
    unix_epoch = datetime.fromtimestamp(0, tzoffset('UTC', 0))
    for node in nodes:
        if node.report_timestamp is None:
            continue  # node has never reported
        yield 'puppet_last_updated_timestamp{'
        yield 'node="%s"' % node.name
        yield '} '
        yield str(int((node.report_timestamp - unix_epoch).total_seconds()))
        yield '\n'
def parse(cls, string):
    """Turn a timezone expression into a tzinfo instance.

    Accepts 'local', 'utc'/'UTC', ISO-style '+HH:MM' offsets, and named
    zones.  Raises ParserError for anything unrecognisable.
    """
    if string == 'local':
        return tz.tzlocal()
    if string in ('utc', 'UTC'):
        return tz.tzutc()
    iso_match = cls._TZINFO_RE.match(string)
    if iso_match:
        # Fixed "+HH:MM"-style offset.
        sign, hours, minutes = iso_match.groups()
        seconds = int(hours) * 3600 + int(minutes) * 60
        if sign == '-':
            seconds = -seconds
        return tz.tzoffset(None, seconds)
    # Named-zone lookup; gettz returns None for unknown names.
    tzinfo = tz.gettz(string)
    if tzinfo is None:
        raise ParserError('Could not parse timezone expression "{0}"', string)
    return tzinfo
def format_date(time_in_milliseconds, utc_offset_msec):
    ''' Create a datetime object from a time in milliseconds from the epoch,
    rendered at the given UTC offset (also in milliseconds). '''
    zone = tzoffset(None, utc_offset_msec / 1000.0)
    local = datetime.fromtimestamp(time_in_milliseconds / 1000.0, zone)
    # Rebuild without tzinfo/microseconds: a naive value truncated to seconds.
    return datetime(local.year, local.month, local.day,
                    local.hour, local.minute, local.second)
def test_dateutil_tzoffset_support(self):
    """Series accepts an index built from dateutil tzoffset datetimes."""
    from dateutil.tz import tzoffset
    zone = tzoffset(None, 7200)
    index = [
        datetime(2012, 5, 11, 11, tzinfo=zone),
        datetime(2012, 5, 11, 12, tzinfo=zone),
    ]
    series = Series(data=[188.5, 328.25], index=index)
    self.assertEqual(series.index.tz, zone)
    # it works! #2443 — repr of a tz-aware index entry used to blow up.
    repr(series.index[0])
def setUp(self):
    """Common parser fixtures: a BRST zone plus byte/unicode base strings."""
    self.tzinfos = {"BRST": -10800}
    self.brsttz = tzoffset("BRST", -10800)
    self.default = datetime(2003, 9, 25)

    # Parser should be able to handle bytestring and unicode.
    base_str = '2014-05-01 08:00:00'
    try:
        # Python 2.x: keep both a unicode and a native-str flavour.
        self.uni_str = unicode(base_str)
        self.str_str = str(base_str)
    except NameError:
        # Python 3: str is already unicode; str_str becomes bytes.
        self.uni_str = str(base_str)
        self.str_str = bytes(base_str.encode())
def _geonames_lookup(self, place):
    """Resolve a place name to a tzinfo via the GeoNames web services."""
    search = json_webservice('http://ws.geonames.org/searchJSON',
                             {'q': place, 'maxRows': 1, 'username': '******'})
    if search['totalResultsCount'] == 0:
        return None
    city = search['geonames'][0]
    timezone = json_webservice('http://ws.geonames.org/timezoneJSON',
                               {'lat': city['lat'], 'lng': city['lng'],
                                'username': '******'})
    if 'timezoneId' in timezone:
        # A proper IANA zone name is available.
        return gettz(timezone['timezoneId'])
    if 'rawOffset' in timezone:
        # Fall back to a fixed offset labelled e.g. "UTC+5.5".
        offset = timezone['rawOffset']
        return tzoffset('UTC%s%s' % (offset >= 0 and '+' or '', offset),
                        offset * 3600)
def test_selected_value_to_singer_value_impl_with_timestamp_tz_value_as_datetime_max(
        self):
    """A near-datetime.max tz value is clamped to the max singer timestamp."""
    value = datetime(9999, 12, 31, 23, 59, 59, 999999,
                     tzinfo=tzoffset(None, -14400))
    output = logical_replication.selected_value_to_singer_value_impl(
        value, 'timestamp with time zone', None)
    self.assertEqual('9999-12-31T23:59:59.999+00:00', output)
def test_parsing_with_end_datetime(history, sample_chamberdata_record):
    """_parse_mongo_chamberdata_record() honours its optional end_datetime."""
    utc = tzoffset("UTC+0", 0)

    def parse_until(end):
        return history._parse_mongo_chamberdata_record(
            sample_chamberdata_record, end_datetime=end)

    assert len(parse_until(datetime(2019, 1, 1, 10, 31, 0, tzinfo=utc))) == 2, (
        "The parsed data should have 2 timepoints for one-minute resolution"
        " and the specified end_datetime!")
    assert len(parse_until(datetime(2019, 1, 1, 12, 0, 0, tzinfo=utc))) == 60, (
        "The parsed data should have 60 timepoints for one-minute resolution"
        " and the specified end_datetime!")
    # An out-of-range end datetime yields no timepoints at all.
    assert len(parse_until(datetime(2019, 1, 1, 1, 0, 0, tzinfo=utc))) == 0, (
        "The parsed data should have zero timepoints for one-minute resolution"
        " and the specified end_datetime!")
def test_should_return_date_and_time_with_timezone(self, mock_open):
    """parser.date keeps sub-second precision and the +02:00 offset."""
    mock_open.return_value.__enter__.return_value = io.StringIO(
        '<ePismo dataPisma="2016-08-01T12:30:30.8929287+02:00"/>')
    parser = BankReplyParser('/path/to/file.xml')
    self.assertIsInstance(parser.date, datetime.datetime)
    # The 7-digit fraction is truncated to microseconds (892928).
    expected = datetime.datetime(2016, 8, 1, 12, 30, 30, 892928,
                                 tzinfo=tzoffset(None, 7200))
    self.assertEqual(expected, parser.date)
class Test_DateTime_is_type(object):
    # Matrix-style checks of class_under_test(value, strict_level).is_type()
    # against datetime-like and non-datetime inputs at several strictness
    # levels; the typecode must always report DATETIME.

    @pytest.mark.parametrize(
        ["value", "strict_level", "expected"],
        list(
            itertools.product(
                [
                    # Already-typed values pass at every strictness level.
                    datetime(2017, 3, 22, 10, 0, tzinfo=tzoffset(None, 32400)),
                    date(2017, 3, 22)
                ],
                [StrictLevel.MIN, StrictLevel.MIN + 1, StrictLevel.MAX],
                [True],
            )) + list(
            itertools.product(
                # Never valid, regardless of strictness.
                [None, "invalid time string", 9223372036854775807, "100-0004"],
                [StrictLevel.MIN, StrictLevel.MIN + 1, StrictLevel.MAX],
                [False],
            )) + list(
            itertools.product(
                [
                    # Parseable strings / epoch numbers only pass at MIN.
                    "2017-03-22T10:00:00+0900", 1485685623, "1485685623",
                    "1524930937.003555"
                ],
                [StrictLevel.MIN],
                [True],
            )) + list(
            itertools.product(
                ["2017-03-22T10:00:00+0900", 1485685623, "1485685623"],
                [StrictLevel.MAX], [False])),
    )
    def test_normal(self, value, strict_level, expected):
        type_object = class_under_test(value, strict_level)

        assert type_object.is_type() == expected
        assert type_object.typecode == Typecode.DATETIME

    @pytest.mark.parametrize(
        ["value", "strip_ansi_escape", "expected"],
        [
            # ANSI-coloured text only parses once escapes are stripped.
            [colored("2017-03-22T10:00:00", "red"), False, False],
            [colored("2017-03-22T10:00:00", "red"), True, True],
        ],
    )
    def test_normal_ansi(self, value, strip_ansi_escape, expected):
        type_checker = class_under_test(value, StrictLevel.MIN,
                                        strip_ansi_escape=strip_ansi_escape)

        assert type_checker.is_type() == expected
        assert type_checker.typecode == Typecode.DATETIME
def __format_date(self, date_string, is_parsed=False):
    """Return an ISO-8601 string with '+' URL-escaped as '%2B'.

    date_string may already be a datetime when is_parsed is True;
    otherwise it is parsed with dateutil.  Values without timezone info
    get the current local UTC offset attached first.
    """
    if date_string is None:
        return None
    parsed_datetime = date_string if is_parsed else parser.parse(date_string)
    if parsed_datetime.tzinfo is None:
        # No zone info present: assume the local offset.
        local_tz = tz.tzoffset(None, self.__get_current_utc_offset())
        parsed_datetime = parsed_datetime.replace(tzinfo=local_tz)
    return parsed_datetime.isoformat().replace("+", "%2B")
def test_format(self):
    """
    L{Date.format} returns a string representation of the given datetime
    instance, rendered in UTC.
    """
    parameter = Date("Test")
    # 23:59:59 at UTC+2 is 21:59:59 in UTC.
    value = datetime(2010, 9, 15, 23, 59, 59,
                     tzinfo=tzoffset('UTC', 120 * 60))
    self.assertEqual("2010-09-15T21:59:59Z", parameter.format(value))
def test_bound_iso_init(self, iso_response):
    """BoundIso copies the fields of the wrapped ISO payload."""
    iso = BoundIso(client=mock.MagicMock(), data=iso_response['iso'])

    assert iso.id == 4711
    assert iso.name == "FreeBSD-11.0-RELEASE-amd64-dvd1"
    assert iso.description == "FreeBSD 11.0 x64"
    assert iso.type == "public"
    # Deprecation date is parsed as an aware UTC datetime.
    assert iso.deprecated == datetime.datetime(
        2018, 2, 28, 0, 0, tzinfo=tzoffset(None, 0))
def test_bound_image_init(self, image_response):
    """BoundImage copies scalar fields and wraps servers as BoundServer."""
    image = BoundImage(client=mock.MagicMock(), data=image_response['image'])
    utc = tzoffset(None, 0)

    assert image.id == 4711
    assert image.type == "snapshot"
    assert image.status == "available"
    assert image.name == "ubuntu-20.04"
    assert image.description == "Ubuntu 20.04 Standard 64 bit"
    assert image.image_size == 2.3
    assert image.disk_size == 10
    assert image.created == datetime.datetime(2016, 1, 30, 23, 50, tzinfo=utc)
    assert image.os_flavor == "ubuntu"
    assert image.os_version == "16.04"
    assert image.rapid_deploy is False
    assert image.deprecated == datetime.datetime(2018, 2, 28, 0, 0, tzinfo=utc)

    # Related servers come back lazily wrapped, not fully fetched.
    assert isinstance(image.created_from, BoundServer)
    assert image.created_from.id == 1
    assert image.created_from.name == "Server"
    assert image.created_from.complete is False

    assert isinstance(image.bound_to, BoundServer)
    assert image.bound_to.id == 1
    assert image.bound_to.complete is False
async def make_archive(channel_name, start_time, end_time, messages, users_dict, tz_offset):
    """
    Construct a zip file of Slack messages containing a JSON and an HTML representation.

    channel_name: the human-readable Slack channel name. Used for file naming.
    start_time: the `time.struct_time` representing the beginning of the messages.
    end_time: the `time.struct_time` representing the end of the messages.
    messages: the array of message dicts as formatted by carmille.fetch.
    users_dict: a nested dict object in the format userid: {'display_name': display_name, 'icon_url': icon_url} .
    tz_offset: the requesting user's local time offset from UTC, in integer seconds.

    Returns the download URL on success, or an apology string on failure.

    Note: relies on the following environment variables:
    S3_WEBSITE_PREFIX -- the entire string to put before the object name to get
    a place to download the file. e.g., https://carmille.supercoolhost.net
    """
    logging.debug("Entering the archive process.")
    S3_WEBSITE_PREFIX = os.environ.get('S3_WEBSITE_PREFIX')

    # Scratch directory with a random 5-letter suffix so concurrent archive
    # requests don't collide.
    letters = string.ascii_lowercase
    randstr = ''.join(random.choice(letters) for i in range(5))
    os.mkdir(f"tmp/{randstr}")

    # Render the window boundaries in the requester's zone for file naming.
    user_tz = tz.tzoffset(None, tz_offset)
    start_datetime = datetime.datetime.fromtimestamp(time.mktime(start_time)).astimezone(user_tz)
    end_datetime = datetime.datetime.fromtimestamp(time.mktime(end_time)).astimezone(user_tz)

    # Has extensions added to it
    filepart = f"{channel_name}_{start_datetime.strftime('%Y-%m-%d-%H-%M')}_to_{end_datetime.strftime('%Y-%m-%d-%H-%M')}"
    filename = f"tmp/{randstr}/{filepart}"
    zipfilename = f"tmp/{filepart}"

    await make_json(filename, messages, users_dict)
    # Only make_html needs user_tz, because it's the one that tries to do
    # "human_readable" stuff.
    await make_html(filename, messages, users_dict, user_tz)

    # Zip the JSON+HTML pair, then drop the scratch directory.
    shutil.make_archive(zipfilename, "zip", f"tmp/{randstr}")
    shutil.rmtree(f"tmp/{randstr}")
    logging.debug("Finished the archive process.")

    upload_result = await upload_archive("tmp", f"{filepart}.zip")
    if upload_result:
        logging.debug("Finished upload process.")
        # Local copy is redundant once the upload succeeded.
        os.remove(f"tmp/{filepart}.zip")
        return f"{S3_WEBSITE_PREFIX}/{filepart}.zip"
    else:
        return "Unfortunately, the archive failed. Look at the logs. Sorry!"
def daytime_hightide(ssh, times): """Finds the index of the daytime high tides. Daytime is defined between 0530 and 1830 PST. :arg ssh: the sea surface height values :type ssh: numpy array (1D) :arg times: the times corresponding to the ssh values in UTC :type times: numpy array of datetime objects :returns: inds, a list of indices for the daytime high tides.""" # Convert times to PST myPST = tz.tzoffset('myPST', -8 * 3600) times_pst = [d.astimezone(myPST) for d in times] times_pst = np.array(times_pst) # Loop through each day to = times[0] tf = times[-1] days = [ to + datetime.timedelta(days=int(n)) for n in np.arange((tf - to).days) ] max_inds = [] for day in days: # Define datetime to be between 0530 and 1830 daytime1 = day.replace(hour=5, minute=30, tzinfo=myPST) daytime2 = day.replace(hour=18, minute=30, tzinfo=myPST) inds = np.where( np.logical_and(times_pst >= daytime1, times_pst <= daytime2)) # Isolate ssh in day time and calculate the difference netween sshs ssh_daytime = ssh[inds] ssh_diff = np.diff(ssh_daytime) # Look for index of maximun daytime high tide. # Defualt is maximim of the tide, but this might occur on boundary # Then, look for a local max by finding where differences switch # from pos to neg max_ind = np.argmax(ssh_daytime) for i in np.arange(1, len(ssh_diff)): if ssh_diff[i] < 0 and ssh_diff[i - 1] > 0: max_ind = i # Find index of max tides and append to list max_time = times_pst[inds][max_ind].astimezone(tz=tz.tzutc()) max_inds.append(np.where(times == max_time)[0][0]) return max_inds
def test_timezone(self):
    """Round-trip an entity carrying a timezone-aware datetime."""
    brst = tzoffset('BRST', -10800)
    stamped = datetime(2003, 9, 27, 9, 52, 43, tzinfo=brst)
    entity = self._create_random_base_entity_dict()
    entity['date'] = stamped

    self.ts.insert_entity(self.table_name, entity)
    fetched = self.ts.get_entity(self.table_name, entity['PartitionKey'],
                                 entity['RowKey'])

    # The service normalizes to UTC; the instant must survive unchanged.
    self.assertIsNotNone(fetched)
    self.assertEqual(fetched.date, stamped.astimezone(tzutc()))
    self.assertEqual(fetched.date.astimezone(brst), stamped)
def test_to_python_iso8601(self):
    """ISO 8601 strings clean to the expected (possibly aware) datetimes."""
    field = ISO8601DateTimeField(input_formats=[ISO_8601])
    naive = datetime(2015, 1, 1, 16, 30)

    # Space- and T-separated naive forms.
    self.assertEqual(field.clean('2015-01-01 16:30'), naive)
    self.assertEqual(field.clean('2015-01-01T16:30'), naive)

    # Explicit offsets become aware datetimes.
    self.assertEqual(field.clean('2015-01-01T16:30+00:00'),
                     naive.replace(tzinfo=pytz.UTC))
    self.assertEqual(field.clean('2015-01-01T16:30+04:00'),
                     naive.replace(tzinfo=tzoffset(None, 4 * 60 * 60)))

    # A malformed offset must be rejected.
    with self.assertRaises(forms.ValidationError):
        field.clean('2015-01-01T16:30+A')
def create_event_json(techname, location, visittype, agentname, startdate):
    """Build a calendar event dict for a technician visit.

    The visit occupies a fixed 10:00-14:00 window at UTC+4 on *startdate*.

    :param techname: name of the technician performing the visit
    :param location: where the visit takes place
    :param visittype: kind of visit (used in the summary/description)
    :param agentname: agent who scheduled the visit
    :param startdate: visit date as a 'YYYY-MM-DD' string
    :returns: dict with 'summary', 'location', 'description', 'start', 'end';
        the 'dateTime' values are ISO 8601 strings with the +04:00 offset
    :raises ValueError: if *startdate* does not match 'YYYY-MM-DD'
    """
    from datetime import timedelta, timezone

    visit_day = datetime.strptime(startdate, "%Y-%m-%d").date()
    # Fixed +04:00 offset via the stdlib (replaces dateutil's
    # tzoffset('None', 14400); rendered offset is identical).
    tz_plus4 = timezone(timedelta(hours=4))
    start_dt = datetime(visit_day.year, visit_day.month, visit_day.day,
                        10, 0, tzinfo=tz_plus4)
    end_dt = start_dt + timedelta(hours=4)
    print('Start time: ' + str(start_dt))
    print('End time: ' + str(end_dt))
    # isoformat() yields exactly what str().replace(' ', 'T') produced.
    return {
        'summary': f"{visittype} by {techname}",
        'location': location,
        'description': f"{agentname} scheduled {visittype} by {techname}"
                       f" at {location} for {startdate}",
        'start': {
            'dateTime': start_dt.isoformat(),
        },
        'end': {
            'dateTime': end_dt.isoformat(),
        },
    }
def _c_convert_timestamp(val):
    """Parse a timestamp string via the C speedup, falling back to Python.

    :param val: raw timestamp string; may be None or empty
    :returns: an aware datetime converted to the local timezone; the result
        of ``parse_date(val, as_utc=False)`` when the C parser cannot handle
        the value; or None for empty input. Out-of-range dates collapse to
        ``UNDEFINED_DATE``.
    """
    if not val:
        return None
    try:
        ret = _c_speedup.parse_date(val.strip())
    except Exception:
        # The C extension may raise on malformed input; fall through to the
        # Python parser below. (Was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.)
        ret = None
    if ret is None:
        return parse_date(val, as_utc=False)
    year, month, day, hour, minutes, seconds, tzsecs = ret
    try:
        return datetime(year, month, day, hour, minutes, seconds,
                        tzinfo=tzoffset(None, tzsecs)).astimezone(local_tz)
    except OverflowError:
        # Dates the platform cannot represent map to the sentinel date.
        return UNDEFINED_DATE.astimezone(local_tz)
def test_datetime():
    """Datetime parsing cases: plain, 12-hour, tz-offset, and passthrough."""
    cases = [
        ('2020-01-20 12:33',
         datetime.datetime(2020, 1, 20, 12, 33)),
        ('2020-01-20 12:33:22',
         datetime.datetime(2020, 1, 20, 12, 33, 22)),
        ('2020-01-20 1:33:22PM',
         datetime.datetime(2020, 1, 20, 13, 33, 22)),
        ('2020-01-20 09:33:22.890000-6:00',
         datetime.datetime(2020, 1, 20, 9, 33, 22, 890000,
                           tzinfo=tzoffset(None, -21600))),
        ('2020-01-20 09:33:22.890000-6',
         datetime.datetime(2020, 1, 20, 9, 33, 22, 890000,
                           tzinfo=tzoffset(None, -21600))),
        (datetime.datetime(2020, 1, 20, 11, 11),
         datetime.datetime(2020, 1, 20, 11, 11)),
        ('2020-01-17T15:45:37.268000Z',
         datetime.datetime(2020, 1, 17, 15, 45, 37, 268000,
                           tzinfo=tzlocal())),
    ]
    for raw, parsed in cases:
        yield check, raw, parsed, numbers.FORMAT_DATE_DATETIME, \
            datetime.datetime
def test_make_date():
    """make_date normalizes dates/datetimes/ISO strings; a tz can shift the day."""
    expected = datetime.date(2000, 1, 1)
    midnight = datetime.datetime(2000, 1, 1, 0, 0)
    iso_utc = '2000-01-01T00:00:00.000000Z'

    # All three spellings of the same instant map to the same date.
    for value in (expected, midnight, iso_utc):
        assert make_date(value) == expected

    # A west-of-UTC timezone moves UTC midnight into the previous local day.
    assert make_date(iso_utc, pytz.timezone('America/Cancun')) != expected
    assert make_date(iso_utc, tz.tzoffset('IST', -3600)) != expected
def test_pass_datetime_non_utc(self):
    """
    The incoming value is a datetime object that is already set to a
    non-UTC timezone.
    """
    # 10:06:32 at UTC-5 is 15:06:32 in UTC.
    incoming = datetime(2015, 6, 27, 10, 6, 32,
                        tzinfo=tzoffset('UTC-5', -5 * 3600))
    expected = datetime(2015, 6, 27, 15, 6, 32, tzinfo=utc)
    self.assertFilterPasses(incoming, expected)
def merge_datetime(year: int = None, month: int = None, day: int = None,
                   hour: int = None, minute: int = None, second: int = None,
                   timezone: tzinfo = None):
    """Build an aware datetime, filling unspecified fields from "now".

    Each component that is None is taken from the current time in
    *timezone*; the result always carries *timezone* as its tzinfo.

    :param year/month/day/hour/minute/second: explicit components, or None
        to use the current value in *timezone*
    :param timezone: target tzinfo; defaults to KST (UTC+9) when None
    :returns: timezone-aware datetime
    """
    from datetime import timedelta, timezone as _fixed_tz

    if timezone is None:
        # Stdlib fixed-offset zone replaces the old dateutil tzoffset
        # default; the None sentinel keeps the zone object out of the
        # signature while preserving the same +09:00 behavior.
        timezone = _fixed_tz(timedelta(hours=9), 'KST')
    now = datetime.now(timezone)
    return datetime(year=year if year is not None else now.year,
                    month=month if month is not None else now.month,
                    day=day if day is not None else now.day,
                    hour=hour if hour is not None else now.hour,
                    minute=minute if minute is not None else now.minute,
                    second=second if second is not None else now.second,
                    tzinfo=timezone)
def test_initialize_without_datetime_with_dateutil_timezone(self, mock_datetime_timezone):
    """Delorean accepts a pytz zone derived from a dateutil fixed offset."""
    dateutil_zone = tzoffset(None, -22500)
    aware = datetime(2015, 1, 1, tzinfo=dateutil_zone)
    offset = dateutil_zone.utcoffset(None)
    # Flatten the timedelta into seconds (total_seconds() equivalent).
    offset_seconds = (
        (offset.microseconds
         + (offset.seconds + offset.days * 24 * 3600) * 10 ** 6) / 10 ** 6)
    pytz_zone = pytz.FixedOffset(offset_seconds / 60)
    expected = pytz_zone.normalize(aware)
    mock_datetime_timezone.return_value = expected

    delorean_obj = delorean.Delorean(timezone=pytz_zone)

    self.assertEqual(delorean_obj.datetime, expected)
    self.assertEqual(delorean_obj.timezone, pytz_zone)
def end_time(self):
    """Return the end time as 'YYYY-MM-DD HH:MM:SS +ZZZZ', or None.

    Combines the naive ``self.end_time_notz`` with ``self.utc_offset``
    (seconds east of UTC -- TODO confirm unit against whatever sets
    utc_offset) into an aware datetime and formats it with its numeric
    UTC offset. Sub-second precision is intentionally dropped, matching
    the second-granularity reconstruction below.
    """
    from datetime import timedelta, timezone

    if self.end_time_notz is None:
        return None
    # Stdlib fixed-offset zone; replaces the previous dateutil tzoffset
    # (the %z rendering is identical).
    zone = timezone(timedelta(seconds=self.utc_offset))
    naive = self.end_time_notz
    aware = datetime(naive.year, naive.month, naive.day,
                     naive.hour, naive.minute, naive.second, tzinfo=zone)
    return aware.strftime('%Y-%m-%d %H:%M:%S %z')
def test_to_taskwarrior(self):
    """to_taskwarrior() maps a JIRA record onto the expected task fields."""
    url = 'http://one'
    extra = {
        'jira_version': 5,
        'annotations': ['an annotation'],
    }
    issue = self.service.get_issue_for_record(self.arbitrary_record, extra)
    record_priority = self.arbitrary_record['fields']['priority']
    expected = {
        'project': self.arbitrary_project,
        'priority': issue.PRIORITY_MAP[record_priority],
        'annotations': extra['annotations'],
        'tags': [],
        'entry': datetime.datetime(2016, 6, 6, 6, 7, 8, 123000,
                                   tzinfo=tzoffset(None, -25200)),
        'jirafixversion': '1.2.3',
        issue.URL: url,
        issue.FOREIGN_ID: self.arbitrary_record['key'],
        issue.SUMMARY: self.arbitrary_summary,
        issue.DESCRIPTION: None,
        issue.ESTIMATE: self.arbitrary_estimation / 60 / 60,
    }

    # Stub out URL generation so the expected URL is deterministic.
    with mock.patch.object(issue, 'get_url', side_effect=lambda *args: url):
        actual = issue.to_taskwarrior()

    self.assertEqual(actual, expected)
def test_rfc_minimal():
    """Parsing jsonfeed.org's reference feed yields the full object graph."""
    def _author():
        # Fresh instance per use, mirroring the spec feed's author fields.
        return JSONFeedAuthor(
            name='Brent Simmons and Manton Reece',
            url='https://jsonfeed.org/',
            avatar=None)

    announcement = JSONFeedItem(
        id_='https://jsonfeed.org/2017/05/17/announcing_json_feed',
        url='https://jsonfeed.org/2017/05/17/announcing_json_feed',
        external_url=None,
        title='Announcing JSON Feed',
        content_html=content_html,
        content_text=None,
        summary=None,
        image=None,
        banner_image=None,
        date_published=datetime.datetime(2017, 5, 17, 8, 2, 12,
                                         tzinfo=tzoffset(None, -25200)),
        date_modified=None,
        author=None,
        authors=[],
        language=None,
        tags=[],
        attachments=[])

    expect = JSONFeed(
        version='https://jsonfeed.org/version/1',
        title='JSON Feed',
        home_page_url='https://jsonfeed.org/',
        feed_url='https://jsonfeed.org/feed.json',
        description='JSON Feed is a pragmatic syndication format for blogs, '
                    'microblogs, and other time-based content.',
        user_comment='This feed allows you to read the posts from this site '
                     'in any feed reader that supports the JSON Feed format. '
                     'To add this feed to your reader, copy the following URL '
                     '— https://jsonfeed.org/feed.json — and add it your '
                     'reader.',
        next_url=None,
        icon=None,
        favicon=None,
        author=_author(),
        authors=[_author()],
        language=None,
        expired=False,
        items=[announcement])

    assert parse_json_feed_file('tests/json_feed/jsonfeed.org.json') == expect
def __init__(self, app, logger, timezone, color='lightgreen', font='Liberation Sans', show_seconds=False):
    """Constructor for a clock object using a ginga canvas.

    ``timezone`` is either a Bunch with ``location`` and ``time_offset``
    attributes (time_offset presumably a fixed offset in seconds --
    TODO confirm unit) or a zone name string understood by pytz.
    ``app`` is accepted but not used in this constructor.
    """
    self.logger = logger
    if isinstance(timezone, Bunch):
        # Fixed-offset zone built from the bunch's location/offset fields.
        self.timezone = timezone.location
        self.tzinfo = tz.tzoffset(timezone.location, timezone.time_offset)
    else:
        # Named zone, resolved through pytz.
        self.timezone = timezone
        self.tzinfo = pytz.timezone(timezone)
    self.color = color
    self.font = font
    # Font sizes for the large (time) and small (labels) text.
    self.largesize = 72
    self.smallsize = 24
    self.show_seconds = show_seconds
    # now import our items
    from ginga.gw import Viewers
    fi = Viewers.CanvasView(logger=logger)
    fi.set_bg(0.2, 0.2, 0.2)
    self.viewer = fi
    fi.add_callback('configure', self.clock_resized_cb)
    # canvas that we will draw on
    self.canvas = fi.get_canvas()
    # `width`/`height` come from module level; widen the face when the
    # seconds digits are shown.
    wd, ht = width, height
    if self.show_seconds:
        wd += 300
    fi.set_desired_size(wd, ht)
    iw = Viewers.GingaViewerWidget(viewer=fi)
    self.widget = iw
    self.clock_resized_cb(self.viewer, wd, ht)
    # Paint an initial face with the current UTC time.
    dt = datetime.utcnow().replace(tzinfo=pytz.utc)
    self.update_clock(dt)
def datetime(year=None, month=None, day=None, hour=None, min=None, sec=None,
             micro=None, offset=None, separators=True):
    """
    Create a datetime timestamp for the given instant. Unspecified
    components default to their current counterparts (taken from the
    current UTC time).

    Arguments:
    year    -- Integer value of the year (4 digits)
    month   -- Integer value of the month
    day     -- Integer value of the day of the month.
    hour    -- Integer value of the hour.
    min     -- Integer value of the number of minutes.
    sec     -- Integer value of the number of seconds.
    micro   -- Integer value of the number of microseconds.
    offset  -- Either a positive or negative number of seconds to offset from
               UTC to match a desired timezone, or a tzinfo object.
    separators -- NOTE(review): accepted but never used in this body;
               presumably meant to be forwarded to format_datetime() --
               confirm against format_datetime's signature.
    """
    # Defaults come from the current UTC wall clock; note the components are
    # NOT shifted by `offset` -- the offset is only attached as the tzinfo.
    now = dt.datetime.utcnow()
    if year is None:
        year = now.year
    if month is None:
        month = now.month
    if day is None:
        day = now.day
    if hour is None:
        hour = now.hour
    if min is None:
        min = now.minute
    if sec is None:
        sec = now.second
    if micro is None:
        micro = now.microsecond
    # Normalize `offset` into a tzinfo: None -> UTC, number -> fixed offset.
    if offset is None:
        offset = tzutc()
    elif not isinstance(offset, dt.tzinfo):
        offset = tzoffset(None, offset)
    date = dt.datetime(year, month, day, hour, min, sec, micro, offset)
    return format_datetime(date)
def dateparse_archive_obs(s):
    """Parse a 'YYYY/MM/DD HH:MM' PST string into a UTC-aware datetime.

    e.g. dateparse_archive_obs('2014/05/31 11:42')

    :arg s: string of date and time, interpreted as PST (a fixed -8 h
        offset -- no daylight-saving handling, despite the original
        'Canada/Pacific' wording)
    :type s: str

    :returns: timezone-aware datetime object converted to UTC
    :raises ValueError: if *s* does not match '%Y/%m/%d %H:%M'
    """
    # Fixed -8 h zone via the stdlib; replaces dateutil's tz.tzoffset with
    # an equivalent offset (same conversion arithmetic).
    pst = datetime.timezone(datetime.timedelta(seconds=-28800), "PST")
    # Convert the string to a (naive) datetime object
    naive = datetime.datetime.strptime(s, "%Y/%m/%d %H:%M")
    # Attach the fixed PST offset without shifting the wall-clock time
    aware = naive.replace(tzinfo=pst)
    # Convert to UTC
    return aware.astimezone(datetime.timezone.utc)
def parse_iso8601(date_string, assume_utc=False, as_utc=True):
    """Parse an ISO 8601 string into an aware datetime.

    :param date_string: ISO 8601 text; falsy input yields UNDEFINED_DATE
    :param assume_utc: treat a string WITHOUT an explicit zone as UTC
        (otherwise it is assumed to be in the local zone)
    :param as_utc: convert the result to UTC (otherwise to local time)
    :returns: timezone-aware datetime
    """
    if not date_string:
        return UNDEFINED_DATE
    # The C speedup returns the naive components plus whether an explicit
    # zone was present and its offset in seconds.
    dt, aware, tzseconds = speedup.parse_iso8601(date_string)
    tz = utc_tz if assume_utc else local_tz
    if aware:  # timezone was specified
        if tzseconds == 0:
            tz = utc_tz
        else:
            # Build a human-readable "+HH:MM" label for the fixed offset.
            sign = '-' if tzseconds < 0 else '+'
            description = "%s%02d:%02d" % (sign, abs(tzseconds) // 3600,
                                           (abs(tzseconds) % 3600) // 60)
            tz = tzoffset(description, tzseconds)
    dt = dt.replace(tzinfo=tz)
    # Identity check: already the shared utc_tz object, so no conversion
    # is needed when UTC output was requested.
    if as_utc and tz is utc_tz:
        return dt
    return dt.astimezone(utc_tz if as_utc else local_tz)
def test_pass_naive_timestamp_default_timezone(self):
    """
    The incoming value is a naive timestamp, but the Filter is
    configured not to treat naive timestamps as UTC.
    """
    # 09:20:03 at the configured UTC+8 default is 01:20:03 in UTC.
    configured = self._filter(
        '2015-05-12 09:20:03',
        timezone=tzoffset('UTC+8', 8 * 3600),
    )
    expected = datetime(2015, 5, 12, 1, 20, 3, tzinfo=utc)
    # The resulting datetime is still converted to UTC.
    self.assertFilterPasses(configured, expected)