Example #1
 def test_filter_with_datetime_timezone(self):
     # local imports so the snippet is self-contained (module-level in the original test file)
     import datetime
     from pyspark.sql import Row
     from pyspark.testing.sqlutils import UTCOffsetTimezone
     # same wall-clock time at two UTC offsets: dt2 (UTC+1) denotes an instant one hour earlier than dt1 (UTC+0)
     dt1 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000, tzinfo=UTCOffsetTimezone(0))
     dt2 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000, tzinfo=UTCOffsetTimezone(1))
     row = Row(date=dt1)
     df = self.spark.createDataFrame([row])
     # filters compare the underlying instants, not the wall-clock fields
     self.assertEqual(0, df.filter(df.date == dt2).count())
     self.assertEqual(1, df.filter(df.date > dt2).count())
     self.assertEqual(0, df.filter(df.date < dt2).count())
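
A minimal standalone sketch of the same filtering behavior, assuming a local SparkSession (the master/appName values here are illustrative) and using the standard library's datetime.timezone as a stand-in for UTCOffsetTimezone, since both are fixed-UTC-offset tzinfo implementations:

    import datetime

    from pyspark.sql import Row, SparkSession

    spark = SparkSession.builder.master("local[1]").appName("tz-filter-sketch").getOrCreate()

    # Same wall-clock time at two UTC offsets: dt2 (UTC+1) is one hour earlier than
    # dt1 (UTC+0) as an absolute instant.
    dt1 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000, tzinfo=datetime.timezone.utc)
    dt2 = datetime.datetime(2015, 4, 17, 23, 1, 2, 3000,
                            tzinfo=datetime.timezone(datetime.timedelta(hours=1)))

    df = spark.createDataFrame([Row(date=dt1)])

    # Comparisons are made on the underlying instant, so only the ">" predicate matches.
    print(df.filter(df.date == dt2).count())  # 0
    print(df.filter(df.date > dt2).count())   # 1
    print(df.filter(df.date < dt2).count())   # 0

    spark.stop()
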
Example #2
 def test_time_with_timezone(self):
     # local imports so the snippet is self-contained (module-level in the original test file)
     import datetime
     import time
     day = datetime.date.today()
     now = datetime.datetime.now()
     ts = time.mktime(now.timetuple())
     # class in __main__ is not serializable
     from pyspark.testing.sqlutils import UTCOffsetTimezone
     utc = UTCOffsetTimezone()
     utcnow = datetime.datetime.utcfromtimestamp(ts)  # without microseconds
     # add microseconds to utcnow (keeping year, month, day, hour, minute, second)
     utcnow = datetime.datetime(*(utcnow.timetuple()[:6] + (now.microsecond, utc)))
     df = self.spark.createDataFrame([(day, now, utcnow)])
     day1, now1, utcnow1 = df.first()
     self.assertEqual(day1, day)
     self.assertEqual(now, now1)
     # the UTC-offset-aware value denotes the same instant, so it round-trips equal to the naive local `now`
     self.assertEqual(now, utcnow1)
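
Both examples rely on UTCOffsetTimezone being a fixed-UTC-offset tzinfo whose constructor takes the offset in hours, as the UTCOffsetTimezone(0)/UTCOffsetTimezone(1) calls above suggest. Under that assumption, a minimal sketch of the semantics without Spark:

    import datetime

    from pyspark.testing.sqlutils import UTCOffsetTimezone

    # 23:01:02 at UTC+1 and 22:01:02 at UTC+0 name the same instant, so the two
    # aware datetimes compare equal.
    dt_plus1 = datetime.datetime(2015, 4, 17, 23, 1, 2, tzinfo=UTCOffsetTimezone(1))
    dt_utc = datetime.datetime(2015, 4, 17, 22, 1, 2, tzinfo=UTCOffsetTimezone(0))
    assert dt_plus1 == dt_utc
    assert dt_plus1.utcoffset() == datetime.timedelta(hours=1)
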