def transform_columns(result: Result) -> Result:
    """
    Format ClickHouse result values in place as strings.

    - timezone-naive datetimes -> ISO 8601 strings with a UTC offset
    - dates -> ISO 8601 datetime strings at midnight UTC
    - UUID objects -> their string form
    """

    def _rows():
        # Include the "totals" row (when present) in the formatting pass.
        if "totals" in result:
            return itertools.chain(result["data"], [result["totals"]])
        return iter(result["data"])

    for column in result["meta"]:
        name, ctype = column["name"], column["type"]
        if DATETIME_TYPE_RE.match(ctype):
            for row in _rows():
                row[name] = row[name].replace(tzinfo=tz.tzutc()).isoformat()
        elif DATE_TYPE_RE.match(ctype):
            for row in _rows():
                row[name] = (
                    datetime(*row[name].timetuple()[:6])
                    .replace(tzinfo=tz.tzutc())
                    .isoformat()
                )
        elif UUID_TYPE_RE.match(ctype):
            for row in _rows():
                row[name] = str(row[name])

    return result
def test_valid_response(self):
    """ Tests on happy path, validating response and iterations over it """
    response = KustoResponse(json.loads(RESPONSE_TEXT))
    # Test that basic iteration works
    row_count = 0
    for _ in response.iter_all():
        row_count = row_count + 1
    self.assertEqual(row_count, 3)
    self.assertEqual(5, response.get_table_count())
    # Test access by index and by column name
    for row in response.iter_all():
        self.assertEqual(row[0], row['Timestamp'])
        self.assertEqual(row[1], row['Name'])
        self.assertEqual(row[2], row['Altitude'])
        self.assertEqual(row[3], row['Temperature'])
        self.assertEqual(row[4], row['IsFlying'])
        self.assertEqual(row[5], row['TimeFlying'])
    # Test all types (only on fully-populated rows; the last row is nulls)
    for row in response.iter_all():
        if row[0] is not None:
            self.assertEqual(type(row[0]), datetime)
            try:
                self.assertEqual(type(row[1]), str)
            except AssertionError:
                # Python 2 returns unicode strings; fall back when str fails.
                self.assertEqual(type(row[1]), unicode)
            self.assertEqual(type(row[2]), int)
            self.assertEqual(type(row[3]), float)
            self.assertEqual(type(row[4]), bool)
            self.assertEqual(type(row[5]), timedelta)
    # Test actual values
    rows = list(response.iter_all())
    self.assertEqual(datetime(2016, 6, 6, 15, 35, tzinfo=tzutc()),
                     rows[0]['Timestamp'])
    self.assertEqual('foo', rows[0]['Name'])
    self.assertEqual(101, rows[0]['Altitude'])
    self.assertAlmostEqual(3.14, rows[0]['Temperature'], 2)
    self.assertEqual(False, rows[0]['IsFlying'])
    self.assertEqual(
        timedelta(days=4, hours=1, minutes=2, seconds=3, milliseconds=567),
        rows[0]['TimeFlying'])
    self.assertEqual(datetime(2016, 6, 7, 16, tzinfo=tzutc()),
                     rows[1]['Timestamp'])
    self.assertEqual('bar', rows[1]['Name'])
    self.assertEqual(555, rows[1]['Altitude'])
    self.assertAlmostEqual(2.71, rows[1]['Temperature'], 2)
    self.assertEqual(True, rows[1]['IsFlying'])
    self.assertEqual(timedelta(), rows[1]['TimeFlying'])
    self.assertIsNone(rows[2]['Timestamp'])
    self.assertIn(rows[2]['Name'], ['', u''])
    self.assertIsNone(rows[2]['Altitude'])
    # BUG FIX: the original passed a stray `2` here (assertIsNone's second
    # argument is the failure *message*, not a places count — a leftover
    # from the assertAlmostEqual calls above).
    self.assertIsNone(rows[2]['Temperature'])
    self.assertIsNone(rows[2]['IsFlying'])
    self.assertIsNone(rows[2]['TimeFlying'])
    # Test second table
    rows = list(response.iter_all(1))
    self.assertEqual(1, len(rows))
def test_strictdatetime(self):
    # StrictDateTimeField: `a` uses the default awareness, `b` forces naive
    # loading, `c` forces tz-aware loading.
    class MySchema(EmbeddedSchema):
        a = fields.StrictDateTimeField()
        b = fields.StrictDateTimeField(load_as_tz_aware=False)
        c = fields.StrictDateTimeField(load_as_tz_aware=True)
    # Test _deserialize
    s = MySchema(strict=True)
    # UTC-equivalent inputs (naive, aware-UTC, and both string forms).
    for date in (
            datetime(2016, 8, 6),
            datetime(2016, 8, 6, tzinfo=tzutc()),
            "2016-08-06T00:00:00Z",
            "2016-08-06T00:00:00",
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == datetime(2016, 8, 6)
        assert data['b'] == datetime(2016, 8, 6)
        assert data['c'] == datetime(2016, 8, 6, tzinfo=tzutc())
    # +02:00 inputs: naive fields shift to UTC, aware field keeps offset.
    for date in (
            "2016-08-06T00:00:00+02:00",
            datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)),
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == datetime(2016, 8, 5, 22, 0)
        assert data['b'] == datetime(2016, 8, 5, 22, 0)
        assert data['c'] == datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
    with pytest.raises(ValidationError):
        s.load({'a': "dummy"})
    # Test _deserialize_from_mongo
    MyDataProxy = data_proxy_factory('My', MySchema())
    d = MyDataProxy()
    for date in (
            datetime(2016, 8, 6),
            datetime(2016, 8, 6, tzinfo=tzutc()),
    ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == datetime(2016, 8, 6)
        assert d.get('b') == datetime(2016, 8, 6)
        assert d.get('c') == datetime(2016, 8, 6, tzinfo=tzutc())
    for date in (datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)), ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == datetime(2016, 8, 5, 22, 0)
        assert d.get('b') == datetime(2016, 8, 5, 22, 0)
        assert d.get('c') == datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
def transform_datetime(value: datetime) -> str:
    """
    Render *value* as an ISO 8601 string normalized to UTC.

    Naive datetimes are assumed to already be in UTC; aware ones are
    converted to UTC first.
    """
    utc = tz.tzutc()
    if value.tzinfo is None:
        normalized = value.replace(tzinfo=utc)
    else:
        normalized = value.astimezone(utc)
    return normalized.isoformat()
def test_datetime_conversion_distant_past():
    # Any AD year (0001 onwards) can be parsed; year 0000 does not exist in
    # the proleptic Gregorian calendar, so it converts to None.
    col = DateTimeColumn(name='datetime')
    d1 = '2018-06-11 10:02:10'
    d2 = '1018-06-11 10:02:10'
    d3 = '0001-06-11 10:02:10'
    d4 = '0000-06-11 10:02:10'
    assert_equal(col.convert(d1),
                 datetime(2018, 6, 11, 10, 2, 10, tzinfo=tzutc()))
    assert_equal(col.convert(d2),
                 datetime(1018, 6, 11, 10, 2, 10, tzinfo=tzutc()))
    assert_equal(col.convert(d3),
                 datetime(1, 6, 11, 10, 2, 10, tzinfo=tzutc()))
    assert_equal(col.convert(d4), None)
def test_strictdatetime(self):
    # `a` default awareness, `b` loads naive, `c` loads tz-aware.
    class MySchema(EmbeddedSchema):
        a = fields.StrictDateTimeField()
        b = fields.StrictDateTimeField(load_as_tz_aware=False)
        c = fields.StrictDateTimeField(load_as_tz_aware=True)
    # Test _deserialize
    s = MySchema(strict=True)
    for date in (
            datetime(2016, 8, 6),
            datetime(2016, 8, 6, tzinfo=tzutc()),
            "2016-08-06T00:00:00Z",
            "2016-08-06T00:00:00",
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == datetime(2016, 8, 6)
        assert data['b'] == datetime(2016, 8, 6)
        assert data['c'] == datetime(2016, 8, 6, tzinfo=tzutc())
    # Offset inputs: naive fields are shifted to UTC wall time.
    for date in (
            "2016-08-06T00:00:00+02:00",
            datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)),
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == datetime(2016, 8, 5, 22, 0)
        assert data['b'] == datetime(2016, 8, 5, 22, 0)
        assert data['c'] == datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
    with pytest.raises(ValidationError):
        s.load({'a': "dummy"})
    # Test _deserialize_from_mongo
    MyDataProxy = data_proxy_factory('My', MySchema())
    d = MyDataProxy()
    for date in (
            datetime(2016, 8, 6),
            datetime(2016, 8, 6, tzinfo=tzutc()),
    ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == datetime(2016, 8, 6)
        assert d.get('b') == datetime(2016, 8, 6)
        assert d.get('c') == datetime(2016, 8, 6, tzinfo=tzutc())
    for date in (
            datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)),
    ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == datetime(2016, 8, 5, 22, 0)
        assert d.get('b') == datetime(2016, 8, 5, 22, 0)
        assert d.get('c') == datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
def test_parse_date_ranges():
    # A full day expands to [00:00:00, 23:59:59] of that day.
    eighth_march_2014 = {
        'time': Range(datetime(2014, 3, 8, tzinfo=tzutc()),
                      datetime(2014, 3, 8, 23, 59, 59, tzinfo=tzutc()))
    }
    assert eighth_march_2014 == parse_expressions('time in 2014-03-08')
    assert eighth_march_2014 == parse_expressions('time in 2014-03-8')

    march_2014 = {
        'time': Range(datetime(2014, 3, 1, tzinfo=tzutc()),
                      datetime(2014, 3, 31, 23, 59, 59, tzinfo=tzutc()))
    }
    assert march_2014 == parse_expressions('time in 2014-03')
    assert march_2014 == parse_expressions('time in 2014-3')

    # Not a leap year: February 2014 has 28 days.
    feb_2014 = {
        'time': Range(datetime(2014, 2, 1, tzinfo=tzutc()),
                      datetime(2014, 2, 28, 23, 59, 59, tzinfo=tzutc()))
    }
    assert feb_2014 == parse_expressions('time in 2014-02')
    assert feb_2014 == parse_expressions('time in 2014-2')

    # Entire year
    year_2014 = {
        'time': Range(datetime(2014, 1, 1, tzinfo=tzutc()),
                      datetime(2014, 12, 31, 23, 59, 59, tzinfo=tzutc()))
    }
    assert year_2014 == parse_expressions('time in 2014')
def test_datetime():
    """Decoding NEON_DATETIME yields one aware UTC and two naive datetimes."""
    decoded = neon.decode(NEON_DATETIME)
    aware = datetime(2013, 4, 23, 13, 24, 55, 123456, tzinfo=tz.tzutc())
    assert decoded == [aware, datetime(2015, 1, 20), datetime(2015, 5, 10)]
def _process_depth_message(self, msg, buffer=False):
    """Process a depth event message.

    :param msg: Depth event message.
    :param buffer: When True, drop events whose final update id is already
        covered by the initial snapshot instead of enforcing sequential ids.
    :return:
    """
    if buffer and msg['u'] <= self._last_update_id:
        # ignore any updates before the initial update id
        return
    elif msg['U'] != self._last_update_id + 1:
        # if not buffered check we get sequential updates
        # otherwise init cache again
        self._init_cache()

    # add any bid or ask values
    for bid in msg['b']:
        self._depth_cache.add_bid(bid)
    for ask in msg['a']:
        self._depth_cache.add_ask(ask)

    # keeping update time
    # NOTE(review): msg['E'] is treated as an epoch timestamp in
    # milliseconds and tagged as UTC — confirm against the feed's docs.
    self._depth_cache.update_time = datetime.utcfromtimestamp(msg['E'] / 1000) \
        .replace(tzinfo=tz.tzutc())

    # call the callback with the updated depth cache
    if self._callback:
        self._callback(self._depth_cache)

    self._last_update_id = msg['u']

    # after processing event see if we need to refresh the depth cache
    if self._refresh_interval and int(time.time()) > self._refresh_time:
        self._init_cache()
def _generate_tzoffsets(limited):
    # Build (tzinfo, offset-string) pairs covering ISO 8601 offset formats.
    def _mkoffset(hmtuple, fmt):
        # The minutes component takes the sign of the hours component so
        # e.g. (-5, 30) means UTC-05:30 rather than UTC-04:30.
        h, m = hmtuple
        m_td = (-1 if h < 0 else 1) * m
        tzo = tz.tzoffset(None, timedelta(hours=h, minutes=m_td))
        return tzo, fmt.format(h, m)

    out = []
    if not limited:
        # The subset that's just hours
        hm_out_h = [(h, 0) for h in (-23, -5, 0, 5, 23)]
        out.extend([_mkoffset(hm, '{:+03d}') for hm in hm_out_h])

        # Ones that have hours and minutes
        hm_out = [] + hm_out_h
        hm_out += [(-12, 15), (11, 30), (10, 2), (5, 15), (-5, 30)]
    else:
        # -0 is just 0; only the hour carries the sign in the formats below.
        hm_out = [(-5, -0)]

    fmts = ['{:+03d}:{:02d}', '{:+03d}{:02d}']
    out += [_mkoffset(hm, fmt) for hm in hm_out for fmt in fmts]

    # Also add in UTC and naive
    out.append((tz.tzutc(), 'Z'))
    out.append((None, ''))

    return out
def as_expression(field: Field, value) -> Expression:
    """
    Convert a single field/value to expression, following the "simple" conventions.
    """
    if isinstance(value, Range):
        return field.between(value.begin, value.end)
    elif isinstance(value, list):
        # A list of values means "any of": recurse per element and OR them.
        return OrExpression(*(as_expression(field, val) for val in value))
    # Treat a date (day) as a time range.
    elif isinstance(value, date) and not isinstance(value, datetime):
        # Expand the day to [midnight, 23:59:59.999999] UTC and recurse
        # through the Range branch above.
        return as_expression(
            field,
            Range(datetime.combine(value, time.min.replace(tzinfo=tz.tzutc())),
                  datetime.combine(value, time.max.replace(tzinfo=tz.tzutc()))))
    return field == value
def gettz(self) -> datetime.tzinfo:
    """
    Resolve the device timezone from the context attributes, falling back
    to UTC when the attribute is missing or unresolvable.
    https://smarthub-wbench.wesp.telekom.net/pages/smarthub_cloud/skill-spi/public/#attribute-types
    :return:
    """
    zone_name = self._get_attribute('timezone', 'UTC')
    resolved = tz.gettz(zone_name)
    return resolved if resolved is not None else tz.tzutc()
def __get_local_datetime(self, datetime_string):
    """
    Parse *datetime_string*, treat it as UTC, and return it converted to
    the local timezone.
    """
    from_zone = tz.tzutc()
    to_zone = tz.tzlocal()
    date = dateutil.parser.parse(datetime_string)
    # BUG FIX: datetime.replace() returns a NEW object; the original code
    # discarded the result, leaving `date` naive so astimezone() would
    # interpret it as local time instead of UTC.
    date = date.replace(tzinfo=from_zone)
    return date.astimezone(to_zone)
def scrub_ch_data(data, meta):
    """Reshape raw ClickHouse (data, meta) into lists of dicts and stringify
    date/datetime values as ISO 8601 UTC strings."""
    # for now, convert back to a dict-y format to emulate the json
    data = [{c[0]: d[i] for i, c in enumerate(meta)} for d in data]
    meta = [{'name': m[0], 'type': m[1]} for m in meta]
    for col in meta:
        # Convert naive datetime strings back to TZ aware ones, and stringify
        # TODO maybe this should be in the json serializer
        if DATETIME_TYPE_RE.match(col['type']):
            for d in data:
                d[col['name']] = d[col['name']].replace(tzinfo=tz.tzutc()).isoformat()
        elif DATE_TYPE_RE.match(col['type']):
            for d in data:
                # Dates carry no time; promote to midnight UTC datetimes.
                dt = datetime(*(d[col['name']].timetuple()[:6])).replace(tzinfo=tz.tzutc())
                d[col['name']] = dt.isoformat()
    return (data, meta)
def test_valid_response(self):
    """Tests on happy path, validating response and iterations over it."""
    response = KustoResponseDataSetV2(json.loads(RESPONSE_TEXT))
    # Test that basic iteration works
    self.assertEqual(len(response), 3)
    self.assertEqual(len(list(response.primary_results[0])), 3)
    table = list(response.tables[0])
    self.assertEqual(1, len(table))
    expected_table = [
        [
            datetime(2016, 6, 6, 15, 35, tzinfo=tzutc()),
            "foo",
            101,
            3.14,
            False,
            timedelta(days=4, hours=1, minutes=2, seconds=3, milliseconds=567),
        ],
        [datetime(2016, 6, 7, 16, tzinfo=tzutc()), "bar", 555, 2.71, True, timedelta()],
        [None, text_type(""), None, None, None, None],
    ]
    # Test access by index and by column name
    primary_table = response.primary_results[0]
    for row in primary_table:
        self.assertEqual(row[0], row["Timestamp"])
        self.assertEqual(row[1], row["Name"])
        self.assertEqual(row[2], row["Altitude"])
        self.assertEqual(row[3], row["Temperature"])
        self.assertEqual(row[4], row["IsFlying"])
        self.assertEqual(row[5], row["TimeFlying"])
        # Test all types (null cells degrade to NoneType)
        self.assertEqual(type(row[0]), datetime if row[0] else type(None))
        self.assertEqual(type(row[1]), text_type)
        self.assertEqual(type(row[2]), int if row[2] else type(None))
        self.assertEqual(type(row[3]), float if row[3] else type(None))
        self.assertEqual(type(row[4]), bool if row[4] is not None else type(None))
        self.assertEqual(type(row[5]), timedelta if row[5] is not None else type(None))
    # Cell-by-cell comparison against the expected table.
    for i in range(0, len(primary_table)):
        row = primary_table[i]
        expected_row = expected_table[i]
        for j in range(0, len(row)):
            self.assertEqual(row[j], expected_row[j])
def test_valid_response(self):
    """Tests on happy path, validating response and iterations over it."""
    response = KustoResponseDataSetV2(json.loads(RESPONSE_TEXT))
    # Test that basic iteration works
    assert len(response) == 3
    assert len(list(response.primary_results[0])) == 3
    table = list(response.tables[0])
    assert 1 == len(table)
    expected_table = [
        [
            datetime(2016, 6, 6, 15, 35, tzinfo=tzutc()), "foo", 101, 3.14,
            False,
            timedelta(days=4, hours=1, minutes=2, seconds=3, milliseconds=567)
        ],
        [
            datetime(2016, 6, 7, 16, tzinfo=tzutc()), "bar", 555, 2.71, True,
            timedelta()
        ],
        [None, str(""), None, None, None, None],
    ]
    columns = [
        "Timestamp", "Name", "Altitude", "Temperature", "IsFlying",
        "TimeFlying"
    ]
    # Test access by index and by column name
    primary_table = response.primary_results[0]
    for row in primary_table:
        # Test all types; null cells are accepted as None for any column.
        for i, expected_type in enumerate(
                [datetime, str, int, float, bool, timedelta]):
            assert row[i] == row[columns[i]]
            assert row[i] is None or isinstance(row[i], expected_type)
    # Cell-by-cell comparison against the expected table.
    for row_index, row in enumerate(primary_table):
        expected_row = expected_table[row_index]
        for col_index, value in enumerate(row):
            assert value == expected_row[col_index]
def test_transform_datetime() -> None:
    now = datetime(2020, 1, 2, 3, 4, 5)
    fmt = "2020-01-02T03:04:05+00:00"
    # Naive datetimes are treated as already being UTC.
    assert transform_datetime(now) == fmt
    # Explicit UTC is preserved as-is.
    assert transform_datetime(now.replace(tzinfo=tz.tzutc())) == fmt
    offset = timedelta(hours=8)
    # An aware datetime in a +08:00 zone (the "PST" label is just a name)
    # advanced by the same offset normalizes to the same UTC instant.
    assert (transform_datetime(
        now.replace(tzinfo=tz.tzoffset("PST", offset)) + offset) == fmt)
def utc_to_localtime(dt, session=None):
    """Format a UTC datetime as a local-time string using the session
    language's locale (defaults to 'pl').

    NOTE(review): setlocale mutates process-global state and is never
    restored afterwards — not thread-safe. Assumes `dt` is naive UTC.
    """
    lang = 'pl'
    if session and 'lang' in session:
        lang = session['lang']
    locale.setlocale(locale.LC_TIME, loc_map[lang])
    d = dt.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
    return d.strftime('%a, %d %B %Y %H:%M:%S %Z')
def test_parse_dates():
    assert {'time': datetime(2014, 3, 2, tzinfo=tzutc())} == parse_expressions('time = 2014-03-02')
    assert {'time': datetime(2014, 3, 2, tzinfo=tzutc())} == parse_expressions('time = 2014-3-2')

    # A missing day defaults to the first of the month.
    # They are probably better off using in-expressions in these cases (eg. "time in 2013-01"), but it's here
    # for backwards compatibility.
    march_2014 = {
        'time': datetime(2014, 3, 1, tzinfo=tzutc())
    }
    assert march_2014 == parse_expressions('time = 2014-03')
    assert march_2014 == parse_expressions('time = 2014-3')

    # Month-only bounds expand to the first of each month.
    implied_feb_2014 = {
        'time': Range(datetime(2014, 2, 1, tzinfo=tzutc()),
                      datetime(2014, 3, 1, tzinfo=tzutc()))
    }
    assert implied_feb_2014 == parse_expressions('2014-02 < time < 2014-03')
    assert implied_feb_2014 == parse_expressions('time in range (2014-02, 2014-03)')
def fetch_quote(ticker, time=None):
    """Fetch a spot price quote from Coinbase.

    Args:
      ticker: Currency pair code (e.g. "BTC-USD"); lower-cased for the URL.
      time: Optional aware datetime; when given, the spot price for that
        UTC date is requested. Otherwise the current price is fetched.
    Returns:
      A source.SourcePrice carrying the price, time and quote currency.
    Raises:
      CoinbaseError: If the HTTP response status is not 200 OK.
    """
    url = "https://api.coinbase.com/v2/prices/{}/spot".format(ticker.lower())
    options = {}
    if time is not None:
        options['date'] = time.astimezone(tz.tzutc()).date().isoformat()
    # Pass the query explicitly as `params`; the original relied on it being
    # requests.get's second positional argument.
    response = requests.get(url, params=options)
    if response.status_code != requests.codes.ok:
        raise CoinbaseError("Invalid response ({}): {}".format(
            response.status_code, response.text))
    result = response.json()
    price = D(result['data']['amount'])
    if time is None:
        time = datetime.datetime.now(tz.tzutc())
    currency = result['data']['currency']
    return source.SourcePrice(price, time, currency)
def transform_date(value: date) -> str:
    """
    Convert a timezone-naive date object into an ISO 8601 formatted
    datetime string at midnight UTC.

    Neither Python nor ClickHouse dates carry a timezone, so UTC is assumed
    here purely so the output matches the datetime formatting elsewhere.
    """
    midnight = datetime(*value.timetuple()[:6])
    return midnight.replace(tzinfo=tz.tzutc()).isoformat()
def isoStr2utc8Str(isoStr):
    # Parse the ISO 8601 timestamp string into a naive UTC datetime.
    from datetime import datetime
    utc = datetime.strptime(isoStr, '%Y-%m-%dT%H:%M:%S.%fZ')
    # Attach the UTC timezone to the naive datetime, convert it to the local
    # timezone, then format it as a string (milliseconds kept by trimming
    # the last three microsecond digits). The timezone can be hard-coded.
    # NOTE(review): importing `tz` from `dateutil.tz` reaches into the
    # internal submodule; `from dateutil import tz` is the documented form.
    from dateutil.tz import tz
    utc8Time = utc.replace(tzinfo=tz.tzutc()).astimezone(
        tz.tzlocal()).strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
    # utc8Time = utc.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal()).replace(tzinfo=None)
    return utc8Time
def convert_datetime_string_to_time(date):
    """
    Parse a 'YYYY-MM-DDTHH:MM:SS' UTC value and return the corresponding
    time-of-day in the Central timezone.
    """
    utc_zone = tz.tzutc()
    central_zone = _get_central_tz()
    parsed = datetime.strptime(str(date), '%Y-%m-%dT%H:%M:%S')
    aware_utc = parsed.replace(tzinfo=utc_zone)
    return aware_utc.astimezone(central_zone).time()
async def convert_utc_to_local(self, utc_time):
    """
    Convert a "YYYY-MM-DD HH:MM:SS" zulu-time string into an aware datetime
    in the local timezone.

    :param utc_time: UTC timestamp string.
    :return: The equivalent timezone-aware datetime in local time.
    """
    parsed = datetime.strptime(utc_time, '%Y-%m-%d %H:%M:%S')
    as_utc = parsed.replace(tzinfo=tz.tzutc())
    return as_utc.astimezone(tz.tzlocal())
def get_stats():
    # Markdown summary: total user-settings count plus the ten most recently
    # updated users, newest first.
    total = UserSetting.select().count()
    users = select(
        (u.updated_at, u.points_count, u.username, u.id)
        for u in UserSetting).order_by(lambda u, p, n, i: desc(u)).limit(10)
    # NOTE(review): updated_at is treated as naive UTC before converting to
    # local time — confirm how it is stored.
    return ('Total: %s\n\n' % total) + '\n'.join(('`%s %s` %s' % (
        upd.replace(
            tzinfo=tzutc()).astimezone(tzlocal()).strftime('%d.%m.%y %H:%M'),
        pc,
        ('@%s' % un) if un else uid,
    )) for upd, pc, un, uid in users)
def test_datetime(self):
    class MySchema(EmbeddedSchema):
        a = fields.DateTimeField()

    s = MySchema(strict=True)
    # Naive datetime objects load unchanged.
    data, _ = s.load({'a': datetime(2016, 8, 6)})
    assert data['a'] == datetime(2016, 8, 6)
    # A trailing "Z" yields an aware UTC datetime; no suffix stays naive.
    data, _ = s.load({'a': "2016-08-06T00:00:00Z"})
    assert data['a'] == datetime(2016, 8, 6, tzinfo=tzutc())
    data, _ = s.load({'a': "2016-08-06T00:00:00"})
    assert data['a'] == datetime(2016, 8, 6)
    with pytest.raises(ValidationError):
        s.load({'a': "dummy"})
# Factory producing Retreat model instances with plausible randomized data.
# Deduplicates on `name`; start_time windows (+10d..+30d) always precede
# end_time windows (+31d..+600d), both timezone-aware in UTC.
class RetreatFactory(factory.DjangoModelFactory):
    class Meta:
        model = Retreat
        django_get_or_create = ('name', )

    name = factory.sequence(lambda n: f'Retreat {n}')
    place_name = ''
    country = ''
    state_province = factory.Faker('state')
    city = factory.Faker('city')
    address_line1 = 'address'
    address_line2 = factory.Faker('secondary_address')
    postal_code = factory.Faker('postalcode')
    latitude = factory.Faker('latitude')
    longitude = factory.Faker('longitude')
    timezone = factory.Faker('timezone')
    details = factory.Faker('text', max_nb_chars=1000)
    seats = factory.fuzzy.FuzzyInteger(0)
    notification_interval = timedelta(hours=24)
    activity_language = factory.fuzzy.FuzzyChoice(Retreat.ACTIVITY_LANGUAGE)
    price = factory.fuzzy.FuzzyDecimal(0, 9999, 2)
    start_time = factory.Faker('date_time_between',
                               start_date="+10d",
                               end_date="+30d",
                               tzinfo=tz.tzutc())
    end_time = factory.Faker('date_time_between',
                             start_date="+31d",
                             end_date="+600d",
                             tzinfo=tz.tzutc())
    min_day_refund = factory.fuzzy.FuzzyInteger(0)
    refund_rate = factory.fuzzy.FuzzyInteger(0)
    min_day_exchange = factory.fuzzy.FuzzyInteger(0)
    # users =
    # exclusive_memberships =
    is_active = factory.Faker('boolean', chance_of_getting_true=50)
    email_content = factory.Faker('email')
    accessibility = factory.Faker('boolean', chance_of_getting_true=50)
    has_shared_rooms = factory.Faker('boolean', chance_of_getting_true=50)
def fetch_quote(ticker):
    """Fetch the current spot price for *ticker* from Coinbase."""
    url = "https://api.coinbase.com/v2/prices/{}/spot".format(ticker.lower())
    response = requests.get(url)
    if response.status_code != requests.codes.ok:
        raise CoinbaseError("Invalid response ({}): {}".format(response.status_code, response.text))
    payload = response.json()
    # Quantize to cents and timestamp the quote with the current UTC time.
    amount = D(payload['data']['amount']).quantize(D('0.01'))
    quoted_at = datetime.datetime.now(tz.tzutc())
    quote_currency = payload['data']['currency']
    return source.SourcePrice(amount, quoted_at, quote_currency)
def __get_local_datetime(datetime_input):
    """
    Normalize a datetime string or datetime object to an aware datetime in
    the local timezone. Naive datetime objects are assumed to be UTC.

    :raises TypeError: if the input is neither a str nor a datetime.
    """
    # isinstance instead of `type(...) is ...`: idiomatic, and accepts
    # subclasses (backward-compatible — a plain str/datetime still matches).
    if isinstance(datetime_input, str):
        local_datetime = dateutil.parser.parse(datetime_input)
    elif isinstance(datetime_input, datetime):
        if datetime_input.tzinfo is None:
            # Treat naive datetimes as UTC before converting.
            local_datetime = datetime_input.replace(tzinfo=tz.tzutc())
        else:
            local_datetime = datetime_input
    else:
        raise TypeError("Unknown type for datetime input: {}".format(
            type(datetime_input)))
    return local_datetime.astimezone(tz.tzlocal())
def timestr_to_utc(time_str, local=True):
    """ Converts a time string to 24 hour time.
    If local=True, time_str should contain a timezone offset and the caller
    should expect a localized datetime.
        eg 2016-10-25 07:00:00+00:00
           year-mo-da 24hr:min:sec+timezone_offset
    time_str can also contain dates in:
        timestr_to_utc('1/1/16 12:00 AM -07:00')
        out: 2016-01-01 07:00:00+00:00 (datetime)
    Else, time_str should not contain an offset, and the caller should
    expect a non-localized datetime.
    """
    if local:
        local_time = parse(time_str, fuzzy=True)
        # NOTE(review): if time_str carries no offset, parse() returns a
        # naive datetime and astimezone interprets it in the machine's local
        # zone — confirm callers always include an offset on this path.
        utc_time = local_time.astimezone(tz.tzutc())
        return utc_time
    else:
        return parse(time_str)
def test_sanity_control_command(self, mock_post, mock_aad):
    """Tests control command."""
    client = KustoClient("https://somecluster.kusto.windows.net")
    response = client.execute_mgmt("NetDefaultDB", ".show version")
    self.assertEqual(len(response), 1)
    primary_table = response.primary_results[0]
    # The version table carries exactly one row.
    row_count = 0
    for _ in primary_table:
        row_count += 1
    self.assertEqual(row_count, 1)
    result = primary_table[0]
    self.assertEqual(result["BuildVersion"], "1.0.6693.14577")
    self.assertEqual(
        result["BuildTime"],
        datetime(year=2018, month=4, day=29, hour=8, minute=5, second=54, tzinfo=tzutc()),
    )
    self.assertEqual(result["ServiceType"], "Engine")
    self.assertEqual(result["ProductVersion"], "KustoMain_2018.04.29.5")
def get_utc_datetime(self, line, regex, date_time_format):
    """
    From the given string retrieve the utc time

    :param line: string containing the time
    :param regex: pattern used to locate the timestamp within `line`
    :param date_time_format: strptime format for the matched timestamp
    :return: aware datetime converted to UTC, or the original `line`
        unchanged when no timestamp is found
    """
    date_time_regex = r"{}".format(regex)
    search_obj = re.search(date_time_regex, line, re.M | re.I)
    time_in_utc = line
    if search_obj:
        time_string = search_obj.group()
        utc_zone = tz.tzutc()
        dt_object = datetime.strptime(time_string, date_time_format)
        # NOTE(review): strptime yields a naive datetime, so astimezone
        # interprets it in the machine's local zone before converting to
        # UTC — confirm that is the intended semantics.
        time_in_utc = dt_object.astimezone(tz=utc_zone)
    else:
        pass
    return time_in_utc
def test_datetime(self):
    class MySchema(EmbeddedSchema):
        a = fields.DateTimeField()
        b = fields.LocalDateTimeField()

    s = MySchema(strict=True)
    # Naive datetimes load unchanged; a "Z" suffix yields aware UTC.
    data, _ = s.load({'a': dt.datetime(2016, 8, 6)})
    assert data['a'] == dt.datetime(2016, 8, 6)
    data, _ = s.load({'a': "2016-08-06T00:00:00Z"})
    assert data['a'] == dt.datetime(2016, 8, 6, tzinfo=tzutc())
    data, _ = s.load({'a': "2016-08-06T00:00:00"})
    assert data['a'] == dt.datetime(2016, 8, 6)
    with pytest.raises(ValidationError):
        s.load({'a': "dummy"})

    # Test DateTimeField and LocalDateTimeField round to milliseconds
    s = MySchema()
    data, _ = s.load({
        'a': dt.datetime(2016, 8, 6, 12, 30, 30, 123456),
        'b': dt.datetime(2016, 8, 6, 12, 30, 30, 123456),
    })
    assert data['a'].microsecond == 123000
    assert data['b'].microsecond == 123000
    # Rounding up 999876 us carries into the next whole second/hour.
    s = MySchema()
    data, _ = s.load({
        'a': dt.datetime(2016, 8, 6, 12, 59, 59, 999876),
        'b': dt.datetime(2016, 8, 6, 12, 59, 59, 999876),
    })
    assert data['a'].hour == 13
    assert data['b'].hour == 13
    assert data['a'].minute == 0
    assert data['b'].minute == 0
    assert data['a'].second == 0
    assert data['b'].second == 0
    assert data['a'].microsecond == 0
    assert data['b'].microsecond == 0
def get_entry(self):
    """Return the record's creation time as an aware UTC datetime,
    truncated to whole seconds."""
    created_at = self.record['fields']['created']
    # Convert timestamp to an offset-aware datetime normalized to UTC.
    parsed = self.parse_date(created_at)
    return parsed.astimezone(tzutc()).replace(microsecond=0)
def test_parse_tzstr_zero_as_utc(tzstr, zero_as_utc):
    # A zero offset always compares equal to UTC; only with
    # zero_as_utc=True must the concrete type be tz.tzutc (rather than an
    # equivalent tz.tzoffset), hence the deliberate type() comparison.
    tzi = isoparser().parse_tzstr(tzstr, zero_as_utc=zero_as_utc)
    assert tzi == tz.tzutc()
    assert (type(tzi) == tz.tzutc) == zero_as_utc
watson.frames.add('bar', 20, 45, id='2', updated_at=45) conflicting, merging = watson.merge_report( str(datafiles) + '/frames-with-conflict') assert len(conflicting) == 1 assert len(merging) == 1 assert conflicting[0].id == '2' assert merging[0].id == '3' # report/log _dt = datetime.datetime _tz = {'tzinfo': tzutc()} @pytest.mark.parametrize('now, mode, start_time', [ (_dt(2016, 6, 2, **_tz), 'year', _dt(2016, 1, 1, **_tz)), (_dt(2016, 6, 2, **_tz), 'month', _dt(2016, 6, 1, **_tz)), (_dt(2016, 6, 2, **_tz), 'week', _dt(2016, 5, 30, **_tz)), (_dt(2016, 6, 2, **_tz), 'day', _dt(2016, 6, 2, **_tz)), (_dt(2012, 2, 24, **_tz), 'year', _dt(2012, 1, 1, **_tz)), (_dt(2012, 2, 24, **_tz), 'month', _dt(2012, 2, 1, **_tz)), (_dt(2012, 2, 24, **_tz), 'week', _dt(2012, 2, 20, **_tz)), (_dt(2012, 2, 24, **_tz), 'day', _dt(2012, 2, 24, **_tz)), ]) def test_get_start_time_for_period(now, mode, start_time): with mock_datetime(now, datetime):
# -*- coding: utf-8 -*- from __future__ import unicode_literals from datetime import datetime, timedelta, date, time import itertools as it from dateutil.tz import tz from dateutil.parser import isoparser, isoparse import pytest import six UTC = tz.tzutc() def _generate_tzoffsets(limited): def _mkoffset(hmtuple, fmt): h, m = hmtuple m_td = (-1 if h < 0 else 1) * m tzo = tz.tzoffset(None, timedelta(hours=h, minutes=m_td)) return tzo, fmt.format(h, m) out = [] if not limited: # The subset that's just hours hm_out_h = [(h, 0) for h in (-23, -5, 0, 5, 23)] out.extend([_mkoffset(hm, '{:+03d}') for hm in hm_out_h]) # Ones that have hours and minutes hm_out = [] + hm_out_h hm_out += [(-12, 15), (11, 30), (10, 2), (5, 15), (-5, 30)]
def test_strictdatetime(self):
    # `a` default awareness, `b` loads naive, `c` loads tz-aware.
    class MySchema(EmbeddedSchema):
        a = fields.StrictDateTimeField()
        b = fields.StrictDateTimeField(load_as_tz_aware=False)
        c = fields.StrictDateTimeField(load_as_tz_aware=True)
    # Test _deserialize
    s = MySchema(strict=True)
    for date in (
            dt.datetime(2016, 8, 6),
            dt.datetime(2016, 8, 6, tzinfo=tzutc()),
            "2016-08-06T00:00:00Z",
            "2016-08-06T00:00:00",
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == dt.datetime(2016, 8, 6)
        assert data['b'] == dt.datetime(2016, 8, 6)
        assert data['c'] == dt.datetime(2016, 8, 6, tzinfo=tzutc())
    # Offset inputs: naive fields are shifted to UTC wall time.
    for date in (
            "2016-08-06T00:00:00+02:00",
            dt.datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)),
    ):
        data, _ = s.load({'a': date, 'b': date, 'c': date})
        assert data['a'] == dt.datetime(2016, 8, 5, 22, 0)
        assert data['b'] == dt.datetime(2016, 8, 5, 22, 0)
        assert data['c'] == dt.datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
    with pytest.raises(ValidationError):
        s.load({'a': "dummy"})
    # Test _deserialize_from_mongo
    MyDataProxy = data_proxy_factory('My', MySchema())
    d = MyDataProxy()
    for date in (
            dt.datetime(2016, 8, 6),
            dt.datetime(2016, 8, 6, tzinfo=tzutc()),
    ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == dt.datetime(2016, 8, 6)
        assert d.get('b') == dt.datetime(2016, 8, 6)
        assert d.get('c') == dt.datetime(2016, 8, 6, tzinfo=tzutc())
    for date in (
            dt.datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200)),
    ):
        d.from_mongo({'a': date, 'b': date, 'c': date})
        assert d.get('a') == dt.datetime(2016, 8, 5, 22, 0)
        assert d.get('b') == dt.datetime(2016, 8, 5, 22, 0)
        assert d.get('c') == dt.datetime(2016, 8, 6, tzinfo=tzoffset(None, 7200))
    # Test StrictDateTimeField rounds to milliseconds
    s = MySchema()
    data, _ = s.load({'a': dt.datetime(2016, 8, 6, 12, 30, 30, 123456)})
    assert data['a'].microsecond == 123000
    # Rounding up 999876 us carries into the next whole second/hour.
    data, _ = s.load({'a': dt.datetime(2016, 8, 6, 12, 59, 59, 999876)})
    assert data['a'].hour == 13
    assert data['a'].minute == 0
    assert data['a'].second == 0
    assert data['a'].microsecond == 0
def test_datetime():
    """Decoding NEON_DATETIME yields one aware and two naive datetimes."""
    utc_stamp = datetime(2013, 4, 23, 13, 24, 55, 123456, tzinfo=tz.tzutc())
    assert neon.decode(NEON_DATETIME) == [
        utc_stamp,
        datetime(2015, 1, 20),
        datetime(2015, 5, 10),
    ]