def test_inactive_date(self):
    """OrgFormat.inactive_date() renders a struct_time as an Org-mode
    inactive timestamp ``[YYYY-MM-DD Day HH:MM(:SS)]``.

    BUG FIX: the expected weekday was ``Mon`` although 1980-12-31 was a
    Wednesday — the original inline comments even admitted it.
    """
    ## fixed day (1980-12-31 was a Wednesday):
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980, 12, 31, 0, 0, 0, 0, 0, 0])),
        u'[1980-12-31 Wed]')
    ## fixed time with seconds:
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980, 12, 31, 23, 59, 58, 0, 0, 0]), 'foo'),
        u'[1980-12-31 Wed 23:59:58]')
    ## fixed time without seconds:
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980, 12, 31, 23, 59, 0, 0, 0, 0]), 'foo'),
        u'[1980-12-31 Wed 23:59]')
    YYYYMMDDwday = time.strftime('%Y-%m-%d %a', time.localtime())
    hhmmss = time.strftime('%H:%M:%S', time.localtime())
    ## simple form with current day:
    self.assertEqual(
        OrgFormat.inactive_date(time.localtime()),
        u'[' + YYYYMMDDwday + u']')
    ## show_time parameter not named:
    self.assertEqual(
        OrgFormat.inactive_date(time.localtime(), True),
        u'[' + YYYYMMDDwday + u' ' + hhmmss + u']')
    ## show_time parameter named:
    self.assertEqual(
        OrgFormat.inactive_date(time.localtime(), show_time=True),
        u'[' + YYYYMMDDwday + u' ' + hhmmss + u']')
def test_creates_item_from_given_data(self):
    """A parsed feed entry is converted into a superdesk item dict."""
    entry = dict(
        guid='http://news.com/rss/1234abcd',
        published_parsed=struct_time([2015, 2, 25, 16, 45, 23, 2, 56, 0]),
        updated_parsed=struct_time([2015, 2, 25, 17, 52, 11, 2, 56, 0]),
        title='Breaking News!',
        summary='Something happened...',
        body_text='This is body text.',
        author='author',
    )

    item = self.instance._create_item(entry, source='source')

    expected = {
        'guid': 'http://news.com/rss/1234abcd',
        'uri': 'http://news.com/rss/1234abcd',
        'type': 'text',
        'firstcreated': datetime(2015, 2, 25, 16, 45, 23),
        'versioncreated': datetime(2015, 2, 25, 17, 52, 11),
        'headline': 'Breaking News!',
        'abstract': 'Something happened...',
        'body_html': ('<p><a href="http://news.com/rss/1234abcd" '
                      'target="_blank">source</a></p>This is body text.'),
        'byline': 'author',
    }
    for key, value in expected.items():
        self.assertEqual(item.get(key), value)

    # The dateline mirrors the source name and the creation date.
    dateline = item.get('dateline', {})
    self.assertEqual(dateline.get('source'), 'source')
    self.assertEqual(dateline.get('date'), item.get('firstcreated'))
def test_returns_items_built_from_retrieved_data(self):
    """_update() wraps the items created from new feed entries in a list."""
    feed_parse.return_value = MagicMock(entries=[
        MagicMock(updated_parsed=struct_time([2015, 2, 25, 17, 11, 11, 2, 56, 0])),
        MagicMock(updated_parsed=struct_time([2015, 2, 25, 17, 22, 22, 2, 56, 0])),
    ])
    created_items = [
        dict(guid='item_1',
             firstcreated=datetime(2015, 2, 25, 17, 11, 11),
             versioncreated=datetime(2015, 2, 25, 17, 11, 11)),
        dict(guid='item_2',
             firstcreated=datetime(2015, 2, 25, 17, 22, 22),
             versioncreated=datetime(2015, 2, 25, 17, 22, 22)),
    ]
    self.instance._create_item.side_effect = list(created_items)

    returned = self.instance._update(
        {'last_updated': datetime(2015, 2, 25, 14, 0, 0)})

    # A single batch is returned, containing both created items in order.
    self.assertEqual(len(returned), 1)
    self.assertEqual(returned[0], created_items)
def knox(x1, y1, t1, x2, y2, t2, dist_scale, time_scale_days, nrand=1000, verbose=True): '''Compute the Knox test statistic: X = # events near in space and time versus time-permuted''' # First get rid of points where the spatial location is undefined. t1leads=True wh1 = np.where(~np.isnan(x1)) wh2 = np.where(~np.isnan(x2)) x1, y1, t1 = x1[wh1], y1[wh1], t1[wh1] x2, y2, t2 = x2[wh2], y2[wh2], t2[wh2] # Now compute the times in seconds for easy math. t1 = np.array([time.mktime(time.struct_time(i)) for i in t1]) t2 = np.array([time.mktime(time.struct_time(i)) for i in t2]) time_scale = time_scale_days * 24 * 3600 # Determine the array sizes. We're going to be looping over (x1,y1,t1), # so make sure that's the shorter of the two data sets to take maximal # advantage of numpy's parallelization for vectorized operations. Nd1 = len(t1) Nd2 = len(t2) if Nd1 > Nd2: x3, y3, t3, Nd3 = x1, y1, t1, Nd1 x1, y1, t1, Nd1 = x2, y2, t2, Nd2 x2, y2, t2, Nd2 = x3, y3, t3, Nd3 t1leads = False # Compute the test statistic for the real data and the randomized data. X, randX = compute_test_statistic(x1, y1, t1, Nd1, x2, y2, t2, Nd2, \ dist_scale, time_scale, nrand=nrand, t1leads=t1leads, verbose=verbose) # OK, we're done now. return X, randX
def get_today_condition(user):
    """Fetch and post-process today's activity records for *user*.

    Queries raw records from noon of the previous day until now, drops
    records that do not end today, aggregates them by class, and scales
    the first remaining record so it starts at today's midnight.

    BUG FIX: the original pruning loop was
    ``while data[i][...] != str(today) and i < length:`` with ``data.pop(0)``
    in the body — it indexed ``data[i]`` *before* the bounds check, never
    advanced ``i``, and compared against a stale ``length``, so it raised
    IndexError whenever no record matched.  The guard now checks ``data``
    directly and always inspects the current head element.
    """
    now_time = time.localtime()
    # Midnight of today minus 12 hours == noon of the previous day.
    last_time = time.mktime(time.struct_time(
        [now_time.tm_year, now_time.tm_mon, now_time.tm_mday,
         0, 0, 0, 0, 0, 0])) - 12 * 3600
    today = now_time.tm_mday
    last_time = time.localtime(last_time)
    start_time = (process_num(last_time.tm_year) + "-" + process_num(last_time.tm_mon) +
                  "-" + process_num(last_time.tm_mday) + " " +
                  process_num(last_time.tm_hour) + ":" + process_num(last_time.tm_min) +
                  ":" + process_num(last_time.tm_sec))
    end_time = (process_num(now_time.tm_year) + "-" + process_num(now_time.tm_mon) +
                "-" + process_num(now_time.tm_mday) + " " +
                process_num(now_time.tm_hour) + ":" + process_num(now_time.tm_min) +
                ":" + process_num(now_time.tm_sec))
    data = get_data(["user", 'startTime', 'endTime', 'type', 'distance', 'calories',
                     'steps', 'subType', 'actTime', 'nonActTime', 'dsNum', 'lsNum',
                     'wakeNum', 'wakeTimes', 'score'],
                    start_time, end_time, user.id, raw=True)
    # Drop leading records whose end date is not today.
    while data and data[0]["endTime"].split('-')[2].split(' ')[0] != str(today):
        data.pop(0)
    new_data = integrate_by_class(data)
    calculate_time(new_data)
    new_data.pop(0)
    # Clip the first record so it starts at today's midnight, scaling its
    # counters by the fraction of the record that falls inside today.
    time_first = new_data[0]["endTime"].split('-')[2].split(' ')[1].split(':')
    time_first = time.mktime(time.struct_time(
        [now_time.tm_year, now_time.tm_mon, now_time.tm_mday,
         int(time_first[0]), int(time_first[1]), int(time_first[2]), 0, 0, 0]))
    time_00 = time.mktime(time.struct_time(
        [now_time.tm_year, now_time.tm_mon, now_time.tm_mday, 0, 0, 0, 0, 0, 0]))
    rate = float(time_first - time_00) / new_data[0]["allTime"]
    new_data[0]["allTime"] -= (time_first - time_00)
    new_data[0]["startTime"] = (str(now_time.tm_year) + "-" + str(now_time.tm_mon) +
                                "-" + str(now_time.tm_mday) + " " +
                                "00:00:00").encode("utf-8")
    new_data[0]["distance"] = int(new_data[0]["distance"] * rate)
    new_data[0]["steps"] = int(new_data[0]["steps"] * rate)
    new_data[0]["calories"] = int(new_data[0]["calories"] * rate)
    new_data[0]["dsNum"] = int(new_data[0]["dsNum"] * rate)
    new_data[0]["sleepNum"] = int(new_data[0]["sleepNum"] * rate)
    return new_data
def main():
    # Script outline
    #
    # 1. When is the object visible from Paranal?
    # 2. Is it visible within 4 hours after the trigger?
    # 3. Is it visible long enough (~ 1hr)
    RA = "23:18:11.57"
    DEC = "32:28:31.8"
    EQUINOX = "J2000"
    # Format for my script = ?!? skycat!!!
    #TRIGGER =
    # Example usage
    # Set the observatory information and calc twilights
    GRB = CelestialObject()
    GRB.setObservatory(siteabbrev='e')
    # Current UTC as a struct_time.
    intime = time.gmtime(time.time())
    intime = time.struct_time(intime[0:9])
    # NOTE(review): timestruct is assigned but never used below — confirm
    # whether it was meant to be passed to one of the GRB methods.
    timestruct = time.struct_time(intime)
    GRB.computeTwilights()
    GRB.computeNightLength()
    GRB.printInfo()
    # Keep only (year, month, day, hour, minute, second) for printing.
    intime = intime[0:6]
    print ""
    print "Given date: %s-%s-%s\t %s:%s:%s UT" % (intime[0], intime[1], intime[2], intime[3], intime[4], intime[5])
def test_creates_item_taking_field_name_aliases_into_account(self):
    """_create_item() resolves configured field-name aliases."""
    entry = dict(
        guid='http://news.com/rss/1234abcd',
        published_parsed=struct_time([2015, 2, 25, 16, 45, 23, 2, 56, 0]),
        updated_parsed=struct_time([2015, 2, 25, 17, 52, 11, 2, 56, 0]),
        title_field_alias='Breaking News!',
        summary_field_alias='Something happened...',
        body_text_field_alias='This is body text.',
    )
    aliases = [
        {'title': 'title_field_alias'},
        {'summary': 'summary_field_alias'},
        {'body_text': 'body_text_field_alias'},
    ]

    item = self.instance._create_item(entry, aliases)

    expected = {
        'guid': 'http://news.com/rss/1234abcd',
        'uri': 'http://news.com/rss/1234abcd',
        'type': 'text',
        'firstcreated': datetime(2015, 2, 25, 16, 45, 23),
        'versioncreated': datetime(2015, 2, 25, 17, 52, 11),
        'headline': 'Breaking News!',
        'abstract': 'Something happened...',
        'body_html': ('<p><a href="http://news.com/rss/1234abcd" '
                      'target="_blank">source</a></p>This is body text.'),
    }
    for key, value in expected.items():
        self.assertEqual(item.get(key), value)
def test_does_not_use_body_text_populate_fallback_if_aliased(self):
    """If body_text is aliased, the `content` fallback must NOT be used."""
    class AttrDict(dict):
        """Customized dict class, allows adding custom attributes to it."""

    data = AttrDict(
        guid='http://news.com/rss/1234abcd',
        published_parsed=struct_time([2015, 2, 25, 16, 45, 23, 2, 56, 0]),
        updated_parsed=struct_time([2015, 2, 25, 17, 52, 11, 2, 56, 0]),
        title='Breaking News!',
        summary='Something happened...',
        # NOTE: no body_text field
    )
    content = AttrDict(type='text/html', value='<p>This is body</p>')
    content.value = '<p>This is body</p>'
    data.content = [content]
    data.body_text_field_alias = None  # simulate non-existing alias field

    item = self.instance._create_item(
        data, [{'body_text': 'body_text_field_alias'}])

    # Only the source link is emitted; the content fallback was skipped.
    self.assertEqual(
        item.get('body_html'),
        '<p><a href="http://news.com/rss/1234abcd" target="_blank">source</a></p>')
def _parse_gpgga(self, args):
    """Parse the body (everything after the data type) of a NMEA GPGGA
    3D location fix sentence, updating timestamp, position and fix fields.

    BUG FIX: the original wrote ``time_utc = int(_parse_float(data[0]))``
    and only then checked ``time_utc is not None`` — ``int(None)`` raises
    TypeError, so the guard could never fire.  The None check now happens
    before the int conversion.
    """
    data = args.split(',')
    if data is None or len(data) != 14:
        return  # Unexpected number of params.
    # Parse fix time.
    time_utc = _parse_float(data[0])
    if time_utc is not None:
        # hhmmss packed as a single number, e.g. 235959.
        time_utc = int(time_utc)
        hours = time_utc // 10000
        mins = (time_utc // 100) % 100
        secs = time_utc % 100
        # Set or update time to a friendly python time struct.
        if self.timestamp_utc is not None:
            # Keep the existing date, replace only the time-of-day.
            self.timestamp_utc = time.struct_time((
                self.timestamp_utc.tm_year, self.timestamp_utc.tm_mon,
                self.timestamp_utc.tm_mday, hours, mins, secs, 0, 0, -1))
        else:
            self.timestamp_utc = time.struct_time((0, 0, 0, hours, mins,
                                                   secs, 0, 0, -1))
    # Parse latitude and longitude (S/W hemispheres are negative).
    self.latitude = _parse_degrees(data[1])
    if self.latitude is not None and \
       data[2] is not None and data[2].lower() == 's':
        self.latitude *= -1.0
    self.longitude = _parse_degrees(data[3])
    if self.longitude is not None and \
       data[4] is not None and data[4].lower() == 'w':
        self.longitude *= -1.0
    # Parse out fix quality and other simple numeric values.
    self.fix_quality = _parse_int(data[5])
    self.satellites = _parse_int(data[6])
    self.horizontal_dilution = _parse_float(data[7])
    self.altitude_m = _parse_float(data[8])
    self.height_geoid = _parse_float(data[10])
def test_inactive_date(self):
    """OrgFormat.inactive_date() renders Org-mode inactive timestamps."""
    ## fixed day:
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980,12,31,0,0,0,0,0,0])),
        u'[1980-12-31 Wed]')
    ## fixed time with seconds (seconds are not (yet) defined in Org-mode):
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980,12,31,23,59,58,0,0,0]), 'foo'),
        u'[1980-12-31 Wed 23:59]')
    ## fixed time without seconds:
    self.assertEqual(
        OrgFormat.inactive_date(time.struct_time([1980,12,31,23,59,0,0,0,0]), 'foo'),
        u'[1980-12-31 Wed 23:59]')

    now = time.localtime()
    date_part = time.strftime('%Y-%m-%d %a', now)
    ## seconds are not (yet) defined in Org-mode:
    time_part = time.strftime('%H:%M', now)

    ## simple form with current day:
    self.assertEqual(OrgFormat.inactive_date(time.localtime()),
                     u'[' + date_part + u']')
    ## show_time parameter not named:
    self.assertEqual(OrgFormat.inactive_date(time.localtime(), True),
                     u'[' + date_part + u' ' + time_part + u']')
    ## show_time parameter named:
    self.assertEqual(OrgFormat.inactive_date(time.localtime(), show_time=True),
                     u'[' + date_part + u' ' + time_part + u']')
def test_buttgmt(self):
    """/buttgmt sets the user timezone; the cron hook fires morning/evening
    messages at the right local hours for that offset."""
    self.receive_message('/buttgmt +3')
    self.assertReplied('Timezone set to GMT+3')
    self.test_buttmeon(status=u'''\
Butt enabled, use /buttmeoff to disable it.
Your timezone is set to *GMT+3*, use /buttgmt to change it.''')
    import mock
    import time
    # 09:50 UTC == 12:50 GMT+3 — not a trigger hour, so no reply expected.
    with mock.patch(
            'time.gmtime',
            return_value=time.struct_time((2016, 1, 18, 9, 50, 36, 0, 18, 0))):
        self.clear_queues()
        self.plugin.cron_go('instagram.butt')
        self.assertNoReplies()
    # 06:50 UTC == 09:50 GMT+3 — morning message fires.
    with mock.patch(
            'time.gmtime',
            return_value=time.struct_time((2016, 1, 18, 6, 50, 36, 0, 18, 0))):
        self.plugin.cron_go('instagram.butt')
        self.assertEqual(self.pop_reply()[1]['caption'], 'Good morning!')
    self.receive_message('/buttgmt -5')
    self.assertReplied('Timezone set to GMT-5')
    # 18:50 UTC == 13:50 GMT-5 — lunch message fires.
    with mock.patch(
            'time.gmtime',
            return_value=time.struct_time((2016, 1, 18, 18, 50, 36, 0, 18, 0))):
        self.plugin.cron_go('instagram.butt')
        self.assertEqual(self.pop_reply()[1]['caption'], 'Bon appetit!')
def checkMakeNewFileName(self):
    '''
    Routine to check if we need a new filename and to create one if so.
    Uses the increment flag (default is daily) to determine how often to
    create a new file.

    2014-07-17 C. Wingard    Added code to create files based on either
                             daily or hourly increments. Adds time to base
                             file name.

    IMPROVEMENT: ``gmtime()`` already returns a ``struct_time``; the
    original wrapped it in ``struct_time(...)`` again before reading
    ``tm_hour``/``tm_mday``, which is a no-op conversion.
    '''
    time_value = gmtime()
    time_string = strftime('%Y%m%dT%H%M', time_value) + '_UTC.dat'
    if self.increment == 'hourly':
        # check if the hour of the day has changed or if this is the first
        # time we've run this (e.g. hourOfDay == -1)
        if self.hourOfDay != time_value.tm_hour:
            # create a new filename string
            self.fileName = self.basename + '_' + time_string
            # update current hour of day
            self.hourOfDay = time_value.tm_hour
    if self.increment == 'daily':
        # check if the day of month has changed or if this is the first
        # time we've run this (e.g. dayOfMonth == -1)
        if self.dayOfMonth != time_value.tm_mday:
            # create a new filename string
            self.fileName = self.basename + '_' + time_string
            # update current day of month
            self.dayOfMonth = time_value.tm_mday
def test_old_due_date_format(self):
    """Dates without an explicit year default to the current year."""
    year = datetime.datetime.today().year
    cases = [
        ("March 12 12:00", (year, 3, 12, 12, 0, 0, 1, 71, 0)),
        ("December 4 16:30", (year, 12, 4, 16, 30, 0, 2, 338, 0)),
    ]
    for raw, expected in cases:
        self.assertEqual(time.struct_time(expected),
                         DateTest.date.from_json(raw))
def _strict_date(self, lean):
    """Return the year boundary as a struct_time: Jan 1 for EARLIEST,
    Dec 31 otherwise, with empty time fields."""
    year = self._precise_year()
    month_day = [1, 1] if lean == EARLIEST else [12, 31]
    return struct_time(
        [year] + month_day + TIME_EMPTY_TIME + TIME_EMPTY_EXTRAS)
def align_sleep_time():
    """Seconds from now until the next 12:01 local time (today if 12:01
    has not yet passed, otherwise tomorrow)."""
    now = time.localtime()
    target = time.mktime(time.struct_time(
        [now.tm_year, now.tm_mon, now.tm_mday, 12, 1, 0, 0, 0, 0]))
    past_noon = now.tm_hour > 12 or (now.tm_hour == 12 and now.tm_min > 1)
    if past_noon:
        target += 24 * 3600  # roll over to tomorrow's 12:01
    return target - time.time()
def _parse_gprmc(self, args):
    """Parse the body (everything after the data type) of a NMEA GPRMC
    minimum location fix sentence: time, status, position, speed, date.

    BUG FIX: the original wrote ``time_utc = int(_parse_float(data[0]))``
    and only then checked ``time_utc is not None`` — ``int(None)`` raises
    TypeError, so the guard could never fire.  The None check now happens
    before the int conversion.
    """
    data = args.split(',')
    if data is None or len(data) < 11 or data[0] is None:
        return  # Unexpected number of params.
    # Parse fix time.
    time_utc = _parse_float(data[0])
    if time_utc is not None:
        # hhmmss packed as a single number, e.g. 235959.
        time_utc = int(time_utc)
        hours = time_utc // 10000
        mins = (time_utc // 100) % 100
        secs = time_utc % 100
        # Set or update time to a friendly python time struct.
        if self.timestamp_utc is not None:
            # Keep the existing date, replace only the time-of-day.
            self.timestamp_utc = time.struct_time((
                self.timestamp_utc.tm_year, self.timestamp_utc.tm_mon,
                self.timestamp_utc.tm_mday, hours, mins, secs, 0, 0, -1))
        else:
            self.timestamp_utc = time.struct_time((0, 0, 0, hours, mins,
                                                   secs, 0, 0, -1))
    # Parse status (active/fixed or void).
    status = data[1]
    self.fix_quality = 0
    if status is not None and status.lower() == 'a':
        self.fix_quality = 1
    # Parse latitude and longitude (S/W hemispheres are negative).
    self.latitude = _parse_degrees(data[2])
    if self.latitude is not None and \
       data[3] is not None and data[3].lower() == 's':
        self.latitude *= -1.0
    self.longitude = _parse_degrees(data[4])
    if self.longitude is not None and \
       data[5] is not None and data[5].lower() == 'w':
        self.longitude *= -1.0
    # Parse out speed and other simple numeric values.
    self.speed_knots = _parse_float(data[6])
    self.track_angle_deg = _parse_float(data[7])
    # Parse date (ddmmyy).
    if data[8] is not None and len(data[8]) == 6:
        day = int(data[8][0:2])
        month = int(data[8][2:4])
        year = 2000 + int(data[8][4:6])  # Y2k bug, 2 digit date assumption.
                                         # This is a problem with the NMEA
                                         # spec and not this code.
        if self.timestamp_utc is not None:
            # Replace the timestamp with an updated one.
            # (struct_time is immutable and can't be changed in place)
            self.timestamp_utc = time.struct_time((year, month, day,
                                                   self.timestamp_utc.tm_hour,
                                                   self.timestamp_utc.tm_min,
                                                   self.timestamp_utc.tm_sec,
                                                   0, 0, -1))
        else:
            # Time hasn't been set so create it.
            self.timestamp_utc = time.struct_time((year, month, day, 0, 0,
                                                   0, 0, 0, -1))
def __calc_date_time(self):
    """Derive locale-specific strftime format strings for %c, %x and %X
    by formatting a magic date and reverse-mapping the output."""
    # Set self.date_time, self.date, & self.time by using
    # time.strftime().

    # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
    # overloaded numbers is minimized. The order in which searches for
    # values within the format string is very important; it eliminates
    # possible ambiguity for what something represents.
    time_tuple = time.struct_time((1999, 3, 17, 22, 44, 55, 2, 76, 0))
    date_time = [None, None, None]
    date_time[0] = time.strftime("%c", time_tuple).lower()
    date_time[1] = time.strftime("%x", time_tuple).lower()
    date_time[2] = time.strftime("%X", time_tuple).lower()
    # Each (literal, directive) pair maps a rendered value back to the
    # strftime directive that produced it.
    replacement_pairs = [
        ("%", "%%"), (self.f_weekday[2], "%A"),
        (self.f_month[3], "%B"), (self.a_weekday[2], "%a"),
        (self.a_month[3], "%b"), (self.am_pm[1], "%p"),
        ("1999", "%Y"), ("99", "%y"), ("22", "%H"),
        ("44", "%M"), ("55", "%S"), ("76", "%j"),
        ("17", "%d"), ("03", "%m"),
        ("3", "%m"),  # '3' needed for when no leading zero.
        ("2", "%w"), ("10", "%I"),
    ]
    replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
                              for tz in tz_values])
    for offset, directive in ((0, "%c"), (1, "%x"), (2, "%X")):
        current_format = date_time[offset]
        for old, new in replacement_pairs:
            # Must deal with possible lack of locale info
            # manifesting itself as the empty string (e.g., Swedish's
            # lack of AM/PM info) or a platform returning a tuple of empty
            # strings (e.g., MacOS 9 having timezone as ('','')).
            if old:
                current_format = current_format.replace(old, new)
        # If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
        # 2005-01-03 occurs before the first Monday of the year. Otherwise
        # %U is used.
        time_tuple = time.struct_time((1999, 1, 3, 1, 1, 1, 6, 3, 0))
        if "00" in time.strftime(directive, time_tuple):
            U_W = "%W"
        else:
            U_W = "%U"
        date_time[offset] = current_format.replace("11", U_W)
    self.LC_date_time = date_time[0]
    self.LC_date = date_time[1]
    self.LC_time = date_time[2]
def test_struct_time_to_jd(self):
    """struct_time_to_jd() converts AD and BC dates to Julian Date numbers."""
    # Check conversion of AD date & time to Julian Date number
    ad_date = struct_time(
        [2018, 4, 19] + [10, 13, 54] + convert.TIME_EMPTY_EXTRAS)
    self.assertEqual(2458227.9263194446, convert.struct_time_to_jd(ad_date))
    # Check conversion of BC date & time to Julian Date number
    bc_date = struct_time(
        [-2018, 4, 19] + [10, 13, 54] + convert.TIME_EMPTY_EXTRAS)
    self.assertEqual(984091.9263194444, convert.struct_time_to_jd(bc_date))
def __init__(self):
    """Initialise German weekday/month name tables and cache the current
    date, ISO-style calendar week and year.

    IMPROVEMENT: ``time.localtime()`` already returns a ``struct_time``;
    the original wrapped it in ``time.struct_time(...)`` again and called
    ``localtime()`` twice.  The value is now fetched once and used directly.
    """
    self.day_names = {0: "Montag", 1: "Dienstag", 2: "Mittwoch",
                      3: "Donnerstag", 4: "Freitag", 5: "Samstag",
                      6: "Sonntag"}
    self.month_names = {1: "Januar", 2: "Februar", 3: "März", 4: "April",
                        5: "Mai", 6: "Juni", 7: "Juli", 8: "August",
                        9: "September", 10: "Oktober", 11: "November",
                        12: "Dezember"}
    self.actual_day = datetime.date.today()
    now = time.localtime()
    # %W: week of the year with Monday as the first day of the week.
    self.actual_calendar_week = int(time.strftime("%W", now))
    self.actual_year = int(time.strftime("%Y", now))
def checkMakeNewFileName(self):
    """Routine to check if we need a new filename and to create one if so.

    A new daily file name ``<base>_YYYYMMDD_UTC.txt`` is generated when the
    UTC day of month changes (or on first run, when ``dayOfMonth`` holds a
    sentinel that cannot match).

    IMPROVEMENT: ``gmtime()`` already returns a ``struct_time``; the
    original wrapped it in ``struct_time(...)`` again before reading
    ``tm_mday``, which is a no-op conversion.
    """
    timevalue = gmtime()
    # if day of month has changed or this is the first time (dayOfMonth == 0)
    if self.dayOfMonth != timevalue.tm_mday:  # tested w/ tm_hour
        # create a new filename string
        self.fileName = self.baseFileName + "_" + strftime("%Y%m%d", timevalue) + "_UTC.txt"  # _%H%M%S
        # update current day of month
        self.dayOfMonth = timevalue.tm_mday  # tested w/ tm_hour
def integrate_data(data, s_time, days, is_score=False):
    """Collapse per-sample values into one value per calendar day, then pad
    or trim the result to exactly *days* entries.

    data     -- numeric samples, parallel to s_time
    s_time   -- timestamps; each is decoded by transfer_time() into at least
                (year, month, day) — TODO confirm exact return shape
    days     -- number of daily values to return
    is_score -- when True, non-zero samples are averaged per day instead of
                summed
    """
    length = len(s_time)
    processing_day = list()
    new_day = list()
    new_data = list()
    temp_data = 0
    temp_num = 0
    # Epoch seconds at midnight of the first sample's day.
    temp_day = transfer_time(s_time[0])
    processing_day = [temp_day[0], temp_day[1], temp_day[2], 0, 0, 0, 0, 0, 0]
    processing_day = time.mktime(time.struct_time(processing_day))
    new_day = [temp_day[0], temp_day[1], temp_day[2], 0, 0, 0, 0, 0, 0]
    new_day = time.mktime(time.struct_time(new_day))
    j = 0
    for i in range(length):
        temp_day = transfer_time(s_time[i])
        new_day = time.mktime(time.struct_time(
            [temp_day[0], temp_day[1], temp_day[2], 0, 0, 0, 0, 0, 0]))
        if new_day == processing_day:
            # Still accumulating the same day; zeros are ignored.
            if data[i] != 0:
                temp_num += 1
                temp_data += data[i]
        else:
            # Day changed: flush the accumulator, spreading it evenly over
            # any skipped days in between.
            if is_score:
                if temp_num != 0:
                    temp_data /= temp_num
                    temp_num = 0
            j += 1
            num_interval = int((new_day - processing_day) / 86400)
            temp_data /= num_interval
            for k in range(num_interval):
                new_data.append(temp_data)
            processing_day = new_day
            # NOTE(review): the first sample of the new day appears to be
            # discarded here (data[i] is never added after the reset) —
            # confirm whether that is intended.
            temp_data = 0
    # Flush the final (partial) day.
    if is_score:
        if temp_num != 0:
            temp_data /= float(temp_num)
            temp_num = 0
    new_data.append(temp_data)
    length = len(new_data)
    if days > length:
        # Pad at the front; for sums the first real value is spread across
        # the padding, for scores the padding is zero.
        residual = days - length
        if length > 0:
            if is_score:
                temp_data = 0
            else:
                temp_data = new_data[0] / (residual + 1)
                new_data[0] = temp_data
        else:
            temp_data = 0
        for i in range(residual):
            new_data.insert(0, temp_data)
    else:
        # Keep only the most recent *days* entries.
        new_data = new_data[(-1 * days):]
    return new_data
def parse_time(t):
    """Parse 'HH:MM:SS' or 'D HH:MM:SS' into a time.struct_time.

    Python 2 code (``except ValueError, e`` syntax).  In the day-prefixed
    form the day-of-month is incremented by one — presumably because the
    input day is zero-based while strptime's is one-based; TODO confirm
    against callers.  Raises Exception for any other format.
    """
    try:
        return time.struct_time(time.strptime(t, '%H:%M:%S'))
    except ValueError, e:
        try:
            t = time.strptime(t, '%d %H:%M:%S')
            t = list(t)
            t[2] += 1
            return time.struct_time(t)
        except ValueError, e:
            raise Exception("Time format unknown")
def test_podcast_and_episode():
    """End-to-end check of Podcast lazy episode loading and the modified
    flags on podcast, episode list and individual episodes."""
    episode_storage = DummyEpisodeStorage()
    name = "name"
    feed = "http://example.com/feed.xml"
    podcast = Podcast(name, feed, episode_storage)
    assert podcast.name == name
    assert podcast.feed == feed
    # Episodes are not loaded until first accessed.
    assert podcast._episode_storage is episode_storage
    assert hasattr(podcast, "_episode_storage")
    assert not hasattr(podcast, "_episodes")
    episodes = podcast.episodes
    # Accessing .episodes swaps the storage handle for the loaded list.
    assert not hasattr(podcast, "_episode_storage")
    assert hasattr(podcast, "_episodes")
    # First stored episode round-trips all fields.
    assert episodes[0].guid == 1
    assert episodes[0].title == 2
    assert episodes[0].link == 3
    assert episodes[0].media_href == 4
    assert episodes[0].published == struct_time((2012, 5, 6, 7, 8, 9, 6, 127, -1))
    assert episodes[0].downloaded == True
    # Second stored episode.
    assert episodes[1].guid == 6
    assert episodes[1].title == 7
    assert episodes[1].link == 8
    assert episodes[1].media_href == 9
    assert episodes[1].published == struct_time((2012, 6, 7, 7, 8, 9, 3, 159, -1))
    assert episodes[1].downloaded == False
    # Nothing is dirty right after loading.
    assert not podcast.modified
    assert not episodes.modified
    assert not episodes[0].modified
    assert not episodes[1].modified
    # Mutating one episode marks it and the list, but not the podcast.
    episodes[0].guid = 11
    assert not podcast.modified
    assert episodes.modified
    assert episodes[0].modified
    assert not episodes[1].modified
    # Flags can be cleared independently.
    episodes.modified = False
    episodes[0].modified = False
    assert not podcast.modified
    assert not episodes.modified
    assert not episodes[0].modified
    assert not episodes[1].modified
    # Appending a new episode marks only the list as modified.
    assert len(episodes) == 2
    episodes.append(Episode(podcast, 11, 12, 13, 14, "2012-12-12 12:12:12", True))
    assert len(episodes) == 3
    assert not podcast.modified
    assert episodes.modified
    assert not episodes[0].modified
    assert not episodes[1].modified
    assert not episodes[2].modified
def trim_struct_time(st, strip_time=False):
    """
    Return a `struct_time` based on the one provided but with the extra
    fields `tm_wday`, `tm_yday`, and `tm_isdst` reset to default values.

    If `strip_time` is set to true the time values are also set to zero:
    `tm_hour`, `tm_min`, and `tm_sec`.
    """
    if strip_time:
        fields = list(st[:3]) + TIME_EMPTY_TIME
    else:
        fields = list(st[:6])
    return struct_time(fields + TIME_EMPTY_EXTRAS)
def test_jd_to_struct_time(self):
    """jd_to_struct_time() inverts struct_time_to_jd for AD and BC dates."""
    # Check conversion of Julian Date number to AD date & time
    jd_ad = 2458227.9263194446  # As in `test_struct_time_to_jd`
    expected_ad = struct_time(
        [2018, 4, 19] + [10, 13, 54] + convert.TIME_EMPTY_EXTRAS)
    self.assertEqual(expected_ad, convert.jd_to_struct_time(jd_ad))
    # Check conversion of Julian Date number to BC date & time
    # WARNING: Converted time is off by 1 second, 53 not 54
    jd_bc = 984091.9263194444  # As in `test_struct_time_to_jd`
    expected_bc = struct_time(
        [-2018, 4, 19] + [10, 13, 54 - 1] + convert.TIME_EMPTY_EXTRAS)
    self.assertEqual(expected_bc, convert.jd_to_struct_time(jd_bc))
def test_datetupelutctimestamp(self):
    """datetupelutctimestamp() parses compact ISO strings to struct_time."""
    cases = {
        '19801231': time.struct_time([1980, 12, 31, 0, 0, 0, 2, 366, -1]),
        '19801231T235958': time.struct_time([1980, 12, 31, 23, 59, 58, 2, 366, -1]),
    }
    for raw, expected in cases.items():
        self.assertEqual(OrgFormat.datetupelutctimestamp(raw), expected)
def test_daterange(self):
    """OrgFormat.daterange() renders two struct_times as an Org range.

    BUG FIX: the seconds field was written as ``02`` — a leading-zero
    (octal-style) literal, which is a SyntaxError on Python 3; plain ``2``
    is the same value.
    """
    ## fixed time with seconds:
    self.assertEqual(
        OrgFormat.daterange(
            time.struct_time([1980, 12, 31, 23, 59, 58, 0, 0, 0]),
            time.struct_time([1981, 1, 15, 15, 30, 2, 0, 0, 0]),
        ),
        u'<1980-12-31 Wed>--<1981-01-15 Thu>')
    ## provoke error:
    with self.assertRaises(AssertionError):
        OrgFormat.daterange('foo', 42)
def test_datetimetupeliso8601(self):
    """datetimetupeliso8601() parses dotted ISO date-times to struct_time."""
    cases = {
        '1980-12-31T23.59': time.struct_time([1980, 12, 31, 23, 59, 0, 2, 366, -1]),
        '1980-12-31T23.59.58': time.struct_time([1980, 12, 31, 23, 59, 58, 2, 366, -1]),
    }
    for raw, expected in cases.items():
        self.assertEqual(OrgFormat.datetimetupeliso8601(raw), expected)
def toR(self, t1_as_leading_indicator=False, dist_scale=None, time_scale_days=None, verbose=True, nrand=1000):
    """Clean the two point sets and delegate to ``test_statistics_function``.

    WARNING: mutates self in place — drops points whose x-coordinate is NaN
    and converts t1/t2 from time tuples to epoch seconds.
    """
    # Drop points with undefined spatial location (NaN x).
    wh1 = np.where(~np.isnan(self.x1))
    wh2 = np.where(~np.isnan(self.x2))
    self.x1, self.y1, self.t1, self.N1 = self.x1[wh1], self.y1[wh1], self.t1[wh1], len(wh1[0])
    self.x2, self.y2, self.t2, self.N2 = self.x2[wh2], self.y2[wh2], self.t2[wh2], len(wh2[0])
    # Now compute the times in seconds for easy math.
    # NOTE(review): assumes each t1/t2 element is a 9-item time tuple
    # acceptable to time.struct_time — confirm against callers.
    self.t1 = np.array([time.mktime(time.struct_time(i)) for i in self.t1])
    self.t2 = np.array([time.mktime(time.struct_time(i)) for i in self.t2])
    # Convert the time scale from days to seconds; None is passed through.
    time_scale_days = time_scale_days if time_scale_days == None else time_scale_days * 24 * 3600
    return test_statistics_function(self, t1_as_leading_indicator, dist_scale, time_scale_days, verbose, nrand)
def test_aliases_fields_are_skipped_unless_themselves_aliased(self):
    """A field used as an alias target is skipped unless it is itself
    re-aliased to another field."""
    entry = dict(
        guid='http://news.com/rss/1234abcd',
        published_parsed=struct_time([2015, 2, 25, 16, 45, 23, 2, 56, 0]),
        updated_parsed=struct_time([2015, 2, 25, 17, 52, 11, 2, 56, 0]),
        title='Breaking News!',
        summary='This is body text.',
        link='http://news.com/1234abcd',
    )
    # `summary` feeds body_text, and is itself fed by `link`.
    aliases = [{'body_text': 'summary'}, {'summary': 'link'}]

    item = self.instance._create_item(entry, aliases)

    self.assertEqual(item.get('abstract'), 'http://news.com/1234abcd')
def test_timetuple(self):
    """timetuple() exposes a JalaliDate as a time.struct_time."""
    expected = time.struct_time((1361, 6, 15, 0, 0, 0, 2, 170, -1))
    self.assertEqual(JalaliDate(1361, 6, 15).timetuple(), expected)
# Fall back to a local email file when no directory was supplied.
if gmail_dir == '':
    gmail_dir = '.\email.txt'
f = open(gmail_dir, 'r')
line = f.readlines()
master_email = line[0]  # first line of the file is the master address
f.close()
l = input(
    'Enter the maximum duration (in minutes) or cutoff time (hh:mm) of your remote desktop session (default is 60):\n'
)
if l == '':
    l = '60'
if (l.find(':') > 0):
    # hh:mm cutoff time: build a struct_time for today at that time.
    now = time.localtime()
    then = list(now)
    then[3:6] = [int(i) for i in (l.split(":") + [0])]  # hour, minute, second=0
    then = time.struct_time(then)
    if (then < now):
        print("\nThe scheduled end time must be today sometime in the future.")
        print("\nPress any key to exit.")
        input()
        quit()
    l = (time.mktime(then) - time.mktime(now)) / 60  # Convert time in seconds to time in minutes
else:
    l = float(l)
l = l * 60  # convert maximum duration to seconds
l -= 30  # terminate 30 seconds earlier in case another remote desktop session is scheduled after this one
session = {}
session[
    'time'] = l  # required for generate function in case code expires after session end time
def decode(self):
    """decode the current _TP_VPe_HHMMSS value into a Python struct_time

    Only the time-of-day fields are meaningful; date and extra fields
    are zeroed.
    """
    hour = self['Hour'].decode()
    minute = self['Min'].decode()
    second = self['Sec'].decode()
    return struct_time((0, 0, 0, hour, minute, second, 0, 0, 0))
from adafruit_pyportal import PyPortal
from adafruit_bitmap_font import bitmap_font
from adafruit_display_text.text_area import TextArea

# The time of the thing!
EVENT_YEAR = 2019
EVENT_MONTH = 4
EVENT_DAY = 15
EVENT_HOUR = 9
EVENT_MINUTE = 0
# we'll make a python-friendly structure
event_time = time.struct_time((
    EVENT_YEAR, EVENT_MONTH, EVENT_DAY,
    EVENT_HOUR, EVENT_MINUTE, 0,  # we don't track seconds
    -1, -1, False))  # we dont know day of week/year or DST

# determine the current working directory
# needed so we know where to find files
cwd = ("/" + __file__).rsplit('/', 1)[0]

# Initialize the pyportal object and let us know what data to fetch and where
# to display it
pyportal = PyPortal(status_neopixel=board.NEOPIXEL,
                    default_bg=cwd + "/countdown_background.bmp")

big_font = bitmap_font.load_font(cwd + "/fonts/Helvetica-Bold-36.bdf")
big_font.load_glyphs(b'0123456789')  # pre-load glyphs for fast printing
def utctimetuple(self):
    """Return a fixed UTC timetuple: 2013-08-01 00:00:00 (Thu, day 217)."""
    fields = (2013, 8, 1) + (0, 0, 0) + (3, 217, 0)
    return time.struct_time(fields)
last_check = None q = RowCountdown(display, displayio) idx = 0 while True: one_second_start = time.monotonic() if last_check is None or time.monotonic() > last_check + 864000: try: network.get_local_time() last_check = time.monotonic() except RuntimeError as e: print("Some error occured, retrying! -", e) secs = q.timediff( t1=time.mktime(time.struct_time((2021, 1, 1, 0, 0, 0, 4, -1, -1)))) #print("Days Left = ",dleft) if secs < 7200: print("HOW DID WE GET HERE") n = NEW_YEAR() display.show(n.group) while True: n.one_cycle() ball_pos = 16 - (secs // 5400) if ball_pos < 0: ball_pos = 0 elif ball_pos > 16: ball_pos = 16 q.tile_grid.y = ball_pos while time.monotonic() < one_second_start + 1:
def create(year=1970, month=1, day=1, hour=0, min=0, sec=0):
    """Build a struct_time from individual fields (Unix epoch defaults);
    tm_wday, tm_yday and tm_isdst are all zeroed."""
    fields = (year, month, day, hour, min, sec)
    return struct_time(fields + (0, 0, 0))
def test_parse(self):
    """Timing.parse() turns 'YYYYMMDD_HHMMSS' into a struct_time."""
    parsed = Timing.parse("20180605_215959")
    expected = time.struct_time((2018, 6, 5, 21, 59, 59, 1, 156, -1))
    self.assertEqual(expected, parsed)
if change_namespace: x = 2 return else: x = 1 def nested(): return x recur(change_namespace=1) return nested() assert recur() == 1 #issue 131 import time import datetime target = time.struct_time([1970, 1, 1, 0, 0, 0, 3, 1, 0]) assert time.gmtime(0).args == target.args target = time.struct_time([1970, 1, 1, 0, 1, 40, 3, 1, 0]) assert time.gmtime(100).args == target.args target = time.struct_time([2001, 9, 9, 1, 46, 40, 6, 252, 0]) assert time.gmtime(1000000000).args == target.args target1 = datetime.datetime(1969, 12, 31, 12, 0) target2 = datetime.datetime(1970, 1, 1, 12, 0) ## depending on timezone this could be any hour near midnight Jan 1st, 1970 assert target1 <= datetime.datetime.fromtimestamp(0) <= target2 try: time.asctime(1) except TypeError: pass except:
def _get_timerange(self, t): st = time.localtime(int(t)) start = int( time.mktime(time.struct_time((st[0], st[1], st[2], 0, 0, 0, st[6], st[7], st[8])))) end = start + 86399 return start, end
def quickptime(str):
    """Parse a 'YYYYMMDDhhmm' string into a struct_time with seconds zero
    and tm_wday/tm_yday left unset (-1).

    NOTE: the parameter shadows the builtin ``str``; the name is kept for
    interface compatibility.
    """
    year, month, day = int(str[0:4]), int(str[4:6]), int(str[6:8])
    hour, minute = int(str[8:10]), int(str[10:12])
    return time.struct_time((year, month, day, hour, minute, 0, -1, -1, 0))
def insert(urlCode_url, patterns):
    """Crawl the exchange-rate pages described by *patterns*, normalise
    each scraped row and bulk-insert it into the site's MySQL rate table.

    Python 2 code.  urlCode_url is a (urlCode, url) pair; patterns is the
    scraping configuration indexed by the module-level *_col constants.
    NOTE(review): the original source was flattened onto one line — the
    indentation of the DB section (inside the per-link loop) is the most
    plausible reconstruction and should be confirmed against the original.
    """
    urlCode = urlCode_url[0]
    url = urlCode_url[1]
    #convert patterns from tuple to list
    patterns = list(patterns)
    # standard order [0,1,2,3,4,5] = [buyCCY, sellCCY, bid, offer, date, unit]
    order = convertToList(patterns[order_col])
    postData = patterns[postData_col]
    recursive_pt = patterns[recursive_col:recursive_col + 2]
    endPattern = patterns[main_end_col]
    inverse = patterns[inverse_col]
    patterns = patterns[:postData_col - 1]
    #check recursive crawl
    if recursive_pt[0] != '':
        listOfUrls = getListOfUrl(url, recursive_pt[0], recursive_pt[1])
    else:
        listOfUrls = [url]
    for link in listOfUrls:
        data = Connect2Web(link, patterns, order, postData, endPattern)
        vals = []
        for e in data:
            #[buyCCY, sellCCY, bid, offer, date, unit]
            full_date = e[date_pos].strip()
            if full_date and full_date != '-':
                # Try the known site date formats in turn.
                try:
                    full_date = time.strptime(full_date, '%A, %d %b %Y %H:%M:%S')
                except ValueError:
                    try:
                        full_date = time.strptime(full_date, '%m/%d/%Y at %H:%M %p')
                    except ValueError:
                        try:
                            full_date = time.strptime(full_date, '%d/%m %H:%M:%S')
                        except ValueError:
                            try:
                                full_date = time.strptime(
                                    full_date, '%Y-%m-%d %H:%M:%S')
                            except ValueError:
                                print "Date Time format does not matched our stored format. Please review and update!"
                                break
            else:
                #if it does not provide the time, take the current time
                full_date = datetime.datetime.now().timetuple()
            #deal with %m/%d - no year indicator, have to push the current year into time_struct/tuple format
            if full_date[0] == 1900:  #year part
                full_date = time.struct_time(
                    tuple([time.localtime()[0]]) + full_date[1:]
                )  #tuple objects are immutable, need to construct a new obj
            date_p = strftime('%d-%m-%Y', full_date)
            short_date = strftime('%d%m%y', full_date)
            time_p = strftime('%H:%M:%S', full_date)
            #change date item to push it into a correct format
            e[4] = date_p
            #deal with CCY issue
            for i in range(0, 2):
                #special case for Austria
                if e[i] == 'Austria' or e[i] == "":
                    continue
                #if it is not a valid code/country/pair
                if not getCode(e[i]):
                    e[i] = -1
                    break
                else:
                    code = getCode(e[i])
                    #it is a pair
                    if len(code) == 6:
                        e[1] = code[3:]
                        e[0] = code[:3]
                        break
                    #it is a country name or a valid code
                    else:
                        e[i] = code
            if e[0] == -1 or e[1] == -1:
                break
            elif e[0] == 'Austria' or e[1] == 'Austria' or e[0] == e[1]:
                continue
            else:
                buyCCY = e[0]
                sellCCY = e[1]
            #special case for EZFX
            if urlCode != 'EZFX':
                ID = generateID(short_date, urlCode, buyCCY, sellCCY)
            elif data.index(e) % 2 == 0:
                ID = generateID(short_date, 'MAY', buyCCY, sellCCY)
            else:
                #get buyCCY, sellCCY from Maybank rates: the 1st assignment
                #for Insert to DB, 2nd for generating ID
                e[0] = data[data.index(e) - 1][0]
                e[1] = data[data.index(e) - 1][1]
                buyCCY = getCode(e[0])
                sellCCY = getCode(e[1])
                if not buyCCY or not sellCCY or buyCCY == sellCCY:
                    continue
                ID = generateID(short_date, 'CIT', buyCCY, sellCCY)
            #remove , in bid and offer values
            e[2] = e[2].replace(',', '')
            e[3] = e[3].replace(',', '')
            #do not add if it is all 0
            if e[2] != '' and e[3] != '':
                if round(float(e[2]), 4) == 0.0000 and round(float(e[3]), 4) == 0.0000:
                    continue
            row = [ID, urlCode]
            #push all into a tuple according pre-set format
            for entry in e:
                row.append(entry)
            row.insert(len(row) - 1, time_p)
            vals.append(tuple(row))
        #determine which table to push data
        if urlCode == 'TRA' or urlCode == 'MMM' or urlCode == 'MUS':
            nameOfTb = 'RATES'
        else:
            nameOfTb = urlCode + 'rates'
        db = MySQLdb.connect("localhost", "root", "ezfx0109", "crawlerdb")
        cursor = db.cursor()
        #Prepare SQL query to INSERT a record into the database.
        sql = "INSERT IGNORE INTO " + nameOfTb + " (ID, URL, BUYCCY, SELLCCY, BID, OFFER, DATE_P, TIME_P, UNIT)\
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
        try:
            # Execute the SQL command
            cursor.executemany(sql, tuple(vals))
            # Commit your changes in the database
            db.commit()
            # add Inverse
            sql = "UPDATE " + nameOfTb + " SET Inverse = '%s' WHERE url = '%s'" % (
                'Y', urlCode)
            if inverse:
                cursor.execute(sql)
                # Commit your changes in the database
                db.commit()
        except:
            # Rollback in case there is any error
            db.rollback()
        db.close()
def user_stat_data(v):
    """Aggregate site-wide daily statistics for the admin stats endpoint.

    Builds per-day counts of signups, posts, guilds, comments and votes for
    the last ``days`` days (query parameter, default 30), plus a snapshot of
    user totals, renders plots, and returns everything as JSON.

    ``v`` is the viewer injected by the route decorator; it is not used here.
    """
    days = int(request.args.get("days", 30))
    now = time.gmtime()
    # Truncate "now" to 00:00:00 UTC of the current day.
    midnight_this_morning = time.struct_time(
        (now.tm_year, now.tm_mon, now.tm_mday, 0, 0, 0, now.tm_wday, now.tm_yday, 0))
    today_cutoff = calendar.timegm(midnight_this_morning)
    day = 3600 * 24
    # day_cutoffs[0] is "right now"; each subsequent entry is a UTC midnight
    # boundary walking backwards `days` days (newest first).  Each bucket i
    # therefore spans (day_cutoffs[i + 1], day_cutoffs[i]).
    day_cutoffs = [today_cutoff - day * i for i in range(days)]
    day_cutoffs.insert(0, calendar.timegm(now))
    daily_signups = [{
        "date": time.strftime("%d %b %Y", time.gmtime(day_cutoffs[i + 1])),
        "day_start": day_cutoffs[i + 1],
        "signups": db.query(User).filter(User.created_utc < day_cutoffs[i],
                                         User.created_utc > day_cutoffs[i + 1]).count()
    } for i in range(len(day_cutoffs) - 1)]
    # Point-in-time user totals (not bucketed by day).
    user_stats = {
        'current_users': db.query(User).filter_by(is_banned=0, reserved=None).count(),
        'banned_users': db.query(User).filter(User.is_banned != 0).count(),
        'reserved_users': db.query(User).filter(User.reserved != None).count(),
        'email_verified_users': db.query(User).filter_by(is_banned=0, is_activated=True).count(),
        'real_id_verified_users': db.query(User).filter(User.reserved != None,
                                                        User.real_id != None).count()
    }
    post_stats = [{
        "date": time.strftime("%d %b %Y", time.gmtime(day_cutoffs[i + 1])),
        "day_start": day_cutoffs[i + 1],
        "posts": db.query(Submission).filter(
            Submission.created_utc < day_cutoffs[i],
            Submission.created_utc > day_cutoffs[i + 1]).count()
    } for i in range(len(day_cutoffs) - 1)]
    guild_stats = [{
        "date": time.strftime("%d %b %Y", time.gmtime(day_cutoffs[i + 1])),
        "day_start": day_cutoffs[i + 1],
        "members": db.query(Board).filter(Board.created_utc < day_cutoffs[i],
                                          Board.created_utc > day_cutoffs[i + 1]).count()
    } for i in range(len(day_cutoffs) - 1)]
    comment_stats = [{
        "date": time.strftime("%d %b %Y", time.gmtime(day_cutoffs[i + 1])),
        "day_start": day_cutoffs[i + 1],
        "comments": db.query(Comment).filter(
            Comment.created_utc < day_cutoffs[i],
            Comment.created_utc > day_cutoffs[i + 1]).count()
    } for i in range(len(day_cutoffs) - 1)]
    vote_stats = [{
        "date": time.strftime("%d %b %Y", time.gmtime(day_cutoffs[i + 1])),
        "day_start": day_cutoffs[i + 1],
        "votes": db.query(Vote).filter(Vote.created_utc < day_cutoffs[i],
                                       Vote.created_utc > day_cutoffs[i + 1]).count()
    } for i in range(len(day_cutoffs) - 1)]
    # create_plot returns two image names; x[0] single plot, x[1] multi plot.
    x = create_plot(sign_ups={'daily_signups': daily_signups},
                    guilds={'guild_stats': guild_stats},
                    posts={'post_stats': post_stats},
                    comments={'comment_stats': comment_stats},
                    votes={'vote_stats': vote_stats})
    final = {
        "user_stats": user_stats,
        "signup_data": daily_signups,
        "post_data": post_stats,
        "guild_data": guild_stats,
        "comment_data": comment_stats,
        "vote_data": vote_stats,
        "single_plot": f"https://i.ruqqus.com/{x[0]}",
        "multi_plot": f"https://i.ruqqus.com/{x[1]}"
    }
    return jsonify(final)
def _strptime(data_string, format='%a %b %d %H:%M:%S %Y'):
    """Return a (struct_time, fraction) pair parsed from *data_string*
    according to *format*.

    ``fraction`` is the microseconds captured by a ``%f`` directive (0 when
    absent).  Raises ValueError for a bad directive, a non-matching string,
    or trailing unconverted data.

    FIX: the directive loop previously iterated ``found_dict.iterkeys()``,
    which only exists on Python 2 dicts; iterating the dict directly works
    on both Python 2 and 3.
    """
    global _TimeRE_cache
    global _regex_cache
    with _cache_lock:
        # Rebuild the cached TimeRE when the locale changed under us, and
        # bound the compiled-format cache.
        if _getlang() != _TimeRE_cache.locale_time.lang:
            _TimeRE_cache = TimeRE()
            _regex_cache.clear()
        if len(_regex_cache) > _CACHE_MAX_SIZE:
            _regex_cache.clear()
        locale_time = _TimeRE_cache.locale_time
        format_regex = _regex_cache.get(format)
        if not format_regex:
            try:
                format_regex = _TimeRE_cache.compile(format)
            # KeyError raised when a bad format is found; can be specified as
            # \\, in which case it was a stray % but with a space after it
            except KeyError as err:
                bad_directive = err.args[0]
                if bad_directive == '\\':
                    bad_directive = '%'
                del err
                raise ValueError("'%s' is a bad directive in format '%s'" %
                                 (bad_directive, format))
            # IndexError only occurs when the format string is "%"
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format)
            _regex_cache[format] = format_regex
    found = format_regex.match(data_string)
    if not found:
        raise ValueError('time data %r does not match format %r' %
                         (data_string, format))
    if len(data_string) != found.end():
        raise ValueError('unconverted data remains: %s' %
                         data_string[found.end():])
    year = 1900
    month = day = 1
    hour = minute = second = fraction = 0
    tz = -1
    # Default to -1 to signify that values not known; not critical to have,
    # though
    week_of_year = -1
    week_of_year_start = -1
    weekday = julian = -1
    found_dict = found.groupdict()
    for group_key in found_dict:  # py3 fix: was found_dict.iterkeys()
        # Directives not explicitly handled below:
        #   c, x, X -- handled by making out of other directives
        #   U, W    -- worked out below once the weekday is known
        if group_key == 'y':
            year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            # value in the range of [00, 68] is in the century 2000
            if year <= 68:
                year += 2000
            else:
                year += 1900
        elif group_key == 'Y':
            year = int(found_dict['Y'])
        elif group_key == 'm':
            month = int(found_dict['m'])
        elif group_key == 'B':
            month = locale_time.f_month.index(found_dict['B'].lower())
        elif group_key == 'b':
            month = locale_time.a_month.index(found_dict['b'].lower())
        elif group_key == 'd':
            day = int(found_dict['d'])
        elif group_key == 'H':
            hour = int(found_dict['H'])
        elif group_key == 'I':
            hour = int(found_dict['I'])
            ampm = found_dict.get('p', '').lower()
            # If there was no AM/PM indicator, we'll treat this like AM
            if ampm in ('', locale_time.am_pm[0]):
                # We're in AM so the hour is correct unless we're
                # looking at 12 midnight.  12 midnight == 12 AM == hour 0.
                if hour == 12:
                    hour = 0
            elif ampm == locale_time.am_pm[1]:
                # We're in PM so we need to add 12 to the hour unless we're
                # looking at 12 noon.  12 noon == 12 PM == hour 12.
                if hour != 12:
                    hour += 12
        elif group_key == 'M':
            minute = int(found_dict['M'])
        elif group_key == 'S':
            second = int(found_dict['S'])
        elif group_key == 'f':
            s = found_dict['f']
            # Pad to always return microseconds.
            s += '0' * (6 - len(s))
            fraction = int(s)
        elif group_key == 'A':
            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
        elif group_key == 'a':
            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
        elif group_key == 'w':
            weekday = int(found_dict['w'])
            # %w counts Sunday as 0; struct_time counts Monday as 0.
            if weekday == 0:
                weekday = 6
            else:
                weekday -= 1
        elif group_key == 'j':
            julian = int(found_dict['j'])
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            # U starts the week on Sunday, W on Monday.
            if group_key == 'U':
                week_of_year_start = 6
            else:
                week_of_year_start = 0
        elif group_key == 'Z':
            found_zone = found_dict['Z'].lower()
            for value, tz_values in enumerate(locale_time.timezone):
                if found_zone in tz_values:
                    # Deal with bad locale setup where timezone names are the
                    # same and yet time.daylight is true; too ambiguous to
                    # attribute an offset, except for UTC/GMT.
                    if time.tzname[0] == time.tzname[
                            1] and time.daylight and found_zone not in ('utc', 'gmt'):
                        break
                    else:
                        tz = value
                        break
    # If we know the week of the year and the weekday, derive the Julian day.
    if julian == -1 and week_of_year != -1 and weekday != -1:
        week_starts_Mon = True if week_of_year_start == 0 else False
        julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                          week_starts_Mon)
    # Cannot pre-calculate datetime_date() since it can change in the Julian
    # calculation and thus could have a different value for the day-of-week
    # calculation.
    if julian == -1:
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - datetime_date(
            year, 1, 1).toordinal() + 1
    else:
        # Assume that if they bothered to include Julian day it will be
        # accurate.
        datetime_result = datetime_date.fromordinal(
            julian - 1 + datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday == -1:
        weekday = datetime_date(year, month, day).weekday()
    return (time.struct_time((year, month, day, hour, minute, second,
                              weekday, julian, tz)), fraction)
def gmtime():
    """Deterministic stand-in for time.gmtime().

    Always reports 2012-09-09 12:09:33 (a Sunday, day 253 of the year, no
    DST flag) so that callers see a fixed, repeatable clock.
    """
    fixed_fields = (2012, 9, 9, 12, 9, 33, 6, 253, 0)
    return time.struct_time(fixed_fields)
def test_POSIXlt_from_invalidpythontime():
    # Two valid struct_time entries followed by a non-time element:
    # POSIXlt construction must reject the mixed sequence.
    mixed_input = [time.struct_time(_dateval_tuple),
                   time.struct_time(_dateval_tuple),
                   'foo']
    with pytest.raises(ValueError):
        robjects.POSIXlt(mixed_input)
# NOTE(review): fragment of a Python 2 geocoding/directions script; the
# surrounding code (definition of `results`, `reverse_geocode`, `args`, and
# the rest of the while-loop body) is outside this excerpt.
data = results[0]
location = data['geometry']['location']
lat, lng = location['lat'], location['lng']
source = "%s,%s" % (lat, lng)
print source
# Persist the chosen origin point for later runs.
with open('source.txt', 'w') as sourcef:
    sourcef.write("%s,%s\n" % (lat, lng))
results = reverse_geocode(lat, lng)
print 'Reverse geocoded address for lat,lng: %.3f,%.3f' % (lat, lng)
print '\n'.join([x['formatted_address'] for x in results])
print
# Throttle before the next API call.
time.sleep(5)
# Epoch timestamp for 8:00 local time on 2014-07-14 -- presumably used as a
# fixed departure time for the directions queries (TODO confirm downstream).
eight_am = int(
    time.mktime(time.struct_time([2014, 7, 14, 8, 0, 0, 0, 0, 0])))
# Sweep a lat/lng grid from min_lng to max_lng and max_lat down to min_lat.
# starting longitude
lng_f = args['min_lng']
min_lat_f = args['min_lat']
while lng_f < args['max_lng']:
    # starting latitude
    lat_f = args['max_lat']
    while lat_f >= min_lat_f:
        print 'Querying directions for (%.6f, %.6f)' % (lat_f, lng_f)
        key = '%.6f,%.6f' % (lat_f, lng_f)
        # NOTE(review): the loop body continues past this excerpt (lat_f is
        # presumably decremented further down).
def test_POSIXct_from_invalidpythontime():
    # 'foo' lacks the struct_time attributes (e.g. tm_zone) the converter
    # reads, so POSIXct construction must raise AttributeError.
    entries = [time.struct_time(_dateval_tuple),
               time.struct_time(_dateval_tuple)]
    entries.append('foo')
    with pytest.raises(AttributeError):
        robjects.POSIXct(entries)
# display the current time since its the last-update updated_at = "%d/%d\n%d:%02d" % now[1:5] magtag.set_text(updated_at, 6, False) # get data from the Covid Tracking Project value = magtag.fetch() print("Response is", value) # OK we're done! magtag.peripherals.neopixels.fill(0x000F00) # greten except (ValueError, RuntimeError) as e: print("Some error occured, trying again later -", e) time.sleep(2) # let screen finish updating # we only wanna wake up once a day, around the event update time: event_time = time.struct_time((now[0], now[1], now[2], DAILY_UPDATE_HOUR, 0, 0, -1, -1, now[8])) # how long is that from now? remaining = time.mktime(event_time) - time.mktime(now) if remaining < 0: # ah its aready happened today... remaining += 24 * 60 * 60 # wrap around to the next day remaining_hrs = remaining // 3660 remaining_min = (remaining % 3600) // 60 print("Gonna zzz for %d hours, %d minutes" % (remaining_hrs, remaining_min)) # Turn it all off and go to bed till the next update time magtag.exit_and_deep_sleep(remaining)
from tqdm import tqdm import requests import time import os BaseUrl_1 = "http://61.56.11.42/nidsB/images/QPESUMSgoogle/cref2d_rad/COMPREF." "yyyymmdd.hhmm" BaseUrl_2 = ".LST.png" archfiles = os.listdir(".") timestart = time.time() - 86400 * 7 time_st = time.struct_time(time.localtime(timestart)) timestart = timestart - ((time_st[4] % 10) * 60 + time_st[5]) while timestart < time.time() - 600: timestring = time.strftime("%Y%m%d.%H%M", time.localtime(timestart)) url = BaseUrl_1 + timestring + BaseUrl_2 filename = time.strftime("%Y%m%d_%H%M", time.localtime(timestart)) + ".png" print "Finding " + filename if (filename not in archfiles): print("Downloading %s as %s \r\n" % (url, filename)) response = requests.get(url, stream=True) with open(filename, "wb") as handle: for data in tqdm(response.iter_content(1024 * 100)): handle.write(data) pass timestart = timestart + 600 pass print "finish downloading last 7 days data" t = time.time()
#### TIME CLASS ####
import time

print(time.asctime())        # human-readable current local time
# print(time.localtime())    # struct with date and timezone fields
# print(dir(time))
# print(help(time.strftime))
# time.process_time()
# time.gmtime()

a = time.localtime()         # current local time as a struct_time
b = time.time()              # seconds since the epoch (float)
# FIX: time.struct_time() with no argument raises TypeError -- the
# constructor requires a 9-item sequence.  Build one from localtime().
c = time.struct_time(time.localtime())
print(f'Hora: {a[3]}hrs {a[4]}min')
time.sleep(2)  # wait, in seconds
# NOTE(review): tail fragment of a time.strptime()-style parser; the
# enclosing function, the '%Z' timezone for-loop, and the earlier directive
# handling are outside this excerpt, so the deep indentation below belongs
# to that invisible context.
                if (time.tzname[0] == time.tzname[1] and time.daylight
                        and found_zone not in ("utc", "gmt")):
                    # Both tzname entries identical while DST is enabled:
                    # the abbreviation is too ambiguous to map to an offset
                    # (except utc/gmt), so leave tz undetermined.
                    break
                else:
                    tz = value
                    break
    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year.
    if julian == -1 and week_of_year != -1 and weekday != -1:
        week_starts_Mon = True if week_of_year_start == 0 else False
        julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                          week_starts_Mon)
    # Cannot pre-calculate datetime_date() since can change in Julian
    # calculation and thus could have different value for the day of the week
    # calculation.
    if julian == -1:
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
            datetime_date(year, 1, 1).toordinal() + 1
    else:
        # Assume that if they bothered to include Julian day it will
        # be accurate.
        datetime_result = datetime_date.fromordinal(
            (julian - 1) + datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday == -1:
        weekday = datetime_date(year, month, day).weekday()
    return time.struct_time(
        (year, month, day, hour, minute, second, weekday, julian, tz))
def test_POSIXct_from_pythontime():
    # Build two struct_time entries and convert them to an R POSIXct vector.
    times = [time.struct_time(_dateval_tuple) for _ in range(2)]
    res = robjects.POSIXct(times)
    # (Preserves the original check, which asserts on the input length.)
    assert len(times) == 2
async def show_async(
    rpc_port: Optional[int],
    state: bool,
    show_connections: bool,
    exit_node: bool,
    add_connection: str,
    remove_connection: str,
    block_header_hash_by_height: str,
    block_by_header_hash: str,
) -> None:
    """Implement the `chia show` CLI command against a running full node RPC.

    Each boolean/string argument enables one sub-action (print chain state,
    list connections, stop the node, add/remove a peer, look up a block).
    All output goes to stdout; RPC/connection errors are caught and reported
    rather than raised.
    """
    import aiohttp
    import time
    import traceback
    from time import localtime, struct_time
    from typing import List, Optional
    from chia.consensus.block_record import BlockRecord
    from chia.rpc.full_node_rpc_client import FullNodeRpcClient
    from chia.server.outbound_message import NodeType
    from chia.types.full_block import FullBlock
    from chia.util.bech32m import encode_puzzle_hash
    from chia.util.byte_types import hexstr_to_bytes
    from chia.util.config import load_config
    from chia.util.default_root import DEFAULT_ROOT_PATH
    from chia.util.ints import uint16
    from chia.util.misc import format_bytes

    try:
        config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
        self_hostname = config["self_hostname"]
        if rpc_port is None:
            # Fall back to the configured RPC port when none was given.
            rpc_port = config["full_node"]["rpc_port"]
        client = await FullNodeRpcClient.create(self_hostname, uint16(rpc_port),
                                                DEFAULT_ROOT_PATH, config)
        if state:
            blockchain_state = await client.get_blockchain_state()
            if blockchain_state is None:
                print("There is no blockchain found yet. Try again shortly")
                return None
            peak: Optional[BlockRecord] = blockchain_state["peak"]
            difficulty = blockchain_state["difficulty"]
            sub_slot_iters = blockchain_state["sub_slot_iters"]
            synced = blockchain_state["sync"]["synced"]
            sync_mode = blockchain_state["sync"]["sync_mode"]
            total_iters = peak.total_iters if peak is not None else 0
            num_blocks: int = 10
            if synced:
                print("Current Blockchain Status: Full Node Synced")
                print("\nPeak: Hash:",
                      peak.header_hash if peak is not None else "")
            elif peak is not None and sync_mode:
                sync_max_block = blockchain_state["sync"]["sync_tip_height"]
                sync_current_block = blockchain_state["sync"]["sync_progress_height"]
                print(f"Current Blockchain Status: Syncing {sync_current_block}/{sync_max_block}.")
                print("Peak: Hash:",
                      peak.header_hash if peak is not None else "")
            elif peak is not None:
                print(f"Current Blockchain Status: Not Synced. Peak height: {peak.height}")
            else:
                print("\nSearching for an initial chain\n")
                print("You may be able to expedite with 'chia show -a host:port' using a known node.\n")
            if peak is not None:
                # Find the timestamp of the peak or, for non-transaction
                # peaks, of the most recent transaction block behind it.
                if peak.is_transaction_block:
                    peak_time = peak.timestamp
                else:
                    peak_hash = peak.header_hash
                    curr = await client.get_block_record(peak_hash)
                    while curr is not None and not curr.is_transaction_block:
                        curr = await client.get_block_record(curr.prev_hash)
                    peak_time = curr.timestamp
                peak_time_struct = struct_time(localtime(peak_time))
                print(
                    " Time:",
                    f"{time.strftime('%a %b %d %Y %T %Z', peak_time_struct)}",
                    f" Height: {peak.height:>10}\n",
                )
                print("Estimated network space: ", end="")
                print(format_bytes(blockchain_state["space"]))
                print(f"Current difficulty: {difficulty}")
                print(f"Current VDF sub_slot_iters: {sub_slot_iters}")
                print("Total iterations since the start of the blockchain:",
                      total_iters)
                print("")
                print(" Height: | Hash:")
                # Walk back up to num_blocks blocks from the peak.
                added_blocks: List[BlockRecord] = []
                curr = await client.get_block_record(peak.header_hash)
                while curr is not None and len(
                        added_blocks) < num_blocks and curr.height > 0:
                    added_blocks.append(curr)
                    curr = await client.get_block_record(curr.prev_hash)
                for b in added_blocks:
                    print(f"{b.height:>9} | {b.header_hash}")
            else:
                print("Blockchain has no blocks yet")
            # if called together with show_connections, leave a blank line
            if show_connections:
                print("")
        if show_connections:
            connections = await client.get_connections()
            print("Connections:")
            print("Type IP Ports NodeID Last Connect"
                  + " MiB Up|Dwn")
            for con in connections:
                last_connect_tuple = struct_time(localtime(con["last_message_time"]))
                last_connect = time.strftime("%b %d %T", last_connect_tuple)
                mb_down = con["bytes_read"] / (1024 * 1024)
                mb_up = con["bytes_written"] / (1024 * 1024)
                host = con["peer_host"]
                # Strip IPv6 brackets
                if host[0] == "[":
                    host = host[1:39]
                # Nodetype length is 9 because INTRODUCER will be deprecated
                if NodeType(con["type"]) is NodeType.FULL_NODE:
                    peak_height = con["peak_height"]
                    peak_hash = con["peak_hash"]
                    if peak_hash is None:
                        peak_hash = "No Info"
                    if peak_height is None:
                        peak_height = 0
                    con_str = (
                        f"{NodeType(con['type']).name:9} {host:38} "
                        f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
                        f" {con['node_id'].hex()[:8]}... "
                        f"{last_connect} "
                        f"{mb_up:7.1f}|{mb_down:<7.1f}"
                        f"\n "
                        f"-SB Height: {peak_height:8.0f} -Hash: {peak_hash[2:10]}..."
                    )
                else:
                    con_str = (
                        f"{NodeType(con['type']).name:9} {host:38} "
                        f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
                        f" {con['node_id'].hex()[:8]}... "
                        f"{last_connect} "
                        f"{mb_up:7.1f}|{mb_down:<7.1f}")
                print(con_str)
            # if called together with state, leave a blank line
            if state:
                print("")
        if exit_node:
            node_stop = await client.stop_node()
            print(node_stop, "Node stopped")
        if add_connection:
            if ":" not in add_connection:
                print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
            else:
                # Split on the last ':' so IPv6 literals keep their colons.
                ip, port = (
                    ":".join(add_connection.split(":")[:-1]),
                    add_connection.split(":")[-1],
                )
                print(f"Connecting to {ip}, {port}")
                try:
                    await client.open_connection(ip, int(port))
                except Exception:
                    print(f"Failed to connect to {ip}:{port}")
        if remove_connection:
            result_txt = ""
            if len(remove_connection) != 8:
                result_txt = "Invalid NodeID. Do not include '.'"
            else:
                connections = await client.get_connections()
                for con in connections:
                    if remove_connection == con["node_id"].hex()[:8]:
                        print("Attempting to disconnect", "NodeID",
                              remove_connection)
                        try:
                            await client.close_connection(con["node_id"])
                        except Exception:
                            result_txt = f"Failed to disconnect NodeID {remove_connection}"
                        else:
                            result_txt = (
                                f"NodeID {remove_connection}... {NodeType(con['type']).name} "
                                f"{con['peer_host']} disconnected")
                    elif result_txt == "":
                        result_txt = f"NodeID {remove_connection}... not found"
            print(result_txt)
        if block_header_hash_by_height != "":
            block_header = await client.get_block_record_by_height(
                block_header_hash_by_height)
            if block_header is not None:
                print(f"Header hash of block {block_header_hash_by_height}: "
                      f"{block_header.header_hash.hex()}")
            else:
                print("Block height", block_header_hash_by_height, "not found")
        if block_by_header_hash != "":
            block: Optional[BlockRecord] = await client.get_block_record(
                hexstr_to_bytes(block_by_header_hash))
            full_block: Optional[FullBlock] = await client.get_block(
                hexstr_to_bytes(block_by_header_hash))
            # Would like to have a verbose flag for this
            if block is not None:
                assert full_block is not None
                prev_b = await client.get_block_record(block.prev_hash)
                # Difficulty of this block is the weight delta to its parent.
                if prev_b is not None:
                    difficulty = block.weight - prev_b.weight
                else:
                    difficulty = block.weight
                if block.is_transaction_block:
                    assert full_block.transactions_info is not None
                    block_time = struct_time(
                        localtime(
                            full_block.foliage_transaction_block.timestamp
                            if full_block.foliage_transaction_block else None))
                    block_time_string = time.strftime("%a %b %d %Y %T %Z",
                                                      block_time)
                    cost = str(full_block.transactions_info.cost)
                    tx_filter_hash = "Not a transaction block"
                    if full_block.foliage_transaction_block:
                        tx_filter_hash = full_block.foliage_transaction_block.filter_hash
                    fees: Any = block.fees
                else:
                    block_time_string = "Not a transaction block"
                    cost = "Not a transaction block"
                    tx_filter_hash = "Not a transaction block"
                    fees = "Not a transaction block"
                address_prefix = config["network_overrides"]["config"][
                    config["selected_network"]]["address_prefix"]
                farmer_address = encode_puzzle_hash(block.farmer_puzzle_hash,
                                                    address_prefix)
                pool_address = encode_puzzle_hash(block.pool_puzzle_hash,
                                                  address_prefix)
                pool_pk = (full_block.reward_chain_block.proof_of_space.
                           pool_public_key if full_block.reward_chain_block.
                           proof_of_space.pool_public_key is not None
                           else "Pay to pool puzzle hash")
                print(
                    f"Block Height {block.height}\n"
                    f"Header Hash 0x{block.header_hash.hex()}\n"
                    f"Timestamp {block_time_string}\n"
                    f"Weight {block.weight}\n"
                    f"Previous Block 0x{block.prev_hash.hex()}\n"
                    f"Difficulty {difficulty}\n"
                    f"Sub-slot iters {block.sub_slot_iters}\n"
                    f"Cost {cost}\n"
                    f"Total VDF Iterations {block.total_iters}\n"
                    f"Is a Transaction Block?{block.is_transaction_block}\n"
                    f"Deficit {block.deficit}\n"
                    f"PoSpace 'k' Size {full_block.reward_chain_block.proof_of_space.size}\n"
                    f"Plot Public Key 0x{full_block.reward_chain_block.proof_of_space.plot_public_key}\n"
                    f"Pool Public Key {pool_pk}\n"
                    f"Tx Filter Hash {tx_filter_hash}\n"
                    f"Farmer Address {farmer_address}\n"
                    f"Pool Address {pool_address}\n"
                    f"Fees Amount {fees}\n")
            else:
                print("Block with header hash", block_header_hash_by_height,
                      "not found")
    except Exception as e:
        if isinstance(e, aiohttp.ClientConnectorError):
            print(f"Connection error. Check if full node rpc is running at {rpc_port}")
            print("This is normal if full node is still starting up")
        else:
            tb = traceback.format_exc()
            print(f"Exception from 'show' {tb}")
    client.close()
    await client.await_closed()
def human_local_time(timestamp):
    """Format an epoch *timestamp* as local time, e.g. 'Thu Jan 01 1970 00:00:00 UTC'."""
    local_fields = struct_time(localtime(timestamp))
    return time.strftime("%a %b %d %Y %T %Z", local_fields)
#In python the time instanta are counted since 12 AM , 1st january 1970 #The function time() of the module the returns the total number of ticks spent since 12 AM, 1st january 1970 #A tick can be as the smallest unit to measure the time import time print(time.time()) print(time.localtime(time.time())) time.sleep(1) #in second #Output """ 1590693082.6978915 #in the form of tuple time.struct_time(tm_year=2020, tm_mon=5, tm_mday=29, tm_hour=0, tm_min=41, tm_sec=22, tm_wday=4, tm_yday=150, tm_isdst=0) """ import datetime print(datetime.datetime.now()) # Output : 2020-05-29 00:45:34.018003
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a time struct based on the input string and the format string."""
    # _strptime returns (field tuple, fraction); only the fields matter here,
    # truncated to the number of slots struct_time actually accepts.
    parsed_fields = _strptime(data_string, format)[0]
    return time.struct_time(parsed_fields[:time._STRUCT_TM_ITEMS])
def geolocation(self):
    """Most recent geolocation of the modem as measured by the Iridium
    constellation, including a timestamp of when the measurement was made.

    The modem replies to ``-MSGEO`` with ``<x>,<y>,<z>,<timestamp>`` where
    ``<x>, <y>, <z>`` form an ECEF (earth-centered, earth-fixed) grid code in
    kilometres (range -6376..+6376, resolution 4) and ``<timestamp>`` is a
    32-bit hex count of 90 ms Iridium ticks since the Iridium epoch
    (Sunday May 11, 2014, 14:23:55 UTC).

    Returns:
        tuple: ``(x, y, z, time_struct)`` on success, or
        ``(None, None, None, None)`` if the modem did not answer with "OK".
    """
    resp = self._uart_xfer("-MSGEO")
    if resp[-1].strip().decode() == "OK":
        # Payload looks like "-MSGEO: <x>,<y>,<z>,<hexticks>".
        temp = resp[1].strip().decode().split(":")[1].split(",")
        ticks_since_epoch = int(temp[3], 16)
        # One Iridium tick is 90 milliseconds.
        ms_since_epoch = ticks_since_epoch * 90
        # FIX: the previous string-slicing "divide by 1000" hack produced a
        # wrong value when ms_since_epoch had 1-2 digits and raised
        # ValueError (int('')) for 3 digits.  Integer floor division is
        # exact, avoids floating point entirely, and handles all sizes.
        secs_since_epoch = ms_since_epoch // 1000
        # Iridium epoch as a local struct_time (tm_isdst=-1 lets mktime
        # decide DST).
        iridium_epoch = time.struct_time((2014, 5, 11, 14, 23, 55, 6, -1, -1))
        iridium_epoch_unix = time.mktime(iridium_epoch)
        # Add the timestamp's seconds to the Iridium epoch.
        time_now_unix = iridium_epoch_unix + int(secs_since_epoch)
        return (
            int(temp[0]),
            int(temp[1]),
            int(temp[2]),
            time.localtime(time_now_unix),
        )
    return (None, ) * 4
async def show_async(args, parser):
    """Implement the `show` CLI command against a running full node RPC.

    Each attribute of *args* enables one sub-action (print chain state, list
    connections, stop the node, add/remove a peer, look up a block by hash).
    All output goes to stdout; connection errors are caught and reported.
    """
    # print(args)
    try:
        client = await RpcClient.create(args.rpc_port)
        # print (dir(client))
        # TODO: Add other rpc calls
        # TODO: pretty print response
        if args.state:
            blockchain_state = await client.get_blockchain_state()
            lca_block = blockchain_state["lca"]
            tips = blockchain_state["tips"]
            difficulty = blockchain_state["difficulty"]
            ips = blockchain_state["ips"]
            sync_mode = blockchain_state["sync_mode"]
            total_iters = lca_block.data.total_iters
            num_blocks: int = 10
            if sync_mode:
                sync_max_block = await client.get_heaviest_block_seen()
                # print (max_block)
                print(
                    "Current Blockchain Status. Full Node Syncing to",
                    sync_max_block.data.height,
                )
            else:
                print("Current Blockchain Status. Full Node Synced")
            print("Current least common ancestor:\n ", lca_block.header_hash)
            lca_time = struct_time(localtime(lca_block.data.timestamp))
            # Should auto format the align right of LCA height
            print(
                " LCA time:",
                time.strftime("%a %b %d %Y %T %Z", lca_time),
                " LCA height:",
                lca_block.height,
            )
            print("Heights of tips: " + str([h.height for h in tips]))
            print(f"Current difficulty: {difficulty}")
            print(f"Current VDF iterations per second: {ips:.0f}")
            print("Total iterations since genesis:", total_iters)
            print("")
            # Walk backwards from the tips, always expanding the tallest
            # head, until num_blocks blocks have been collected.
            heads: List[HeaderBlock] = tips
            added_blocks: List[HeaderBlock] = []
            while len(added_blocks) < num_blocks and len(heads) > 0:
                heads = sorted(heads, key=lambda b: b.height, reverse=True)
                max_block = heads[0]
                if max_block not in added_blocks:
                    added_blocks.append(max_block)
                heads.remove(max_block)
                prev: Optional[HeaderBlock] = await client.get_header(
                    max_block.prev_header_hash)
                if prev is not None:
                    heads.append(prev)
            latest_blocks_labels = []
            for i, b in enumerate(added_blocks):
                latest_blocks_labels.append(
                    f"{b.height}:{b.header_hash}"
                    f" {'LCA' if b.header_hash == lca_block.header_hash else ''}"
                    f" {'TIP' if b.header_hash in [h.header_hash for h in tips] else ''}"
                )
            for i in range(len(latest_blocks_labels)):
                if i < 2:
                    print(latest_blocks_labels[i])
                elif i == 2:
                    print(
                        latest_blocks_labels[i],
                        "\n",
                        " -----",
                    )
                else:
                    print("", latest_blocks_labels[i])
            # if called together with other arguments, leave a blank line
            if args.connections:
                print("")
        if args.connections:
            connections = await client.get_connections()
            print("Connections")
            print(
                f"Type IP Ports NodeID Last Connect"
                f" MB Up|Dwn")
            for con in connections:
                last_connect_tuple = struct_time(
                    localtime(con["last_message_time"]))
                # last_connect = time.ctime(con['last_message_time'])
                last_connect = time.strftime("%b %d %T", last_connect_tuple)
                mb_down = con["bytes_read"] / 1024
                mb_up = con["bytes_written"] / 1024
                # print (last_connect)
                con_str = (
                    f"{NodeType(con['type']).name:9} {con['peer_host']:39} "
                    f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
                    f"{con['node_id'].hex()[:10]}... "
                    f"{last_connect} "
                    f"{mb_down:7.1f}|{mb_up:<7.1f}")
                print(con_str)
            # if called together with other arguments, leave a blank line
            if args.state:
                print("")
        if args.exit_node:
            node_stop = await client.stop_node()
            print(node_stop, "Node stopped.")
        if args.add_connection:
            if ":" not in args.add_connection:
                print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
            else:
                # Split on the last ':' so IPv6 literals keep their colons.
                ip, port = (
                    ":".join(args.add_connection.split(":")[:-1]),
                    args.add_connection.split(":")[-1],
                )
                print(f"Connecting to {ip}, {port}")
                try:
                    await client.open_connection(ip, int(port))
                except BaseException:
                    # TODO: catch right exception
                    print(f"Failed to connect to {ip}:{port}")
        if args.remove_connection:
            result_txt = ""
            if len(args.remove_connection) != 10:
                result_txt = "Invalid NodeID"
            else:
                connections = await client.get_connections()
                for con in connections:
                    if args.remove_connection == con["node_id"].hex()[:10]:
                        print("Attempting to disconnect", "NodeID",
                              args.remove_connection)
                        try:
                            await client.close_connection(con["node_id"])
                        except BaseException:
                            result_txt = (
                                f"Failed to disconnect NodeID {args.remove_connection}"
                            )
                        else:
                            result_txt = (
                                f"NodeID {args.remove_connection}... {NodeType(con['type']).name} "
                                f"{con['peer_host']} disconnected.")
                    elif result_txt == "":
                        result_txt = f"NodeID {args.remove_connection}... not found."
            print(result_txt)
        if args.block_header_hash != "":
            block_header = await client.get_block(
                hexstr_to_bytes(args.block_header_hash))
            # print(dir(block_header))
            if block_header is not None:
                print("Block header:")
                print(block_header.header)
                block_time = struct_time(
                    localtime(block_header.header.data.timestamp))
                print("Block time:",
                      time.strftime("%a %b %d %Y %T %Z", block_time))
            else:
                print("Block hash", args.block_header_hash, "not found.")
        if args.block_by_header_hash != "":
            block = await client.get_block(
                hexstr_to_bytes(args.block_by_header_hash))
            # Would like to have a verbose flag for this
            if block is not None:
                prev_block_header_hash = block.header.data.prev_header_hash
                prev_block_header = await client.get_block(
                    prev_block_header_hash)
                block_time = struct_time(localtime(
                    block.header.data.timestamp))
                block_time_string = time.strftime("%a %b %d %Y %T %Z",
                                                  block_time)
                print("Block:")
                print(
                    f"Header Hash 0x{args.block_by_header_hash}\n"
                    f"Timestamp {block_time_string}\n"
                    f"Height {block.header.data.height}\n"
                    f"Weight {block.header.data.weight}\n"
                    f"Previous Block 0x{block.header.data.prev_header_hash}\n"
                    f"Cost {block.header.data.cost}\n"
                    f"Difficulty {block.header.data.weight-prev_block_header.header.data.weight}\n"
                    f"Total VDF Iterations {block.header.data.total_iters}\n"
                    f"Block VDF Iterations {block.proof_of_time.number_of_iterations}\n"
                    f"Proof of Space \'k\' Size {block.proof_of_space.size}\n"
                    # f"Plot Public Key 0x{block.proof_of_space.plot_pubkey}\n"
                    # f"Pool Public Key 0x{block.proof_of_space.pool_pubkey}\n"
                    f"Tx Filter Hash {(block.transactions_filter)}\n"
                    f"Tx Generator Hash {block.transactions_generator}\n"
                    f"Coinbase Amount {block.header.data.coinbase.amount/1000000000000}\n"
                    f"Coinbase Puzzle Hash 0x{block.header.data.coinbase.puzzle_hash}\n"
                    f"Fees Amount {block.header.data.fees_coin.amount/1000000000000}\n"
                    f"Fees Puzzle Hash 0x{block.header.data.fees_coin.puzzle_hash}\n"
                    f"Aggregated Signature {block.header.data.aggregated_signature}\n"
                )
            else:
                print("Block with header hash", args.block_by_header_hash,
                      "not found.")
    except Exception as e:
        if isinstance(e, aiohttp.client_exceptions.ClientConnectorError):
            print(f"Connection error. Check if full node is running at {args.rpc_port}")
        else:
            print(f"Exception {e}")
    client.close()
    await client.await_closed()
def get_day_start_timestamp(t):
    """Return (start, end) epoch seconds bounding the local day containing *t*.

    ``start`` is local midnight of that day; ``end`` is the last second of
    the same day (start + 86399).
    """
    lt = time.localtime(int(t))
    midnight_fields = (lt.tm_year, lt.tm_mon, lt.tm_mday,
                       0, 0, 0,
                       lt.tm_wday, lt.tm_yday, lt.tm_isdst)
    day_start = int(time.mktime(midnight_fields))
    return day_start, day_start + 86399