def raw(self, buf, time=None, wire_size=None):
    """Serialize one captured packet into a pcap-style record.

    Record layout: four native-endian unsigned ints (seconds,
    microseconds, captured length, original wire length) followed by
    the raw packet bytes.

    Parameters:
        buf: packet bytes actually captured.
        time: capture time — None (use now), a datetime/time object,
            or a numeric epoch timestamp.
        wire_size: original packet length on the wire; defaults to
            len(buf).

    Returns:
        The packed record as bytes; b"" for an empty buffer.
        (Fix: the original fell off with a bare ``return`` here,
        yielding None and breaking callers that concatenate records.)
    """
    if len(buf) == 0:
        return b""
    if wire_size is None:
        wire_size = len(buf)
    # Captured bytes can never exceed the on-wire size.
    assert wire_size >= len(buf), "cap size > wire size!"
    if time is None:
        t = pytime.time()
    elif isinstance(time, (datetime.datetime, datetime.time)):
        # TODO: TZ? mktime interprets the timetuple as local time.
        # NOTE(review): datetime.time has no timetuple() — this branch
        # presumably only ever sees datetime.datetime; confirm.
        t = pytime.mktime(time.timetuple()) + (time.microsecond / 1000000.0)
    else:
        t = time
    # Split the float timestamp into whole seconds + microseconds.
    sec = int(t)
    usec = int((t - sec) * 1000000)
    header = pack("IIII",
                  sec, usec,   # timestamp
                  len(buf),    # saved (captured) size
                  wire_size,   # original size
                  )
    return header + buf
def add_MODISPBL_to_trajectory(ds, box_degrees=3):
    """Attach MODIS cloud-top-height (CTH) statistics along a trajectory.

    For each trajectory time step, samples the matching day/night MODIS
    CTH climatology file in a box_degrees x box_degrees box centred on
    the trajectory position, and records the mean, standard deviation,
    and NaN fraction of CTH in that box. Steps at other hours get NaN.
    Adds 'MODIS_CTH', 'MODIS_CTH_std', 'MODIS_CTH_nanfrac' to ds.

    Parameters:
        ds: xarray Dataset with 'lat', 'lon', 'time' coordinates.
        box_degrees: side length (degrees) of the sampling box.

    Returns:
        ds, modified in place with the three new variables.
    """
    lats, lons, times = ds.lat.values, ds.lon.values, ds.time.values
    # Day/night overpasses identified purely by hour-of-day:
    # 23 UTC -> day file, 11 UTC -> night file — TODO confirm these
    # hours against the trajectory's time convention.
    MODIS_day_idx = np.argwhere([i.hour == 23 for i in utils.as_datetime(times)]).squeeze()
    MODIS_night_idx = np.argwhere([i.hour == 11 for i in utils.as_datetime(times)]).squeeze()
    # Superseded day file kept for reference:
    # dayfile = '/home/disk/eos4/jkcm/Data/CSET/Ryan/Daily_1x1_JHISTO_CTH_c6_day_v2_calboxes_top10_Interp_hif_zb_2011-2016.nc'
    dayfile = '/home/disk/eos4/jkcm/Data/CSET/Ryan/Daily_1x1_JHISTO_CTH_c6_day_v2_calboxes_top10_Interp_hif_zb_2011-2016_corrected.nc'
    nightfile = '/home/disk/eos4/jkcm/Data/CSET/Ryan/Daily_1x1_JHISTO_CTH_c6_night_v2_calboxes_top10_Interp_hif_zb_2011-2016.nc'
    vals = []
    stds = []
    nanfrac = []
    for i in range(len(times)):
        if i in MODIS_day_idx:
            f = dayfile
        elif i in MODIS_night_idx:
            f = nightfile
        else:
            # Not a MODIS overpass time: pad with NaN to keep alignment.
            vals.append(np.nan)
            stds.append(np.nan)
            nanfrac.append(np.nan)
            continue
        with xr.open_dataset(f) as data:
            lat, lon, time = lats[i], lons[i], utils.as_datetime(times[i])
            # Locate the file's time index matching this calendar day.
            t_idx = np.argwhere(np.logical_and(data['days'].values == time.timetuple().tm_yday,
                                               data['years'].values == time.year))[0][0]
            # Latitude slice runs high->low: file latitudes are descending.
            x = data['cth'].sel(longitude=slice(lon - box_degrees/2, lon + box_degrees/2),
                                latitude=slice(lat + box_degrees/2, lat - box_degrees/2))
            z = x.isel(time=t_idx).values
            vals.append(np.nanmean(z))
            stds.append(np.nanstd(z))
            nanfrac.append(np.sum(np.isnan(z))/z.size)
    ds['MODIS_CTH'] = (('time'), np.array(vals))
    ds['MODIS_CTH_std'] = (('time'), np.array(stds))
    ds['MODIS_CTH_nanfrac'] = (('time'), np.array(nanfrac))
    return ds
def set_e_exp(self, conn, parts):
    """Set the expiry date of a database entry for a client.

    Wire format of ``parts``: [year, month, day, uuid, mod_year,
    mod_month, mod_day, mod_hour, mod_minute, mod_second]; the trailing
    six fields are the entry's last-modified time as last seen by the
    client, used for optimistic conflict detection.
    Sends a FAIL message and aborts if the entry changed since the
    client refreshed, or if it no longer exists.
    """
    y = int(parts.pop(0))
    mon = int(parts.pop(0))
    d = int(parts.pop(0))
    uuid = parts.pop(0)
    # After the four pops above, parts[0:6] hold the client's view of
    # the entry's last-modified timestamp.
    time = datetime(int(parts[0]), int(parts[1]), int(parts[2]),
                    int(parts[3]), int(parts[4]), int(parts[5]))
    time = time.timetuple()
    for i in self.db.entries:
        if i.uuid == uuid:
            if self.check_last_mod(i, time) is True:
                # Server copy is newer than the client's snapshot.
                sendmsg(conn, b"FAIL: Entry was modified. You should "
                        b"refresh and if you're sure you want "
                        b"to edit this entry try it again.")
                return
            i.set_expire(y, mon, d)
            break
        elif i is self.db.entries[-1]:
            # Reached the last entry without a uuid match: entry is gone.
            # NOTE(review): if self.db.entries is empty, the loop never
            # runs and we fall through to save() — verify that is intended.
            sendmsg(conn, b"FAIL: Entry doesn't exist "
                    b"anymore. You should refresh")
            return
    # Persist and push the updated database back to the client.
    self.db.save()
    self.send_db(conn, [])
def set_e_exp(self, conn, parts):
    """Set the expiry date of a database entry for a client.

    ``parts`` layout: [year, month, day, uuid, mod_year, mod_month,
    mod_day, mod_hour, mod_minute, mod_second]; the trailing six fields
    are the entry's last-modified time as last seen by the client
    (optimistic conflict detection). Sends a FAIL message and aborts if
    the entry changed since the client refreshed, or no longer exists.
    """
    y = int(parts.pop(0))
    mon = int(parts.pop(0))
    d = int(parts.pop(0))
    uuid = parts.pop(0)
    # After the four pops above, parts[0:6] hold the client's snapshot
    # of the entry's last-modified timestamp.
    time = datetime(int(parts[0]), int(parts[1]), int(parts[2]),
                    int(parts[3]), int(parts[4]), int(parts[5]))
    time = time.timetuple()
    for i in self.db.entries:
        if i.uuid == uuid:
            if self.check_last_mod(i, time) is True:
                # Server copy is newer than the client's snapshot.
                sendmsg(
                    conn, b"FAIL: Entry was modified. You should "
                    b"refresh and if you're sure you want "
                    b"to edit this entry try it again.")
                return
            i.set_expire(y, mon, d)
            break
        elif i is self.db.entries[-1]:
            # Scanned every entry without a uuid match: entry was deleted.
            # NOTE(review): an empty entries list skips the loop entirely
            # and still calls save() — confirm that is intended.
            sendmsg(
                conn, b"FAIL: Entry doesn't exist "
                b"anymore. You should refresh")
            return
    # Persist and push the updated database back to the client.
    self.db.save()
    self.send_db(conn, [])
def set_g_title(self, conn, parts):
    """Rename a group on behalf of a client.

    ``parts`` layout: [title, group_id, mod_year, mod_month, mod_day,
    mod_hour, mod_minute, mod_second]; the trailing six fields are the
    group's last-modified time as last seen by the client (optimistic
    conflict detection). Sends a FAIL message and aborts if the group
    changed since the client refreshed, or no longer exists.
    """
    title = parts.pop(0).decode()
    group_id = int(parts.pop(0))
    # After the two pops above, parts[0:6] hold the client's snapshot
    # of the group's last-modified timestamp.
    time = datetime(int(parts[0]), int(parts[1]), int(parts[2]),
                    int(parts[3]), int(parts[4]), int(parts[5]))
    time = time.timetuple()
    for i in self.db.groups:
        if i.id_ == group_id:
            if self.check_last_mod(i, time) is True:
                # Server copy is newer than the client's snapshot.
                sendmsg(
                    conn, b"FAIL: Group was modified. You should "
                    b"refresh and if you're sure you want "
                    b"to edit this group try it again.")
                return
            i.set_title(title)
            break
        elif i is self.db.groups[-1]:
            # Scanned every group without an id match: group was deleted.
            # NOTE(review): an empty groups list skips the loop entirely
            # and still calls save() — confirm that is intended.
            sendmsg(
                conn, b"FAIL: Group doesn't exist "
                b"anymore. You should refresh")
            return
    # Persist and push the updated database back to the client.
    self.db.save()
    self.send_db(conn, [])
def show_comments(self, ticket, ignore_git_user=True):
    """
    Shows the comments on a given ticket, newest first.

    INPUT:

    - ``ticket`` -- the ticket number

    - ``ignore_git_user`` -- whether to remove comments automatically
      added when the branch is updated.

    EXAMPLES::

        sage: dev.trac.show_comments(100)  # optional: internet
        ====================
        was (6 years ago)
        fixed
    """
    comments = []
    changelog = self._anonymous_server_proxy.ticket.changeLog(int(ticket))
    for time, author, field, oldvalue, newvalue, permanent in changelog:
        # Keep only real, non-empty comment events; optionally drop the
        # automated comments posted by the 'git' user.
        if field == 'comment' and newvalue and not (ignore_git_user and author == 'git'):
            # `time` here is an XML-RPC datetime-like value — rebuild a
            # plain datetime from its first six timetuple fields so
            # _timerep can render it.  TODO confirm the proxy's type.
            comments.append(
                (_timerep(datetime.datetime(*(time.timetuple()[:6]))),
                 author, newvalue))
    # Display newest-first; each tuple is (rendered_time, author, text).
    self._UI.show('\n'.join([
        '====================\n{0} ({1})\n{2}'.format(
            author, time, comment)
        for time, author, comment in reversed(comments)
    ]))
def _get_filename(self, time): product_name = str(self.product)[6:] year = time.year day = time.timetuple().tm_yday hour = time.hour minute = time.minute filename = f"OPERA_{product_name}_{year}_{day:03}_" f"{hour:02}_{minute:02}.hdf" return filename
def _get_php_date(format, time):
    """Render a PHP-style date ``format`` string for Unix timestamp ``time``.

    Supports a subset of PHP's date() format characters; any character
    without a mapping is copied through unchanged.

    Fix: the original applied chained str.replace() calls over the whole
    string, so a replacement value could itself be corrupted by a later
    pass (e.g. 'l' -> 'Monday', then the 'n' rule rewrote the 'n' inside
    'Monday').  A single left-to-right character map avoids the cascade.
    """
    # http://php.net/manual/en/datetime.formats.date.php
    # http://strftime.org/
    # adapted from http://brandonwamboldt.ca/python-php-date-class-335/
    _self = Contemplate
    time = datetime.datetime.fromtimestamp(time)
    replacements = {}
    # Day
    replacements['d'] = str(time.day).zfill(2)
    replacements['D'] = calendar.day_abbr[time.weekday()]
    replacements['j'] = str(time.day)
    replacements['l'] = calendar.day_name[time.weekday()]
    replacements['S'] = _self._get_ordinal_suffix(time.day)
    replacements['w'] = str(time.weekday())
    replacements['z'] = str(time.timetuple().tm_yday)
    # Week
    replacements['W'] = str(time.isocalendar()[1])
    # Month
    replacements['F'] = calendar.month_name[time.month]
    replacements['m'] = str(time.month).zfill(2)
    replacements['M'] = calendar.month_abbr[time.month]
    replacements['n'] = str(time.month)
    replacements['t'] = str(calendar.monthrange(time.year, time.month)[1])
    # Year
    replacements['L'] = str(int(calendar.isleap(time.year)))
    replacements['Y'] = str(time.year)
    replacements['y'] = str(time.year)[2:]
    # Time
    replacements['a'] = time.strftime("%p").lower()
    replacements['A'] = time.strftime("%p")
    replacements['g'] = str(int(time.strftime("%I")))
    replacements['G'] = str(int(time.strftime("%H")))
    replacements['h'] = time.strftime("%I")
    replacements['H'] = time.strftime("%H")
    replacements['i'] = str(time.minute).zfill(2)
    replacements['s'] = str(time.second).zfill(2)
    replacements['u'] = str(time.microsecond)
    # Timezone
    replacements['e'] = ""  # _self.get_timezone()
    # NOTE(review): fromtimestamp() yields a naive datetime, so dst()
    # returns None here and 'I' renders as the string 'None' — confirm.
    replacements['I'] = str(time.dst())
    # One pass: map each format character exactly once.
    return ''.join(replacements.get(ch, ch) for ch in format)
def getUnixTimeStampFromHumanReadableTime(human_readable_time):
    '''Convert a human-readable time string into a Unix timestamp.

    The input is parsed with dateutil's parser; the resulting datetime's
    struct_time (9-tuple, interpreted as local time) is then converted
    to seconds since the epoch with mktime(). Returns a float.

    1. human_readable_time :: a human-readable time, e.g. '2018-04-02'
       or 'April 2, 2018'

    Output:
    2. unix_time_stamp :: Unix time stamp converted from the input.
    '''
    parsed = parser.parse(human_readable_time)
    return mktime(parsed.timetuple())
def convertPSTTime2Unix(readable_time):
    '''Convert a human-readable time string back to a UNIX timestamp.

    Parsing is done by dateutil's parser; mktime() then interprets the
    resulting struct_time (full 9-tuple) as local time. Returns a float.

    INPUT:
    1. 'readable_time' :: a human-readable time,
       e.g. '2018-04-02' or 'April 2, 2018'
    '''
    parsed = parser.parse(readable_time)
    return mktime(parsed.timetuple())
def request_nws(self):
    """Fetch the NWS hourly temperature forecast for (self.lat, self.lon).

    Resolves the point to a city/state (stored in ``self.location``),
    downloads the hourly forecast, converts temperatures F->C, then
    aggregates daily max/min/mean/range by day-of-year and attaches
    elevation plus theoretical radiation via ERA5_process.  Results are
    stored on ``self.out_rad``; the unique forecast dates on ``self.dates``.
    """
    # Resolve lat/lon to an NWS gridpoint and human-readable location.
    url = 'https://api.weather.gov/points/' + self.lat + ',' + self.lon
    r = requests.get(url)
    city = r.json()['properties']['relativeLocation']['properties']['city']
    state = r.json(
    )['properties']['relativeLocation']['properties']['state']
    self.location = city + ", " + state
    # The hourly forecast URL is included in the points response.
    r2 = requests.get(r.json()['properties']['forecastHourly'])
    out = pd.DataFrame(columns=[
        'lat', 'lon', 'year', 'month', 'day', 'hour', 'date', 'doy', 'temp'
    ])
    for i in range(len(r2.json()['properties']['periods'])):
        lat = self.lat
        lon = self.lon
        # startTime carries a UTC-offset suffix (e.g. '-07:00'); the
        # [:-6] slice strips it before parsing.
        time = datetime.datetime.strptime(
            r2.json()['properties']['periods'][i]['startTime'][:-6],
            '%Y-%m-%dT%H:%M:%S')
        year = time.year
        month = time.month
        day = time.day
        hour = time.hour
        date = time.date()
        doy = time.timetuple().tm_yday
        temp = r2.json()['properties']['periods'][i]['temperature']
        # Fahrenheit -> Celsius.
        temp = (temp - 32) * 5 / 9
        out.loc[i] = [lat, lon, year, month, day, hour, date, doy, temp]
    dates = out['date'].unique()
    self.dates = dates
    # Collapse hourly rows into per-day-of-year aggregates.
    out = out.groupby(['doy'], as_index=False, group_keys=False).agg(
        {'temp': ['max', 'min', 'mean']})
    out.columns = ['doy', 'T_max', 'T_min', 'T_avg']
    out['T_rng'] = out['T_max'] - out['T_min']
    out.insert(1, "lat", self.lat)
    out.insert(2, 'lon', self.lon)
    out[['lat', 'lon']] = out[['lat', 'lon']].astype(float)
    # add the elevation and theoretical radiation
    ERA5_process.process.elevation_function(out, 'lat', 'lon')
    out_elev = ERA5_process.process(out)
    out_rad = out_elev.add_radiation()
    self.out_rad = out_rad
def format_time(time: datetime) -> str:
    '''Format a datetime as yyyyDOYhhmmss.sss.

    DOY is the zero-padded day of year; the fractional part is whole
    milliseconds (microseconds truncated, not rounded — rounding could
    produce a four-digit "1000" and break the fixed width, as the
    original `{millis:03.0f}` did for microsecond >= 999500).

    args:
        time: a datetime object
    return:
        a 17-character timestamp string
    '''
    year = time.year
    doy = time.timetuple().tm_yday
    hour = time.hour
    minute = time.minute
    second = time.second
    # Integer millisecond truncation keeps the field exactly 3 digits.
    millis = time.microsecond // 1000
    return f"{year:04d}{doy:03d}{hour:02d}{minute:02d}{second:02d}.{millis:03d}"
def postVel(oldList, newList, elapsedTime):
    """Record how post rankings shifted between two snapshots.

    Appends one line — the repr of (rankChanges, elapsedTime, currHour)
    — to the module-level ``postVelocity`` file handle. A post absent
    from oldList is recorded as the string 'new'.
    """
    # get the time of day (only the hour is kept)
    time = datetime.now()
    timeTup = time.timetuple()
    currHour = timeTup[3]  # tm_hour
    # keep track of how posts ranks have changed
    rankChanges = []
    # find the changes in post ranks
    for post in newList:
        # of the posts still in the top 25
        if post in oldList:
            prevRank = oldList.index(post)
            newRank = newList.index(post)
            # positive change = post moved up the ranking
            change = prevRank - newRank
            rankChanges.append(change)
        else:
            rankChanges.append('new')
    # write data on how much rankings changed in given time
    velocity = (rankChanges, elapsedTime, currHour)
    postVelocity.write(str(velocity)+'\n')
def delete_group(self, conn, parts):
    """Delete a group from the database on behalf of a client.

    ``parts`` layout: [group_id, mod_year, mod_month, mod_day, mod_hour,
    mod_minute, mod_second]; the trailing six fields are the group's
    last-modified time as last seen by the client (optimistic conflict
    detection). Sends a FAIL message and aborts if the group changed
    since the client refreshed, or no longer exists.
    """
    group_id = int(parts.pop(0))
    # parts[0:6] now hold the client's last-modified timestamp snapshot.
    time = datetime(int(parts[0]), int(parts[1]), int(parts[2]),
                    int(parts[3]), int(parts[4]), int(parts[5]))
    time = time.timetuple()
    for i in self.db.groups:
        if i.id_ == group_id:
            if self.check_last_mod(i, time) is True:
                # Server copy is newer than the client's snapshot.
                sendmsg(conn, b"FAIL: Group was modified. You should "
                        b"refresh and if you're sure you want "
                        b"to delete this group try it again.")
                return
            i.remove_group()
            break
        elif i is self.db.groups[-1]:
            # Scanned every group without an id match: already deleted.
            # NOTE(review): an empty groups list skips the loop entirely
            # and still calls save() — confirm that is intended.
            sendmsg(conn, b"FAIL: Group doesn't exist "
                    b"anymore. You should refresh")
            return
    # Persist and push the updated database back to the client.
    self.db.save()
    self.send_db(conn, [])
def yhtj():
    """Render the admin user-count statistics page.

    Computes total non-admin users, users registered this month, users
    registered today, and a per-day registration count for the last 11
    days, then renders 'admin/user_count.html'.

    Fix: the original built past dates with ``rq.replace(day=tm_mday-i)``
    which raises ValueError whenever the current day of month is < 11
    (day goes to 0 or negative) and with ``replace(day=rz+1)`` at month
    end.  timedelta arithmetic handles month/year boundaries correctly.
    """
    from datetime import timedelta  # local: file imports `datetime` class only
    time = datetime.now()
    rq = time.date()  # today's date
    # Total non-admin users.
    z_rs = User.query.filter(User.is_admin != 1).count()
    # Users created since the first of the current month.
    m_rs = User.query.filter(User.create_time > rq.replace(day=1), User.is_admin != 1).count()
    # Users created today (the original's rq.replace(day=tm_mday) == rq).
    r_rs = User.query.filter(User.create_time > rq, User.is_admin != 1).count()
    day_l = []
    day_rs = []
    # Daily registration counts for today and the previous 10 days.
    for i in range(0, 11):
        d = rq - timedelta(days=i)
        d2 = d + timedelta(days=1)
        day_l.append(d.strftime('%Y-%m-%d').strip())
        day_rs.append(User.query.filter(d2 > User.create_time, User.create_time > d).count())
    # Oldest day first for charting.
    day_rs.reverse()
    day_l.reverse()
    data = {'day_rs': day_rs, 'day_l': day_l,
            'z_rs': z_rs, 'm_rs': m_rs, 'r_rs': r_rs}
    return render_template('admin/user_count.html', data=data)
def _add_suffix(self, msg):
    """Return a copy of *msg* with typed field-name suffixes added.

    Renames each field with a Solr-style suffix (``_i`` int, ``_s``
    string) and derives ``date_int_i`` (epoch seconds) from the
    free-form ``date`` header.

    Fixes in the date path: the original called ``.timetuple()``
    unconditionally, which crashed both when ``parsedate_tz`` succeeded
    (it already returns a 10-tuple, not a datetime) and when the final
    fallback had set ``time = 0`` (an int).
    """
    newmsg = {}
    if msg['date'] is None:
        newmsg['date_int_i'] = 0
    else:
        # RFC-2822 parser first: it yields the 10-tuple (9 time fields
        # plus tz offset) that mktime_tz expects, directly.
        timetup = utils.parsedate_tz(msg['date'])
        if timetup is None:
            # Fall back to the fuzzy parser, which returns a datetime;
            # convert it and append a zero tz offset.
            parsed = parse(msg['date'])
            timetup = parsed.timetuple() + (0,) if parsed is not None else None
        if timetup is None:
            # Unparseable date: record epoch 0 rather than crashing.
            newmsg['date_int_i'] = 0
        else:
            newmsg['date_int_i'] = int(utils.mktime_tz(timetup))
    newmsg['date_s'] = msg['date']
    newmsg['size_i'] = int(msg['size'])
    newmsg['stored_i'] = int(msg['stored'])
    newmsg['tags_s'] = msg['tags']
    newmsg['flags_s'] = msg['flags']
    newmsg['to_s'] = msg['to']
    newmsg['from_s'] = msg['from']
    newmsg['cc_s'] = msg['cc']
    newmsg['bcc_s'] = msg['bcc']
    newmsg['subject_s'] = msg['subject']
    return newmsg
def show_comments(self, ticket, ignore_git_user=True):
    """
    Shows the comments on a given ticket, newest first.

    INPUT:

    - ``ticket`` -- the ticket number

    - ``ignore_git_user`` -- whether to remove comments automatically
      added when the branch is updated.

    EXAMPLES::

        sage: dev.trac.show_comments(100)  # optional: internet
        ====================
        was (6 years ago)
        fixed
    """
    comments = []
    changelog = self._anonymous_server_proxy.ticket.changeLog(int(ticket))
    for time, author, field, oldvalue, newvalue, permanent in changelog:
        # Keep only real, non-empty comment events; optionally drop the
        # automated comments posted by the 'git' user.
        if field == 'comment' and newvalue and not (ignore_git_user and author == 'git'):
            # `time` is an XML-RPC datetime-like value — rebuild a plain
            # datetime from its first six timetuple fields for display.
            # TODO confirm the proxy's exact return type.
            comments.append((_timerep(datetime.datetime(*(time.timetuple()[:6]))), author, newvalue))
    # Display newest-first; each tuple is (rendered_time, author, text).
    self._UI.show('\n'.join(['====================\n{0} ({1})\n{2}'.format(author, time, comment) for time, author, comment in reversed(comments)]))
def getFile(self, time):
    """Return the mp3 file path for *time*: <path>/<epoch-seconds>-second.mp3.

    The directory comes from self.getPath(year, month, day); the file
    name is the timestamp converted to whole epoch seconds (local time).
    """
    epoch_seconds = int(mktime(time.timetuple()))
    directory = self.getPath(time.year, time.month, time.day)
    return "%s/%d-second.mp3" % (directory, epoch_seconds)
def time_to_timestamp(time):
    """Convert a datetime to a Unix timestamp in whole milliseconds.

    Fix: the parameter is named ``time``, shadowing the stdlib ``time``
    module — the original called ``time.mktime`` on the datetime
    argument itself and raised AttributeError on every call.  A local
    aliased import restores access to the module without changing the
    public parameter name.
    """
    import time as _time  # parameter shadows the stdlib module
    stamp = int(_time.mktime(time.timetuple())) * 1000
    return stamp
#for the different sites, create formatted file i=0 for f in listfile: print('****' + codeok[i]+' - '+f + '****') # initialization ods, year, month, day, data = [], [], [], [], [] from datetime import date readCSV = csv.reader(open(f), delimiter='\t') for row in readCSV: if (codeok[i]=='DMPSMPZ' or codeok[i]=='DMPSPAL' or codeok[i]=='DMPSSMR' or codeok[i]=='DMPSVAR' or codeok[i]=='DMPSVHL'): tim, dat, dat2, dat3, flag=row[0].split() if (float(flag)==1): tim=int(np.floor(float(tim))) time=date.fromordinal(tim) t=time.timetuple() year.append(t[0]) month.append(t[1]) day.append(t[2]) data.append(dat) ods.append(tim) else: tim, dat, flag=row[0].split() if (float(flag)==1): tim=int(np.floor(float(tim))) time=date.fromordinal(tim) t=time.timetuple() year.append(t[0]) month.append(t[1]) day.append(t[2]) data.append(dat)
# NOTE(review): Python 2 script chunk, truncated at the end (the final
# `for` loop's body lies beyond this view).  It builds a 32-day service
# bitmask per GTFS service id, marking DST days separately.
print 'DST active on start_time, adding one hour to start_time'
calendar_start_time = time.mktime((start_time+timedelta(hours=dst_active)).timetuple())
print 'epoch time at which calendar starts: %d' % calendar_start_time
sids = db.service_ids()
print '%d distinct service IDs' % len(sids)
bitmask_for_sid = {}
dst_mask = 0
for sid in sids :
    bitmask_for_sid[sid] = 0
for day_offset in range(32) :
    date = start_date + timedelta(days = day_offset)
    # Add one day because DST is in effect after 3am but all busses will drive after 3am thus in DST.
    # NOTE(review): `time` below shadows the time module used for
    # mktime() above — safe only because mktime is not called again.
    time = timezone.localize(datetime.datetime.combine(date + timedelta(days=1), datetime.time.min))
    if time.timetuple().tm_isdst == 1:
        dst_mask |= 1 << day_offset
    # db.date_range() is somewhat slow.
    # db.service_periods(sample_date) is slow because it checks its parameters with date_range().
    # this is very inefficient, but run time is reasonable for now and it uses existing code.
    active_sids = db.service_periods(date)
    day_mask = 1 << day_offset
    print 'date {!s} has {:5d} active service ids. applying mask {:032b}'.format(date, len(active_sids), day_mask)
    for sid in active_sids :
        bitmask_for_sid[sid] |= day_mask
#for sid in sids :
#    print '{:<5s} {:032b}'.format(sid, bitmask_for_sid[sid])
service_id_for_trip_id = {}
for tid, sid in db.tripids_in_serviceperiods() :
# (loop body continues beyond this chunk)
# NOTE(review): Python 2 script chunk that starts mid-statement — the
# opening of this call (presumably `calendar_start_time = time.mktime(`)
# is outside this view.  Builds a 32-day service bitmask per GTFS
# service id, marking DST days separately.
    (start_time + timedelta(hours=dst_active)).timetuple())
print 'epoch time at which calendar starts: %d' % calendar_start_time
sids = db.service_ids()
print '%d distinct service IDs' % len(sids)
bitmask_for_sid = {}
dst_mask = 0
for sid in sids:
    bitmask_for_sid[sid] = 0
for day_offset in range(32):
    date = start_date + timedelta(days=day_offset)
    # Add one day because DST is in effect after 3am but all busses will drive after 3am thus in DST.
    # NOTE(review): `time` below shadows the time module — safe only
    # because mktime is not called again after this point.
    time = timezone.localize(
        datetime.datetime.combine(date + timedelta(days=1),
                                  datetime.time.min))
    if time.timetuple().tm_isdst == 1:
        dst_mask |= 1 << day_offset
    # db.date_range() is somewhat slow.
    # db.service_periods(sample_date) is slow because it checks its parameters with date_range().
    # this is very inefficient, but run time is reasonable for now and it uses existing code.
    active_sids = db.service_periods(date)
    day_mask = 1 << day_offset
    print 'date {!s} has {:5d} active service ids. applying mask {:032b}'.format(
        date, len(active_sids), day_mask)
    for sid in active_sids:
        bitmask_for_sid[sid] |= day_mask
#for sid in sids :
#    print '{:<5s} {:032b}'.format(sid, bitmask_for_sid[sid])
service_id_for_trip_id = {}
cursor.addEntities(entities) # 2. Subscribe to the Trade and BestBidOffer Messages types = [ 'com.epam.deltix.timebase.messages.universal.PackageHeader', 'com.epam.deltix.timebase.messages.service.SecurityFeedStatusMessage' ] cursor.addTypes(types) # 3. Subscribe to the data stream(s) cursor.addStreams([stream]) # Define subscription start time time = datetime(2010, 1, 1, 0, 0) # Start time is Epoch time in milliseconds startTime = calendar.timegm(time.timetuple()) * 1000 # 4. Reset cursor to the subscription time cursor.reset(startTime) try: while cursor.next(): message = cursor.getMessage() # Message time is Epoch time in nanoseconds time = message.timestamp / 1e9 messageTime = datetime.utcfromtimestamp(time) if message.typeName == 'com.epam.deltix.timebase.messages.universal.PackageHeader': print("================================================") print("PackageHeader timestamp: " + str(messageTime) +
def on_view_time_clicked(self, view, item, ev, time):
    """Relay a click on a time cell as two signals.

    Emits 'time-clicked' with the full timestamp, then 'day-clicked'
    with just the calendar date derived from it.
    """
    year, month, day = time.timetuple()[:3]
    clicked_date = datetime.date(year, month, day)
    self.emit('time-clicked', time, ev)
    self.emit('day-clicked', clicked_date, ev)
def date_to_unix(l_date):
    """Convert [year, month, day, hour, minute] to Unix epoch milliseconds.

    Parameters:
        l_date: sequence of five int-convertible values:
            year, month, day, hour, minute.

    Returns:
        Epoch milliseconds as a float (mktime interprets local time).

    Fixes two defects in the original: every datetime component was
    read from l_date[0], and mktime was called on the datetime instance
    (which has no such attribute) instead of the time module.
    """
    import time as _time  # local alias avoids clashing with callers' names
    dt = datetime.datetime(int(l_date[0]), int(l_date[1]), int(l_date[2]),
                           int(l_date[3]), int(l_date[4]))
    return _time.mktime(dt.timetuple()) * 1000
def time2timestamp(time):
    """Convert a datetime to a Unix timestamp (seconds, float, local time).

    Fix: the parameter name ``time`` shadows the stdlib ``time`` module,
    so the original's ``time.mktime(...)`` was an attribute lookup on
    the datetime argument and raised AttributeError on every call.  The
    aliased local import restores the module without changing the
    public parameter name.
    """
    import time as _time  # parameter shadows the stdlib module
    timestamp = _time.mktime(time.timetuple())
    return timestamp