def test_utc():
    """forceutc() maps an EST-aware datetime to UTC and passes dates through.

    NOTE(review): shadowed by the later ``test_utc`` that guards the pytz
    import with ``pytest.importorskip`` -- confirm whether this copy should
    be removed.
    """
    est_time = T[0].astimezone(timezone('EST'))
    utc_time = sd.forceutc(est_time)
    assert utc_time == est_time
    assert utc_time.tzname() == 'UTC'

    just_date = T[0].date()
    assert sd.forceutc(just_date) == just_date
def test_utc():
    """forceutc() maps an EST-aware datetime to UTC and passes dates through."""
    pytz = pytest.importorskip('pytz')

    aware_est = T[0].astimezone(pytz.timezone('EST'))
    as_utc = sd.forceutc(aware_est)
    assert as_utc == aware_est
    assert as_utc.tzname() == 'UTC'

    plain_date = T[0].date()
    assert sd.forceutc(plain_date) == plain_date
def optical(vidfn, calfn, treq, terror_cam):
    """
    quick-n-dirty load of optical data to corroborate with other tle and ncdf data

    Parameters
    ----------
    vidfn : path to HST HDF5 video file with 'ut1_unix', 'rawimg', 'sensorloc'
    calfn : path to HDF5 calibration file with '/az', '/el' per-pixel mappings
    treq : requested time (datetime)
    terror_cam : camera clock error [seconds], subtracted from treq

    Returns
    -------
    img : raw image frame nearest the requested time
    tcam : UT1 unix time [s] of that frame
    llacam : sensor location (contents of 'sensorloc' -- presumably lat/lon/alt,
             confirm against file schema)
    az, el : per-pixel azimuth/elevation calibration arrays
    """
    treq -= timedelta(seconds=terror_cam)
    treq = forceutc(treq)
    treq = treq.timestamp()

    vidfn = Path(vidfn).expanduser()
    if vidfn.suffix != '.h5':  # was `not ... == `, same meaning, clearer
        raise IOError('{} needs to be HST HDF5 file'.format(vidfn))

    with h5py.File(str(vidfn), 'r', libver='latest') as f:
        tcam = f['ut1_unix']
        i = find_nearest(tcam, treq)[0]
        # warn (but do not abort) if the requested time clamps to the edge of
        # the camera record -- the nearest frame may be far from treq
        if i == 0 or i == tcam.size - 1:
            logging.critical('requested time {} at or past edge of camera time {}'.format(
                datetime.utcfromtimestamp(treq), datetime.utcfromtimestamp(tcam[i])))

        tcam = f['ut1_unix'][i]
        img = f['rawimg'][i, ...]
        # [()] reads the whole dataset; the .value accessor was removed in h5py 3.x
        llacam = f['sensorloc'][()]
    #%% map pixels to sky
    calfn = Path(calfn).expanduser()
    with h5py.File(str(calfn), 'r', libver='latest') as f:
        az = f['/az'][()]
        el = f['/el'][()]

    return img, tcam, llacam, az, el
def test_yearint():
    """Round-trip year-day <-> datetime conversions for every flavor of time in T."""
    for t in T:
        yd, utsec = sd.datetime2yd(t)
        utsec_alt = sd.dt2utsec(t)

        roundtrip = sd.yd2datetime(yd, utsec)
        if isinstance(t, datetime.datetime):
            assert roundtrip == t
        elif isinstance(t, np.datetime64):
            assert roundtrip == sd.forceutc(t.astype(datetime.datetime))
        else:
            # plain date: compare date portion only
            assert roundtrip.date() == t

        assert utsec == utsec_alt
def iridium_ncdf(fn, day, tlim, ellim, camlla):
    """
    Find the first Iridium satellite crossing a camera's field of view.

    Parameters
    ----------
    fn : NetCDF file with 'time' (fractional hours), 'pos_eci', 'pseudo_sv_num'
    day : date the fractional hours are relative to
    tlim : optional (start, end) datetimes restricting the search
    ellim : (min, max) elevation limits [deg] -- required
    camlla : camera (lat, lon, alt)

    Returns
    -------
    (ecef, lla, aer, eci) for the first satellite whose elevation enters the
    window, or (None, None, None, None) when no crossing is found.
    """
    assert len(ellim) == 2, 'must specify elevation limits'
    fn = Path(fn).expanduser()
    day = forceutc(day)
    #%% get all sats pseudo SV number
    with Dataset(str(fn), 'r') as f:
        # pseudo SV numbers are recycled (sometimes consecutively), so satellite
        # boundaries are detected by time resets instead; +1 for how diff() is defined
        psv_border = (diff(f['time']) < 0).nonzero()[0] + 1
        #%% iterate over psv; note unequal number of time samples per satellite.
        # Only one satellite at a time is of interest, so iterate one-by-one and
        # discard satellites that never cross the field of view.
        #%% consider only satellites above az,el limits for this location
        #TODO assumes only one satellite meets elevation and time criteria
        lind = [0, 0]  # [start, stop) sample window of the current satellite
        for i in psv_border:
            lind = [lind[1], i]
            # NOTE(review): arange(start, stop-1) drops the final sample of each
            # satellite -- confirm whether the -1 is intentional
            cind = arange(lind[0], lind[1] - 1, dtype=int)  # all times for this SV
            # times for this SV: file stores fractional hours relative to `day`
            t = array([day + timedelta(hours=h) for h in f['time'][cind].astype(float)])
            if tlim:
                mask = (tlim[0] <= t) & (t <= tlim[1])
                t = t[mask]
                cind = cind[mask]
            # now filter by az,el criteria
            az, el, r = eci2aer(f['pos_eci'][cind, :],
                                camlla[0], camlla[1], camlla[2], t)
            if ellim and ((ellim[0] <= el) & (el <= ellim[1])).any():
                eci = f['pos_eci'][cind, :]
                lat, lon, alt = eci2geodetic(eci, t)
                x, y, z = eci2ecef(eci, t)
                ecef = DataFrame(index=t, columns=['x', 'y', 'z'],
                                 data=column_stack((x, y, z)))
                lla = DataFrame(index=t, columns=['lat', 'lon', 'alt'],
                                data=column_stack((lat, lon, alt)))
                aer = DataFrame(index=t, columns=['az', 'el', 'srng'],
                                data=column_stack((az, el, r)))
                return ecef, lla, aer, eci

    print('no FOV crossings for your time span were found.')
    # BUGFIX: was `return (None, None)` -- must match the 4-tuple arity of the
    # success path so callers can unpack unconditionally
    return (None, None, None, None)
def datetime2unix(T):
    """
    Convert datetime(s) to UT1 unix epoch time [seconds].

    Parameters
    ----------
    T : scalar or array-like mix of
        * datetime / numpy datetime64 -- converted via forceutc()
        * str -- either a stringified unix time ('1234.5') or a parseable date
        * float/int -- assumed to already be unix seconds, passed through

    Returns
    -------
    ndarray of float unix seconds, shape of atleast_1d(T)

    Raises
    ------
    TypeError : element is none of the accepted types
    """
    T = atleast_1d(T)
    ut1_unix = empty(T.shape, dtype=float)
    for i, t in enumerate(T):
        if isinstance(t, (datetime, datetime64)):
            pass  # converted below
        elif isinstance(t, str):
            try:
                ut1_unix[i] = float(t)  # it was ut1_unix in a string
                continue
            except ValueError:
                t = parse(t)  # datetime in a string
        elif isinstance(t, (float, int)):
            # BUGFIX: was `return T`, which returned the raw input array
            # immediately and skipped conversion of any remaining
            # datetime/str elements (and could return a non-float dtype)
            ut1_unix[i] = float(t)
            continue
        else:
            raise TypeError('I only accept datetime or parseable date string')
        # NOTE(review): datetime64 has no .timestamp(); presumably forceutc()
        # converts it to a datetime first -- confirm
        ut1_unix[i] = forceutc(t).timestamp()  # ut1 seconds since unix epoch

    return ut1_unix
def readwspr(fn, callsign: str, band: int, call2, tlim) -> DataFrame:
    """
    Load WSPR spot reports and filter to callsign / band / counterpart / time.

    Parameters
    ----------
    fn : path to wsprnet '.csv', '.tsv', or preprocessed '.h5' file
    callsign : callsign of interest (case-insensitive)
    band : band code(s) to keep
    call2 : optional tuple/list of counterpart rx callsigns to keep
    tlim : optional (start, end) parseable time strings to keep

    Returns
    -------
    DataFrame of spots with 'snr' normalized to SNR/Hz/W

    Raises
    ------
    ValueError : file suffix is not .csv / .tsv / .h5
    """
    fn = Path(fn).expanduser()
    callsign = callsign.upper()
    if isinstance(call2, (tuple, list)):
        call2 = [c.upper() for c in call2]

    if fn.suffix == '.csv':  # .gz not readable for some reason
        dat = read_csv(
            fn,
            sep=',',
            index_col=False,
            usecols=[1, 2, 4, 6, 8, 10, 11, 12, 14],
            names=[
                'tut', 'rxcall', 'snr', 'txcall', 'power', 'distkm', 'az',
                'band', 'code'
            ],
            dtype={
                'tut': int, 'rxcall': str, 'snr': int, 'txcall': str,
                'power': int, 'distkm': int, 'az': int, 'band': int,
                'code': int
            },
            #nrows=1000)
            memory_map=True,
        )
        dat['t'] = [forceutc(datetime.utcfromtimestamp(u)) for u in dat['tut']]
    elif fn.suffix == '.tsv':
        dat = read_csv(
            fn,
            sep='\t',
            index_col=False,
            usecols=[0, 1, 2, 3, 5, 6, 8, 9, 10],
            names=[
                't', 'txcall', 'band', 'snr', 'txgrid', 'power', 'rxcall',
                'rxgrid', 'distkm'
            ],
            dtype={
                't': str, 'txcall': str, 'band': float, 'snr': int,
                'txgrid': str, 'power': int, 'rxcall': str, 'rxgrid': str,
                'distkm': int
            },
            #nrows=1000)
        )
        #%% sanitization
        dat['band'] = dat['band'].astype(int)
        dat['t'] = [
            forceutc(datetime.strptime(t.strip(), '%Y-%m-%d %H:%M'))
            for t in dat['t']
        ]
        dat['rxcall'] = dat['rxcall'].str.strip()
        dat['txcall'] = dat['txcall'].str.strip()
    elif fn.suffix == '.h5':  # assume preprocessed data
        """
        here we assume the hierarchy is by callsign
        """
        dat = {}
        with h5py.File(str(fn), 'r', libver='latest') as f:
            for c in f['/']:  # iterate over callsign
                # BUGFIX: numpy.column_stack takes a single sequence of arrays;
                # the original passed two positional arguments -> TypeError
                dat[c] = DataFrame(index=f[f'{c}/t'],
                                   columns=['snr', 'band'],
                                   data=column_stack((f[f'{c}/snr'],
                                                      f[f'{c}/band'])))
    else:
        raise ValueError(f'{fn} is not a known type to this program')

    print(f'done loading {fn}')
    #%% extract only data relevant to our callsign on selected bands
    # NOTE(review): for the .h5 branch `dat` is a dict of DataFrames, which the
    # filtering below does not handle -- confirm intended .h5 usage
    i = in1d(dat['band'], band) & ((dat['rxcall'] == callsign)
                                   | (dat['txcall'] == callsign))

    if call2 is not None:
        i &= in1d(dat['rxcall'], call2)

    if tlim is not None:
        tlim = [forceutc(parse(t)) for t in tlim]
        i &= (dat.t >= tlim[0]) & (dat.t <= tlim[1])

    dat = dat.loc[i, :]
    #%% sanitize multiple reports in same minute
    print('cleaning data')
    dat = cleandistortion(dat, call2)
    #%% compensate SNR -> SNR/Hz/W
    dat['snr'] += round(10 * log10(refbw))  # snr/Hz
    dat['snr'] += refdbm - dat['power']  # snr/Hz/W

    return dat