def rd_solpwr(url='http://data.magnumenergy.com/MW5127'):
    '''Reads the data from the solar power station at Ant 12 or 13, which is sent
       to the Magnum Energy web site, which then serves it to us at the address:
         Ant12: http://data.magnumenergy.com/MW5127.
         Ant13: http://data.magnumenergy.com/MW5241.
       Now returns a single dictionary, for whichever station is pointed to by url
    '''
    # Read and decode the information from the power station pointed to by url
    try:
        f = urllib2.urlopen(url,timeout=0.2)
    except:
        # Timeout error
        print Time.now().iso,'Solar Power connection timed out'
        solpwr = {}
        return solpwr
    try:
        lines = f.readlines()
    except:
        print Time.now().iso,'Solar Power readlines timed out'
        lines = None
    f.close()
    solpwr = {}
    if lines is None:
        return solpwr
    for i,line in enumerate(lines):
        if i > 185:
            break
        if line.find('Data Date:<') > 0:
            t = Time(lines[i+2][23:23+19])
            solpwr.update({'Time':t.lv})
        elif line.find('State of Charge:<') > 0:
            idx = lines[i+2].find('%')
            solpwr.update({'Charge':int(lines[i+2][:idx])})
        elif line.find('volts / amps') > 0:
            args = lines[i+2].split(' ')
            solpwr.update({'Volts':float(args[0])})
            solpwr.update({'Amps':float(args[3])})
        elif line.find('Amp Hours') > 0:
            solpwr.update({'AmpHours':int(lines[i+2].split(' ')[0])})
        elif line.find('Battery Temperature') > 0:
            btemp = lines[i][lines[i].find('<td>')+4:lines[i].find('°C')]
            try:
                solpwr.update({'BatteryTemp':int(btemp)})
            except:
                # When value is below 0 C, web page returns < 0 C, which cannot be set to int,
                # so just set to -1 C on error.
                solpwr.update({'BatteryTemp':-1})
        elif line.find('Transformer Temperature') > 0:
            # Take the (up to two-digit) temperature immediately preceding the degree symbol
            solpwr.update({'TransformerTemp':int(lines[i].split('°C')[0][-2:])})
        elif line.find('FET Temperature') > 0:
            solpwr.update({'FETTemp':int(lines[i].split('°C')[0][-2:])})
    return solpwr
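# Usage sketch (illustrative, not part of the original code): poll both solar power
# stations with rd_solpwr() above and print a few fields.  The two URLs are the ones
# documented in the docstring; the helper name below is hypothetical.
def example_rd_solpwr():
    urls = {'Ant12': 'http://data.magnumenergy.com/MW5127',
            'Ant13': 'http://data.magnumenergy.com/MW5241'}
    for ant in sorted(urls):
        solpwr = rd_solpwr(urls[ant])   # returns {} on a connection timeout
        if solpwr:
            print('%s: charge %s%%, %s V, %s A' % (ant, solpwr.get('Charge'),
                                                   solpwr.get('Volts'), solpwr.get('Amps')))
        else:
            print('%s: no data returned' % ant)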
def SaveLoRX(self): # Send saved Ant 14 delays as Ant 15 to the SQL database and the ACC (if the data source is not 'Simulation') # Note, ONLY Ant 14 delay is changed, and it is ascribed to Ant 15. No other delay changes are made. if self.data_source == 'Data': if (Time.now().mjd - self.time.mjd) > 1.0: question = "Warning: Data more than a day old. Are you sure you want to save delays to SQL and ACC?" else: question = "Save delays to SQL and ACC?" import cal_header as ch delays = self.delays[0] - self.delays delays = np.append(delays,self.delays[0]) # Have to change the sign of Ant 14 Y-X delay, hence the minus sign xydelays = np.append(self.xydelays,-float(self.dla14.get())) # Do not change delays where both delays and xydelays are zero # which is taken as a missing antenna bad, = np.where(self.delays == 0) bad2, = np.where(self.xydelays == 0) idx1,idx2 = common_val_idx(bad,bad2) delays[bad[idx1]] = 0.0 # Check that the delays to all antennas except Ant 14 are zero, or give warning if not for i in range(13): if delays[i] != 0.0: question = "Some Ant1-13 delays are not 0, but will NOT be updated. Save anyway?" break if xydelays[i] != 0.0: question = "Some Ant1-13 delays are not 0, but will NOT be updated. Save anyway?" break if askyesno("Write Delays",question): # All Y-X delays need a sign flip, hence the minus sign ch.dla_update2sql(delays,-xydelays,lorx=True) #ch.dla_update2sql(-delays,xydelays) # 300 MHz design uses flipped signs! ch.dla_censql2table()
def rstn2ant(frq,flux,fmhz,t=None,twometer=True): ''' Takes 9-element list of frequencies and corresponding flux densities from a call to rd_rstnflux(), and fits a 2nd-degree polynomial to the last 6 frequency-flux density pairs. Returns the values of the polynomial fit, adjusted for the 2.1-m antenna nominal beam size (if twometer ==True), evaluated at the frequencies given in the supplied frequency list fmhz. Optional input parameter t is a Time() object with the date of the RSTN fluxes, which is used to determine the solar disk radius, used in the beam-size adjustment. If omitted, today's date is used. ''' if t is None: t = Time.now() # Perform 2nd-degree polynomial fit to input flux density values # Note that p is a polynomial object that returns the flux values # at the frequencies in its argument idx = np.isfinite(frq[3:]) & np.isfinite(flux[3:]) p = np.poly1d(np.polyfit(frq[3:][idx],flux[3:][idx],2)) # Get size of this day's solar disk pa,b0,r = sun_pos.get_pb0r(t.mjd,arcsec=True) rp = (r+10.)/960. # Radius of radio Sun in units of nominal 960" (10" added for radio limb) ''' Convert total flux to flux measured by the antenna, as follows: ; ; Assuming we are pointed exactly at disk center, and that the Sun ; is a nice, flat disk (valid only at high frequencies, but that ; is where it makes the most difference, and then only for 2-m ants) ; the actual flux measured is a gaussian (primary beam) truncated ; by the solar disk. The measured flux S is related to the total ; flux, S_0, by ; / R_0 ; S = 2 S_0 / R_0^2 \ r exp[-(r/alpha)^2] dr ; / 0 ; ; where R_0 is the angular solar radius, alpha is the 1/e primary ; beam width, and the integral over azimuthal angle: ; / 2 pi ; \ dphi = 2 pi ; / 0 ; has already been performed. ; ; The integral can be evaluated to yield: ; S = S_0 (alpha/R_0)^2 { 1 - exp[-(R_0/alpha)^2]} ; ; The half-power-beam-width HPBW = 2 sqrt(ln 2) alpha = 46.5'/f_GHz ; for the 27m dishes, or 13.5 times this for 2m dishes. Thus, ; ; alpha/R_0 = 27.9'/f_GHz/ 16.0' r' = 1.745/(f_GHz r') ; ; for the 27 m antennas, or 12.857 times this for 2.1m dishes, where ; r' is the angular solar radius in units of the nominal 16.0'(960"). ''' const = 1.745 if twometer: const = const*12.857 arg = (const/(fmhz*rp/1000.))**2 s = p(fmhz)*arg*(1.0-np.exp(-1./arg)) return s
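# Usage sketch (illustrative, not part of the original code): chain rd_rstnflux()
# (defined near the end of this collection) with rstn2ant() to estimate the flux a
# 2.1-m dish would measure.  The date and the 3.5-18 GHz grid below are example
# values only; the helper name is hypothetical.
def example_rstn2ant():
    import numpy as np
    from util import Time
    t = Time('2019-07-01')
    frq, flux = rd_rstnflux(t)               # 9 RSTN frequencies and flux densities
    if frq is not None:
        fmhz = np.linspace(3500., 18000., 50)    # evaluation frequencies in MHz
        s = rstn2ant(frq, flux, fmhz, t)         # beam-corrected flux for a 2.1-m antenna
        print('Estimated 2.1-m flux at %.2f GHz: %.1f sfu' % (fmhz[0] / 1000., s[0]))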
def dpp_status(): datstr = Time.now().iso[:10].replace('-','') out1 = sort(glob.glob('/data1/IDB/IDB'+datstr+'*'))[-5:] command = 'python dpp_eth_mon.py' out2 = subprocess.check_output(command.split(),stderr=subprocess.STDOUT).split('\n')[0] command = 'ps -C dppxmp4' ps = subprocess.Popen(command.split(),stdout=subprocess.PIPE) out3 = ps.communicate()[0].split('\n')[1] return out1, out2, out3
def eovsa_lst(tin=None): ''' Input is a Time() object (or None to use current time). Returns local sidereal time for EOVSA. NB: Now returns LST in radians, not as an RA_Angle() ''' if tin is None: tin = Time.now() aa = eovsa_array() aa.set_jultime(tin.jd) return aa.sidereal_time()
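# Usage sketch (illustrative, not part of the original code): eovsa_lst() now returns
# radians, so convert to hours for display.  The date/time and helper name are
# arbitrary examples.
def example_eovsa_lst():
    import numpy as np
    from util import Time
    t = Time('2019-07-01 20:00')
    lst_hours = eovsa_lst(t) * 12. / np.pi   # 2*pi radians = 24 sidereal hours
    print('LST at EOVSA: %.4f h' % lst_hours)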
def date_next(self, event): w = event.widget date = Time(Time(w.get()).mjd + 1, format='mjd').iso[0:10] w.delete(0,Tk.END) w.insert(0, date) self.refcal_btn.configure(state=Tk.DISABLED) self.refcalset_btn.configure(state=Tk.DISABLED) self.phacal_btn.configure(state=Tk.DISABLED) #self.pc_resultbox.delete(0,Tk.END) fig, ax = self.ab_fig_info im = ax.pcolormesh(np.arange(14),np.arange(35),np.zeros((34,13))) self.ab_text.set_text('No scan selected') ax.set_title('') fig.suptitle('') self.ab_fig_info[0].canvas.draw() self.pc_scanbox.delete(0,Tk.END) self.resultvar.set('No Scan Selected') self.ref_selected = None self.scan_selected = None self.band_selected = None
def filter_hed_to_eosin(np_img, output_type="uint8"): """ Obtain Eosin channel from HED NumPy array and rescale it (for example, to 0 to 255 for uint8) for increased contrast. Args: np_img: HED image as a NumPy array. output_type: Type of array to return (float or uint8). Returns: NumPy array for Eosin channel. """ t = Time() eosin = np_img[:, :, 1] if output_type == "float": eosin = sk_exposure.rescale_intensity(eosin, out_range=(0.0, 1.0)) else: eosin = (sk_exposure.rescale_intensity(eosin, out_range=(0, 255))).astype("uint8") util.np_info(eosin, "HED to Eosin", t.elapsed()) return eosin
def filter_rgb_to_hed(np_img, output_type="uint8"): """ Filter RGB channels to HED (Hematoxylin - Eosin - Diaminobenzidine) channels. Args: np_img: RGB image as a NumPy array. output_type: Type of array to return (float or uint8). Returns: NumPy array (float or uint8) with HED channels. """ t = Time() hed = sk_color.rgb2hed(np_img) if output_type == "float": hed = sk_exposure.rescale_intensity(hed, out_range=(0.0, 1.0)) else: hed = (sk_exposure.rescale_intensity(hed, out_range=(0, 255))).astype("uint8") util.np_info(hed, "RGB to HED", t.elapsed()) return hed
def get_sql_info(trange): ''' Get all antenna information from the SQL database for a given timerange, including TrackFlag and Parallactic Angle ''' cursor = db.get_cursor() sqldict = db.get_dbrecs(cursor, dimension=15, timestamp=trange) azeldict = stateframe.azel_from_sqldict(sqldict) time = Time(sqldict['Timestamp'][:, 0].astype(int), format='lv') azeldict.update({'Time': time}) cursor.close() return azeldict
def apply_unrot_new(filename): import read_idb as ri import dbutil as db import copy from util import lobe, Time import matplotlib.pylab as plt import numpy as np blah = np.load('/common/tmp/Feed_rotation/20171223001448_delay_phase.npz') dph = blah['dph'] fghz = blah['fghz'] xi_rot = blah['xi_rot'] out = ri.read_npz([filename]) nbl, npol, nfrq, nt = out['x'].shape # Correct data for phase #n = [0,0,0,1,1,0,1,0,1,1,0,0,0] for i in range(13): a1 = lobe(dph[i] - dph[13]) a2 = -dph[13] - xi_rot a3 = dph[i] - xi_rot + np.pi for j in range(nt): out['x'][ri.bl2ord[i,13],1,:,j] *= np.exp(1j*a1) out['x'][ri.bl2ord[i,13],2,:,j] *= np.exp(1j*a2) out['x'][ri.bl2ord[i,13],3,:,j] *= np.exp(1j*a3) trange = Time(out['time'][[0,-1]],format='jd') times, chi = db.get_chi(trange) nskip = len(times)/nt chi = np.transpose(chi[::nskip+1]) chi[[8,9,10,12]] = 0.0 outp = copy.deepcopy(out) for i in range(nt): for k in range(13): outp['x'][ri.bl2ord[k,13],0,:,i] = out['x'][ri.bl2ord[k,13],0,:,i]*np.cos(chi[k,i]) + out['x'][ri.bl2ord[k,13],3,:,i]*np.sin(chi[k,i]) outp['x'][ri.bl2ord[k,13],2,:,i] = out['x'][ri.bl2ord[k,13],2,:,i]*np.cos(chi[k,i]) + out['x'][ri.bl2ord[k,13],1,:,i]*np.sin(chi[k,i]) outp['x'][ri.bl2ord[k,13],3,:,i] = out['x'][ri.bl2ord[k,13],3,:,i]*np.cos(chi[k,i]) - out['x'][ri.bl2ord[k,13],0,:,i]*np.sin(chi[k,i]) outp['x'][ri.bl2ord[k,13],1,:,i] = out['x'][ri.bl2ord[k,13],1,:,i]*np.cos(chi[k,i]) - out['x'][ri.bl2ord[k,13],2,:,i]*np.sin(chi[k,i]) amp0 = np.abs(np.sum(out['x'][ri.bl2ord[:13,13]],3)) amp2 = np.abs(np.sum(outp['x'][ri.bl2ord[:13,13]],3)) f, ax = plt.subplots(4,13) for i in range(13): for j in range(4): ax[j,i].cla() ax[j,i].plot(fghz, amp0[i,j],'.',color='lightgreen') ax[j,i].plot(fghz, amp2[i,j],'k.') ax[j,i].set_ylim(0,10) ph0 = np.angle(np.sum(out['x'][ri.bl2ord[:13,13]],3)) ph2 = np.angle(np.sum(outp['x'][ri.bl2ord[:13,13]],3)) f, ax = plt.subplots(4,13) for i in range(13): for j in range(4): ax[j,i].cla() ax[j,i].plot(fghz, ph0[i,j],'.',color='lightgreen') ax[j,i].plot(fghz, ph2[i,j],'k.')
def rstnfluxfrom45daynoaa(dt):
    """This extracts the requested day's RSTN noon flux data from:
    ftp://ftp.swpc.noaa.gov/pub/lists/radio/45day_rad.txt

    A list is returned with each element as follows:
    0 - timestamp: Astropy Time which is the date on which the data was collected.
        This should match dt
    1 - freq: A float32 numpy array containing the 9 frequencies in GHz
    2 - data: The flux data which is a 9x7 int16 numpy array.

    If the ftp failed or the specified date is not found None is returned."""
    noaa_url = 'ftp://ftp.swpc.noaa.gov/pub/lists/radio/45day_rad.txt'
    try:
        f = urllib2.urlopen(noaa_url)
        lines = f.readlines()
    except:
        print "Could not read from ", noaa_url
        return None
    # Use enumerate to provide the counter "i" along with each line while searching
    # for the "MHZ" header that marks the start of the data
    for i, line in enumerate(lines):
        line = line.strip()
        if line[:3] == 'MHZ':
            break
    if i + 1 == len(lines):
        print "No data found in ", noaa_url
        return None
    lines = lines[i + 1:]
    lines = np.array(lines)  # Converts to numpy array, for where()
    clean_lines = lines[np.where(lines != '\n')]  # Eliminates all empty lines
    clean_lines = [l.replace('\n', '') for l in clean_lines]  # Removes \n in remaining lines
    for i in range(0, len(clean_lines), 10):
        datestr = "%04d-%02d-%02d" % (int(clean_lines[i][0:4]),
                                      strptime(clean_lines[i][5:8], '%b').tm_mon,
                                      int(clean_lines[i][9:]))
        timestamp = Time(datestr, out_subfmt='date')
        if np.floor(dt.mjd) == np.floor(timestamp.mjd):
            data = np.zeros((9, 7), dtype=np.int16)
            for j in range(1, 10):
                d = clean_lines[i + j].split()
                data[j - 1] = list(map(int, d[1:8]))
            print "Data successfully read from ", noaa_url, " for date ", dt.iso
            # freq is assumed to be a module-level array of the 9 RSTN frequencies
            return [timestamp, freq, data]
    print "No data found for date ", dt.iso, " in ", noaa_url
    return None
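# Usage sketch (illustrative, not part of the original code): fetch one day's entry
# from the NOAA 45-day file with rstnfluxfrom45daynoaa() above.  The date is arbitrary
# and must lie within the 45-day window for the call to succeed.
def example_rstnfluxfrom45daynoaa():
    from util import Time
    result = rstnfluxfrom45daynoaa(Time('2019-07-01'))
    if result is not None:
        timestamp, freq, data = result
        print('RSTN data for %s, flux table shape %s' % (timestamp.iso, str(data.shape)))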
def filter_rgb_to_grayscale(np_img, output_type="uint8"): """ Convert an RGB NumPy array to a grayscale NumPy array. Shape (h, w, c) to (h, w). Args: np_img: RGB Image as a NumPy array. output_type: Type of array to return (float or uint8) Returns: Grayscale image as NumPy array with shape (h, w). """ t = Time() # Another common RGB ratio possibility: [0.299, 0.587, 0.114] grayscale = np.dot(np_img[..., :3], [0.2125, 0.7154, 0.0721]) if output_type != "float": grayscale = grayscale.astype("uint8") util.np_info(grayscale, "Gray", t.elapsed()) return grayscale
def weather(attempt=0): '''Read the http://wx.cm.pvt/latestsampledata.xml page and take the title and the information and put it in dictionary form''' try: f = urllib2.urlopen('http://wx.cm.pvt/latestsampledata.xml',timeout=0.4) except: # Timeout error print Time.now().iso,'Weather connection timed out' return {} try: #tree = ET.parse(f) line = f.readline() if line.find('</oriondata>') == -1: # Line is often truncated, so fix it if possible line = line[:line.find('</o')]+'</oriondata>' print Time.now().iso,'Fixed Weather info' #tree = ET.XML(line) except: # Error reading weather info, so return blank dictionary print Time.now().iso,'Problem reading Weather info' return {} f.close() #root = tree.getroot() try: root = ET.XML(line) except: if attempt == 0: # Try again, then bail if it doesn't work return weather(attempt=1) # Error reading weather info, so return blank dictionary print Time.now().iso,'Problem parsing Weather info' return {} index = 0 ovro_dict = {} for element in root.findall('meas'): name = element.get('name') text = root[index].text ovro_dict.update({name : text}) index = index + 1 # Convert pressure in inches Hg to mBar try: temp = ovro_dict['mtRawBaromPress'] temp = float(temp) * 33.8637526 except: return ovro_dict ovro_dict['mtRawBaromPress'] = str(temp) return ovro_dict
def plotsfdata_anta(fld, trange, plottitle=None, ignore=None, interval=None, rng=None, ylabel=None):
    '''This function takes in a list of stateframe parameters specific to antenna A (14)
       and a time range, and plots the parameters against time.

       fld is a list of parameters
       trange is a list of two times, represented as iso strings, giving the start and
         end of the data to be retrieved.
       plottitle is an optional title to be displayed on the plot.
       ignore is an optional parameter that will remove any data points whose y values
         have the same value as ignore.  If not present, all values will be plotted.

       It returns a dictionary with the extracted data.  If an error occurred it returns None'''
    import matplotlib.pyplot as plt
    data, msg = loadsfdata_anta(fld, trange, interval)
    print msg
    if msg != "Success":
        return None
    if ignore is not None:
        for f in fld:
            data[f] = np.ma.masked_where(data[f] == ignore, data[f])
    dt = np.array(Time(data['Timestamp'].astype(float), format='lv').isot, dtype='datetime64')
    handles = []
    for f in fld:
        a, = plt.plot(dt, data[f].astype(float), label=f)
        handles.append(a)
    if ylabel is None:
        plt.ylabel('Value')
    else:
        plt.ylabel(ylabel)
    plt.xlabel('Universal Time')
    if rng is not None:
        plt.ylim(rng[0], rng[1])
    if plottitle is not None:
        plt.title(plottitle)
    plt.legend(handles=handles)
    return data
def get_gain_corr(trange, tref=None, fghz=None): ''' Calls get_gain_state() for a timerange and a reference time, and returns the gain difference table to apply to data in the given timerange. If no reference time is provided, the gain state is referred to the nearest earlier REFCAL. Returns a dictionary containing: antgain Array of size (15, 2, nbands, nt) = (nant, npol, nbands, nt) times A Time() object corresponding to the times in antgain ''' if tref is None: # No reference time specified, so get nearest earlier REFCAL xml, buf = ch.read_cal(8, t=trange[0]) tref = Time(stf.extract(buf, xml['Timestamp']), format='lv') # Get the gain state at the reference time (actually median over 1 minute) trefrange = Time([tref.iso, Time(tref.lv + 61, format='lv').iso]) ref_gs = get_gain_state(trefrange) # refcal gain state for 60 s # Get median of refcal gain state (which should be constant anyway) ref_gs['h1'] = np.median(ref_gs['h1'], 1) ref_gs['h2'] = np.median(ref_gs['h2'], 1) ref_gs['v1'] = np.median(ref_gs['v1'], 1) ref_gs['v2'] = np.median(ref_gs['v2'], 1) # Get the gain state of the requested timerange src_gs = get_gain_state(trange) # solar gain state for timerange of file nt = len(src_gs['times']) nbands = src_gs['dcmattn'].shape[2] antgain = np.zeros((15, 2, nbands, nt), np.float32) # Antenna-based gains vs. band for i in range(15): for j in range(nbands): antgain[i, 0, j] = src_gs['h1'][i] + src_gs['h2'][i] - ref_gs[ 'h1'][i] - ref_gs['h2'][i] + src_gs['dcmattn'][ i, 0, j] - ref_gs['dcmattn'][i, 0, j] antgain[i, 1, j] = src_gs['v1'][i] + src_gs['v2'][i] - ref_gs[ 'v1'][i] - ref_gs['v2'][i] + src_gs['dcmattn'][ i, 1, j] - ref_gs['dcmattn'][i, 1, j] return {'antgain': antgain, 'times': src_gs['times']}
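# Usage sketch (illustrative, not part of the original code): request the gain-difference
# table for an hour of data, letting get_gain_corr() pick the nearest earlier REFCAL as
# the reference.  The time range and helper name are arbitrary examples.
def example_get_gain_corr():
    from util import Time
    trange = Time(['2019-07-01 18:00', '2019-07-01 19:00'])
    gcorr = get_gain_corr(trange)
    print('antgain shape (nant, npol, nbands, nt): %s' % str(gcorr['antgain'].shape))
    print('first/last times: %s / %s' % (gcorr['times'][0].iso, gcorr['times'][-1].iso))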
def findscans(trange): '''Identify phasecal scans from UFDB files ''' import dbutil import dump_tsys tstart, tend = trange.lv.astype(int).astype(str) cursor = dbutil.get_cursor() verstr = dbutil.find_table_version(cursor, tstart, True) query = 'select Timestamp,Project,SourceID from hV'+verstr+'_vD1 where left(Project,8) = "PHASECAL" and Timestamp between '+tstart+' and '+tend+' order by Timestamp' projdict, msg = dbutil.do_query(cursor, query) if msg != 'Success': return {'msg':msg} if projdict == {}: return {'msg':'No PHASECAL scans for this day'} tsint = projdict['Timestamp'].astype(int) # Check UFDB file to get duration ufdb = dump_tsys.rd_ufdb(Time(int(tstart),format='lv')) mjd0 = int(Time(int(tstart),format='lv').mjd) mjdnow = int(Time.now().mjd) if mjd0 < mjdnow: # The date is a previous day, so read a second ufdb file # to ensure we have the whole local day try: ufdb2 = dump_tsys.rd_ufdb(Time(int(tstart)+86400.,format='lv')) for key in ufdb.keys(): ufdb.update({key: np.append(ufdb[key], ufdb2[key])}) except: # No previous day, so just skip it. pass ufdb_times = ufdb['ST_TS'].astype(float).astype(int) idx = nearest_val_idx(tsint,ufdb_times) fpath = '/data1/eovsa/fits/UDB/' + trange[0].iso[:4] + '/' dur = [] file = [] for i in idx: dur.append(((ufdb['EN_TS'].astype(float) - ufdb['ST_TS'].astype(float))[i])/60.) file.append(fpath+ufdb['FILE'][i]) # Fix source ID to remove nulls srclist = np.array([str(i.replace('\x00','')) for i in projdict['SourceID']]) return {'Timestamp': tsint, 'SourceID': srclist, 'duration': np.array(dur), 'filelist':np.array(file), 'msg': msg}
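# Usage sketch (illustrative, not part of the original code): list the PHASECAL scans
# found by findscans() for one local observing day.  The time range is an arbitrary example.
def example_findscans():
    from util import Time
    trange = Time(['2019-07-01 13:00', '2019-07-02 03:00'])
    scans = findscans(trange)
    if 'Timestamp' in scans:
        for ts, src, dur in zip(scans['Timestamp'], scans['SourceID'], scans['duration']):
            print('%s  %-12s  %5.1f min' % (Time(ts, format='lv').iso, src, dur))
    else:
        print(scans['msg'])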
def filter_otsu_threshold(np_img, output_type="uint8"): """ Compute Otsu threshold on image as a NumPy array and return binary image based on pixels above threshold. Args: np_img: Image as a NumPy array. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array (bool, float, or uint8) where True, 1.0, and 255 represent a pixel above Otsu threshold. """ t = Time() otsu_thresh_value = sk_filters.threshold_otsu(np_img) otsu = (np_img > otsu_thresh_value) if output_type == "bool": pass elif output_type == "float": otsu = otsu.astype(float) else: otsu = otsu.astype("uint8") * 255 util.np_info(otsu, "Otsu Threshold", t.elapsed()) return otsu
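# Usage sketch (illustrative, not part of the original code): chain
# filter_rgb_to_grayscale() and filter_otsu_threshold() on an RGB tile loaded with
# Pillow.  The file name "tile.png" is a placeholder.
def example_otsu_pipeline():
    import numpy as np
    from PIL import Image
    np_img = np.asarray(Image.open("tile.png").convert("RGB"))
    gray = filter_rgb_to_grayscale(np_img)                     # (h, w) uint8
    mask = filter_otsu_threshold(gray, output_type="bool")
    print("Fraction of pixels above the Otsu threshold: %.3f" % mask.mean())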
def filter_rag_threshold(np_img, compactness=10, n_segments=800, threshold=9): """ Use K-means segmentation to segment RGB image, build region adjacency graph based on the segments, combine similar regions based on threshold value, and then output these resulting region segments. Args: np_img: Binary image as a NumPy array. compactness: Color proximity versus space proximity factor. n_segments: The number of segments. threshold: Threshold value for combining regions. Returns: NumPy array (uint8) representing 3-channel RGB image where each segment has been colored based on the average color for that segment (and similar segments have been combined). """ t = Time() labels = sk_segmentation.slic(np_img, compactness=compactness, n_segments=n_segments) g = sk_future.graph.rag_mean_color(np_img, labels) labels2 = sk_future.graph.cut_threshold(labels, g, threshold) result = sk_color.label2rgb(labels2, np_img, kind='avg') util.np_info(result, "RAG Threshold", t.elapsed()) return result
def a14_wscram(trange): ''' Get the Antenna 14 windscram state for a given time range (returns times and wsram state, 0 = not in wind scram, 1 = in windscram) ''' tstart, tend = [str(i) for i in trange.lv] cursor = get_cursor() query = 'select Timestamp,Ante_Fron_Wind_State from fV65_vD15 where (I15 = 13) and Timestamp between ' + tstart + ' and ' + tend data, msg = do_query(cursor, query) cursor.close() if msg == 'Success': times = Time(data['Timestamp'].astype('int'), format='lv') wscram = data['Ante_Fron_Wind_State'] return times, wscram
def writerstnprev2sql():
    """This routine extracts the previous day's RSTN flux values from NOAA and
    writes them to SQL."""
    nt = Time.now()
    t = Time(np.floor(nt.mjd) - 0.875, format='mjd')
    data = rstnfluxfromcurrentnoaa()
    if data is None:
        # No data extracted, display error message
        print t.iso + ": No data found."
    else:
        # See if data already in database
        pd, sqlt = sql2rstn(t)
        if sqlt is None or np.floor(t.mjd) != np.floor(sqlt.mjd):
            if ch.rstnflux2sql(data, t):
                # If data written to database, display success message
                print t.iso + ": Data successfully written to database."
            else:
                # If data failed to write, show fail message
                print t.iso + ": Failed to write data to database."
        else:
            print t.iso + ": Data already in database."
def sql2phacalX(trange, *args, **kwargs): '''Supply a timestamp in Time format, return the closest phacal data. If a time range is provided, return records within the time range.''' import cal_header as ch import stateframe as stf xml, bufs = ch.read_calX(9, t=trange, *args, **kwargs) if isinstance(bufs, list): phacals = [] for i, buf in enumerate(bufs): try: phacal_flag = stf.extract(buf, xml['Phacal_Flag']) fghz = stf.extract(buf, xml['Fghz']) sigma = stf.extract(buf, xml['Phacal_Sigma']) timestamp = Time(stf.extract(buf, xml['Timestamp']), format='lv') tbg = Time(stf.extract(buf, xml['T_beg']), format='lv') ted = Time(stf.extract(buf, xml['T_end']), format='lv') pha = stf.extract(buf, xml['Phacal_Pha']) amp = stf.extract(buf, xml['Phacal_Amp']) tmp = stf.extract(buf, xml['MBD']) poff, pslope = tmp[:, :, 0], tmp[:, :, 1] flag = stf.extract(buf, xml['Flag'])[:, :, 0] t_ref = Time(stf.extract(buf, xml['T_refcal']), format='lv') phacals.append({'pslope': pslope, 't_pha': timestamp, 'flag': flag, 'poff': poff, 't_ref': t_ref, 'phacal': {'pha': pha, 'amp': amp, 'flag': phacal_flag, 'fghz': fghz, 'sigma': sigma, 'timestamp': timestamp, 't_bg': tbg, 't_ed': ted}}) except: print 'failed to load record {} ---> {}'.format(i + 1, Time(stf.extract(buf, xml['Timestamp']), format='lv').iso) return phacals elif isinstance(bufs, str): phacal_flag = stf.extract(bufs, xml['Phacal_Flag']) fghz = stf.extract(bufs, xml['Fghz']) sigma = stf.extract(bufs, xml['Phacal_Sigma']) timestamp = Time(stf.extract(bufs, xml['Timestamp']), format='lv') tbg = Time(stf.extract(bufs, xml['T_beg']), format='lv') ted = Time(stf.extract(bufs, xml['T_end']), format='lv') pha = stf.extract(bufs, xml['Phacal_Pha']) amp = stf.extract(bufs, xml['Phacal_Amp']) tmp = stf.extract(bufs, xml['MBD']) poff, pslope = tmp[:, :, 0], tmp[:, :, 1] flag = stf.extract(bufs, xml['Flag'])[:, :, 0] t_ref = Time(stf.extract(bufs, xml['T_refcal']), format='lv') return {'pslope': pslope, 't_pha': timestamp, 'flag': flag, 'poff': poff, 't_ref': t_ref, 'phacal': {'pha': pha, 'amp': amp, 'flag': phacal_flag, 'fghz': fghz, 'sigma': sigma, 'timestamp': timestamp, 't_bg': tbg, 't_ed': ted}}
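# Usage sketch (illustrative, not part of the original code): retrieve all phase-cal
# records within a one-day range with sql2phacalX() above.  The time range and helper
# name are arbitrary examples.
def example_sql2phacalX():
    from util import Time
    trange = Time(['2019-07-01 00:00', '2019-07-02 00:00'])
    phacals = sql2phacalX(trange)
    if isinstance(phacals, list):
        for p in phacals:
            print('Phase cal %s referenced to refcal %s' % (p['t_pha'].iso, p['t_ref'].iso))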
def filter_adaptive_equalization(np_img, nbins=256, clip_limit=0.01, output_type="uint8"): """ Filter image (gray or RGB) using adaptive equalization to increase contrast in image, where contrast in local regions is enhanced. Args: np_img: Image as a NumPy array (gray or RGB). nbins: Number of histogram bins. clip_limit: Clipping limit where higher value increases contrast. output_type: Type of array to return (float or uint8). Returns: NumPy array (float or uint8) with contrast enhanced by adaptive equalization. """ t = Time() adapt_equ = sk_exposure.equalize_adapthist(np_img, nbins=nbins, clip_limit=clip_limit) if output_type == "float": pass else: adapt_equ = (adapt_equ * 255).astype("uint8") util.np_info(adapt_equ, "Adapt Equalization", t.elapsed()) return adapt_equ
def mask_info(): """ Display information (such as properties) about masks images. """ #To write the information in Excel sheet import openpyxl wb = openpyxl.load_workbook(xlsx_name) sheet = wb.get_sheet_by_name(sheet_name) print(wb) t = Time() num_train_images = get_num_training_slides() slide_num = 0 sheet['A' + str(slide_num + 1)].value = 'slide_num' sheet['B' + str(slide_num + 1)].value = 'level_count' sheet['C' + str(slide_num + 1)].value = 'level_dimensions' sheet['D' + str(slide_num + 1)].value = 'level_downsamples' sheet['E' + str(slide_num + 1)].value = 'dimensions' for slide_num in range(1, num_train_images): slide_filepath = get_training_slide_path(slide_num) print("\nOpening Slide #%d: %s" % (slide_num, slide_filepath)) sheet['A' + str(slide_num + 1)].value = slide_num slide = open_slide(slide_filepath) print("Level count: %d" % slide.level_count) sheet['B' + str(slide_num + 1)].value = slide.level_count print("Level dimensions: " + str(slide.level_dimensions)) sheet['C' + str(slide_num + 1)].value = str(slide.level_dimensions) print("Level downsamples: " + str(slide.level_downsamples)) sheet['D' + str(slide_num + 1)].value = str(slide.level_downsamples) print("Dimensions: " + str(slide.dimensions)) sheet['E' + str(slide_num + 1)].value = str(slide.dimensions) #to save the info in excel sheet wb.save(xlsx_name) t.elapsed_display()
def eovsa_ha(src, tin=None): ''' Input is a Time() object (or None to use current time). Returns the hour angle of the provided src for an observer at OVRO, for the time in the Time() object tin, or if not given, for the current moment. ''' if tin is None: tin = Time.now() ha = eovsa_lst(tin) - src.ra if ha > pi: ha = ha - 2 * pi elif ha < -pi: ha = 2 * pi + ha return ha
def pcal_anal(trange,path=None): import os import os.path import socket import glob import read_idb as ri if path is None: path = '' out = findfile(trange) host = socket.gethostname() filelist = out['scanlist'] statuslist = out['status'] starttimelist = out['tstlist'] nscans = len(filelist) print 'Found',nscans,'scans to process.' for i in range(nscans): good, = np.where(np.array(statuslist[i]) == 'done') flist = np.array(filelist[i])[good].tolist() # List of "done" files first_file = filelist[i][0] last_file = filelist[i][-1] mjd = ri.fname2mjd(last_file) tdif = Time.now().mjd - mjd if len(good) == len(filelist[i]) and tdif > ten_minutes: # All files in this scan are marked "done", so process the scan only if the plots do not already exist tmark = ri.fname2mjd(first_file) tmarkp = tmark+one_minute tmarkn = tmark-one_minute tmark = Time(tmark,format='mjd').iso.replace('-','').replace(':','').replace(' ','')[:12] tmarkp = Time(tmarkp,format='mjd').iso.replace('-','').replace(':','').replace(' ','')[:12] tmarkn = Time(tmarkn,format='mjd').iso.replace('-','').replace(':','').replace(' ','')[:12] f1 = glob.glob(path + 'pcT*'+tmark+'*.png') f2 = glob.glob(path + 'pcT*'+tmarkp+'*.png') f3 = glob.glob(path + 'pcT*'+tmarkn+'*.png') if f1 == [] and f2 == [] and f3 == []: print 'No files:',tmarkn,tmark,tmarkp,'found.' print 'Processing completed scan',i+1 graph(filelist[i],path=path) else: print 'Scan processing already complete. Skipping scan',i+1 elif len(good) == len(filelist[i]) and tdif < ten_minutes: # All files in this scan are marked "done", but it has been less than 10 min, so process the scan print 'Processing completed scan',i+1 graph(flist,path=path) elif len(good) == len(filelist[i])-1: # This scan is still active, so process all files up to this point. print 'Processing active scan',i+1 graph(flist,path=path)
def allday_process(path=None): ''' Process an all day list of corrected data files to create total power and baseline amplitude FITS spectrograms (planned for submission to NASA SDAC for support of the Parker Solar Probe). Fixed a problem when nans appear in the data--use nanmean() and nanmedian() ''' import glob import read_idb as ri from xspfits2 import tp_writefits if path is None: path = './' files = glob.glob(path + 'IDB*') files.sort() for file in files: out = ri.read_idb([file]) nant, npol, nf, nt = out['p'].shape nant = 13 # Use only data from tracking antennas azeldict = get_sql_info(Time(out['time'], format='jd')[[0, -1]]) idx = nearest_val_idx(out['time'], azeldict['Time'].jd) tracking = azeldict['TrackFlag'].T # Flag any data where the antennas are not tracking for i in range(nant): out['p'][i, :, :, ~tracking[i, idx]] = np.nan # Determine best 8 antennas med = np.nanmean(np.nanmedian(out['p'][:nant], 3), 1) # size nant,nf medspec = np.nanmedian(med, 0) # size nf p = np.polyfit(out['fghz'], medspec, 2) spec = np.polyval(p, out['fghz']).repeat(nant).reshape( nf, nant) # size nf, nant stdev = np.std(med - np.transpose(spec), 1) # size nant idx = stdev.argsort()[:8] # List of 8 best-fitting antennas # Use list of antennas to get final median total power dynamic spectrum med = np.nanmean(np.nanmedian(out['p'][idx], 0), 0) # Write the total power spectrum to a FITS file tp_writefits(out, med.astype(np.float32), filestem='TP_', outpath='./') # Form sum of intermediate baselines baseidx = np.array([ 29, 30, 31, 32, 33, 34, 42, 43, 44, 45, 46, 54, 55, 56, 57, 65, 66, 67, 75, 76, 84 ]) # Get uv distance for mid-time #uvdist = np.sqrt(out['uvw'][:,nt/2,0]**2 + out['uvw'][:,nt/2,1]**2 + out['uvw'][:,nt/2,2]**2) # Sort from low to high uv distance #bah = uvdist.argsort() # Use "intermediate" lengths, i.e. 20th to 39th in list, and sum amplitudes med = np.abs(np.nansum(np.nansum(out['x'][baseidx], 0), 0)) # Write the baseline amplitude spectrum to a FITS file tp_writefits(out, med.astype(np.float32), filestem='XP_', outpath='./')
def onmove(event): if event.inaxes != spectrogram_ax: return i, j = abs(t - event.xdata).argmin(), abs(fghz - event.ydata).argmin() #print 'indexes are t=%f, f=%f'%(i, j) spec.set_data(fghz,tsys[:,i]) p, ffit, sfit = tpfit(np.log(fghz),np.log(tsys[:,i]),sigma=dlogtsys[:,i]) specfit.set_data(np.exp(ffit),np.exp(sfit)) specpt.set_data(fghz[j],tsys[j,i]) lc.set_data(t,tsys[j,:]) lcpt.set_data(t[i],tsys[j,i]) tstr = Time(t[i],format='plot_date').iso[11:19] lctxt.set_text('{:} UT, {:0.3f} GHz, {:0.3f} sfu'.format(tstr,fghz[j],tsys[j,i])) sptxt.set_text('{:} UT, {:0.3f} GHz, {:0.3f} sfu'.format(tstr,fghz[j],tsys[j,i])) fig.canvas.draw()
def apply_color_filter_to_dir(dir, save=True, display=False, hole_size=1000, object_size=600): t = Time() print("Applying filters to images\n") image_list = sorted(os.listdir(dir)) print('Number of images : {}'.format(len(image_list))) # image_list = ['_20190403091259.png'] # image_list = [image_list[11], image_list[22]] # image_list = ['_20190718215800.svs-080-32x-28662x73872-895x2308.png'] for item in tqdm(image_list): apply_color_filter_to_image(item, dir, save=True, display=False, hole_size=hole_size, object_size=object_size) print("Time to apply filters to all images: %s\n" % str(t.elapsed()))
def rd_stateframe(s,sf_num,n_expected): '''Does multiple reads of opened connection s until n_expected bytes are read. sf_num is sent to the ACC to indicate which stateframe to read (1 normally) ''' totlen = 0; totdata = []; data = '' sf_pck = struct.pack(">i",sf_num) s.settimeout(0.5) #sys.stdout.write('+') #sys.stdout.flush() # Flush stdout (/tmp/schedule.log) so we can see the output. try: s.send(sf_pck) #sys.stdout.write('.') #sys.stdout.flush() # Flush stdout (/tmp/schedule.log) so we can see the output. while totlen < n_expected: data = s.recv(n_expected) totdata.append(data) totlen = sum([len(i) for i in totdata]) #sys.stdout.write('-') #sys.stdout.flush() # Flush stdout (/tmp/schedule.log) so we can see the output. except socket.timeout: print Time.now().iso,'Socket time-out when reading stateframe from ACC' return ''.join(totdata)
def filter_entropy(np_img, neighborhood=9, threshold=5, output_type="uint8"): """ Filter image based on entropy (complexity). Args: np_img: Image as a NumPy array. neighborhood: Neighborhood size (defines height and width of 2D array of 1's). threshold: Threshold value. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array (bool, float, or uint8) where True, 1.0, and 255 represent a measure of complexity. """ t = Time() entr = sk_filters.rank.entropy(np_img, np.ones((neighborhood, neighborhood))) > threshold if output_type == "bool": pass elif output_type == "float": entr = entr.astype(float) else: entr = entr.astype("uint8") * 255 util.np_info(entr, "Entropy", t.elapsed()) return entr
def filter_histogram_equalization(np_img, nbins=256, output_type="uint8"): """ Filter image (gray or RGB) using histogram equalization to increase contrast in image. Args: np_img: Image as a NumPy array (gray or RGB). nbins: Number of histogram bins. output_type: Type of array to return (float or uint8). Returns: NumPy array (float or uint8) with contrast enhanced by histogram equalization. """ t = Time() # if uint8 type and nbins is specified, convert to float so that nbins can be a value besides 256 if np_img.dtype == "uint8" and nbins != 256: np_img = np_img / 255 hist_equ = sk_exposure.equalize_hist(np_img, nbins=nbins) if output_type == "float": pass else: hist_equ = (hist_equ * 255).astype("uint8") util.np_info(hist_equ, "Hist Equalization", t.elapsed()) return hist_equ
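# Usage sketch (illustrative, not part of the original code): compare global histogram
# equalization with adaptive equalization on the same grayscale tile.  The file name
# "tile.png" is a placeholder.
def example_equalization():
    import numpy as np
    from PIL import Image
    gray = np.asarray(Image.open("tile.png").convert("L"))
    hist_eq = filter_histogram_equalization(gray)    # global equalization
    adapt_eq = filter_adaptive_equalization(gray)    # locally adaptive (CLAHE-style)
    print("std dev before/global/adaptive: %.1f / %.1f / %.1f"
          % (gray.std(), hist_eq.std(), adapt_eq.std()))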
def filter_threshold(np_img, threshold, output_type="bool"): """ Return mask where a pixel has a value if it exceeds the threshold value. Args: np_img: Binary image as a NumPy array. threshold: The threshold value to exceed. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array representing a mask where a pixel has a value (T, 1.0, or 255) if the corresponding input array pixel exceeds the threshold value. """ t = Time() result = (np_img > threshold) if output_type == "bool": pass elif output_type == "float": result = result.astype(float) else: result = result.astype("uint8") * 255 util.np_info(result, "Threshold", t.elapsed()) return result
def filter_hysteresis_threshold(np_img, low=50, high=100, output_type="uint8"): """ Apply two-level (hysteresis) threshold to an image as a NumPy array, returning a binary image. Args: np_img: Image as a NumPy array. low: Low threshold. high: High threshold. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array (bool, float, or uint8) where True, 1.0, and 255 represent a pixel above hysteresis threshold. """ t = Time() hyst = sk_filters.apply_hysteresis_threshold(np_img, low, high) if output_type == "bool": pass elif output_type == "float": hyst = hyst.astype(float) else: hyst = (255 * hyst).astype("uint8") util.np_info(hyst, "Hysteresis Threshold", t.elapsed()) return hyst
def filter_binary_fill_holes(np_img, output_type="bool"): """ Fill holes in a binary object (bool, float, or uint8). Args: np_img: Binary image as a NumPy array. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array (bool, float, or uint8) where holes have been filled. """ t = Time() if np_img.dtype == "uint8": np_img = np_img / 255 result = sc_morph.binary_fill_holes(np_img) if output_type == "bool": pass elif output_type == "float": result = result.astype(float) else: result = result.astype("uint8") * 255 util.np_info(result, "Binary Fill Holes", t.elapsed()) return result
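# Usage sketch (illustrative, not part of the original code): build a binary mask with
# filter_threshold() and then close small holes with filter_binary_fill_holes().  The
# file name and threshold value are placeholders.
def example_fill_holes():
    import numpy as np
    from PIL import Image
    gray = np.asarray(Image.open("tile.png").convert("L"))
    mask = filter_threshold(gray, threshold=100, output_type="bool")
    filled = filter_binary_fill_holes(mask, output_type="bool")
    print("Pixels added by hole filling: %d" % int(filled.sum() - mask.sum()))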
def singleprocess_apply_filters_to_images(save=True, display=False, html=True, image_num_list=None): """ Apply a set of filters to training images and optionally save and/or display the filtered images. Args: save: If True, save filtered images. display: If True, display filtered images to screen. html: If True, generate HTML page to display filtered images. image_num_list: Optionally specify a list of image slide numbers. """ t = Time() print("Applying filters to images\n") if image_num_list is not None: _, info = apply_filters_to_image_list(image_num_list, save, display) else: num_training_slides = slide.get_num_training_slides() (s, e, info) = apply_filters_to_image_range(0, num_training_slides, save, display) print("Time to apply filters to all images: %s\n" % str(t.elapsed())) if html: generate_filter_html_result(info)
def filter_local_otsu_threshold(np_img, disk_size=3, output_type="uint8"): """ Compute local Otsu threshold for each pixel and return binary image based on pixels being less than the local Otsu threshold. Args: np_img: Image as a NumPy array. disk_size: Radius of the disk structuring element used to compute the Otsu threshold for each pixel. output_type: Type of array to return (bool, float, or uint8). Returns: NumPy array (bool, float, or uint8) where local Otsu threshold values have been applied to original image. """ t = Time() local_otsu = sk_filters.rank.otsu(np_img, sk_morphology.disk(disk_size)) if output_type == "bool": pass elif output_type == "float": local_otsu = local_otsu.astype(float) else: local_otsu = local_otsu.astype("uint8") * 255 util.np_info(local_otsu, "Otsu Local Threshold", t.elapsed()) return local_otsu
def get_sat_info(names=None,doplot=False): import matplotlib.pylab as plt from util import Time f = urllib2.urlopen('http://www.lyngsat.com/tracker/america.html') lines = f.readlines() f.close() found_names = [] # Convert names list (if any) to upper case if not names is None: names = np.array([name.upper() for name in names]) for line in lines: if line.find('<font face="Arial"><font size=2><a href="http://www.lyngsat.com/tracker/') != -1: if line.find('bgcolor=#ffffff') == -1: name = line.split('http://www.lyngsat.com/tracker/')[1].split('.html')[0] if names is None: # If no name list given, mark all satellites as found found = True else: # Satellite is found if name is in names found = len(np.where(name.upper() == names)[0]) == 1 if found: found_names.append(name) out = [] for name in found_names: print 'Reading information for satellite',name, outi = sat_info(name) if outi is not None: out.append(outi) if doplot: for i,sat in enumerate(out): nf = len(sat['freqlist']) plt.plot((float(sat['loc'])-118)*np.ones(nf),sat['freqlist'],'.') plt.text(float(sat['loc'])-118,sat['freqlist'][0],str(i),ha='center',va='top') plt.xlabel('HA [deg]') plt.ylabel('Frequency [MHz]') plt.title('Geosat Information for '+Time.now().iso) return out
def get_calfac(t=None): ''' Read total power and auto-correlation calibration factors from the SQL database, for the time specified by Time() object t, or if None, at the next earlier calibration time to the current time. ''' tpcal_type = 10 # Calibration type specified in cal_header.py if t is None: t = Time.now() xml, buf = ch.read_cal(tpcal_type,t=t) fghz = stateframe.extract(buf,xml['FGHz']) nf = len(fghz) tpcalfac = np.zeros((13,2,nf),np.float) tpoffsun = np.zeros((13,2,nf),np.float) accalfac = np.zeros((13,2,nf),np.float) acoffsun = np.zeros((13,2,nf),np.float) nant = len(xml['Antenna']) for i in range(nant): iant = stateframe.extract(buf,xml['Antenna'][i]['Antnum'])-1 tpcalfac[iant] = stateframe.extract(buf,xml['Antenna'][i]['TPCalfac']) accalfac[iant] = stateframe.extract(buf,xml['Antenna'][i]['ACCalfac']) tpoffsun[iant] = stateframe.extract(buf,xml['Antenna'][i]['TPOffsun']) acoffsun[iant] = stateframe.extract(buf,xml['Antenna'][i]['ACOffsun']) return {'fghz':fghz,'timestamp':stateframe.extract(buf,xml['Timestamp']), 'tpcalfac':tpcalfac,'accalfac':accalfac,'tpoffsun':tpoffsun,'acoffsun':acoffsun}
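# Usage sketch (illustrative, not part of the original code): read the total-power and
# auto-correlation calibration factors nearest to (and earlier than) a given time.  The
# date and helper name are arbitrary examples.
def example_get_calfac():
    from util import Time
    calfac = get_calfac(Time('2019-07-01 20:00'))
    print('Calibration timestamp: %s' % Time(calfac['timestamp'], format='lv').iso)
    print('%d frequencies, tpcalfac shape %s' % (len(calfac['fghz']), str(calfac['tpcalfac'].shape)))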
def Save(self): # Send saved delays to the SQL database and the ACC (if the data source is not 'Simulation') if self.data_source == 'Data': if (Time.now().mjd - self.time.mjd) > 1.0: question = "Warning: Data more than a day old. Are you sure you want to save delays to SQL and ACC?" else: question = "Save delays to SQL and ACC?" import cal_header as ch # Calculate delays relative to Ant 1 and tack Ant1-14 delay at end delays = self.delays[0] - self.delays delays = np.append(delays,self.delays[0]) # Have to change the sign of Ant 14 Y-X delay, hence the minus sign xydelays = np.append(self.xydelays,-float(self.dla14.get())) # Do not change delays where both delays and xydelays are zero # which is taken as a missing antenna bad, = np.where(self.delays == 0) bad2, = np.where(self.xydelays == 0) idx1,idx2 = common_val_idx(bad,bad2) delays[bad[idx1]] = 0.0 if askyesno("Write Delays",question): # All Y-X delays need a sign flip, hence the minus sign ch.dla_update2sql(delays,-xydelays) #ch.dla_update2sql(-delays,xydelays) # 300 MHz design uses flipped signs! ch.dla_censql2table()
xel, el = solpnt.dradec2dazel(y['ra0'],y['dec0'],t1,ramed*user2rad/cosdec,decmed*user2rad) ax[ant/2,ant % 2].text(0.65,0.80,'XEL = {:6.3f}'.format(xel*180./np.pi),transform=ax[ant/2,ant % 2].transAxes) ax[ant/2,ant % 2].text(0.65,0.65,'EL = {:6.3f}'.format(el*180./np.pi),transform=ax[ant/2,ant % 2].transAxes) plt.draw() if __name__ == "__main__": ''' Run automatically via cron job, or at command line. Usage: python /common/python/current/calibration.py "2014-12-15 18:30" where the time string is optional. If omitted, the current time is used. The logic is to check whether there is a new SOLPNTCAL available, and analyze it if so. Compares times from solpnt.find_solpnt() with current time and analyzes any that are between 5 and 10 minutes old. ''' arglist = str(sys.argv) t = Time.now() if len(sys.argv) == 2: try: t = Time(sys.argv[1]) except: print 'Cannot interpret',sys.argv[1],'as a valid date/time string.' exit() timestamp = t.lv # Current timestamp times, tstamp = solpnt.find_solpnt(t) # Find first SOLPNTCAL occurring after timestamp (time given by Time() object) if times == []: # No SOLPNTCAL scans (yet) print t.iso[:19]+': No SOLPNTCAL scans for today' exit() elif type(times[0]) is np.ndarray:
if __name__ == '__main__':
    import matplotlib
    matplotlib.use('Agg')
    path='/common/webplots/phasecal/'
    t1 = Time.now().jd-0.25
    t2 = Time.now().jd
    trange = Time([t1,t2],format='jd')
    print trange.iso
    pcal_anal(trange,path=path)
# Open same filename for writing (overwrites contents if file exists) f = open(filename,'wb') for i in range(len(times)): f.write(struct.pack('dfB',*(times[i].lv,tlevel[i],bflag[i]))) f.close() return times, tlevel, bflag if __name__ == "__main__": ''' For non-interactive use, use a backend that does not require a display Usage python /common/python/current/flare_monitor.py "2014-12-20" ''' import glob, shutil import matplotlib, sys, util matplotlib.use('Agg') import matplotlib.pyplot as plt t = Time.now() print t.iso[:19],': ', if len(sys.argv) == 2: try: t = Time(sys.argv[1]) except: print 'Cannot interpret',sys.argv[1],'as a valid date/time string.' exit() if (t.mjd % 1) < 3./24: # Special case of being run at or before 3 AM (UT), so change to late "yesterday" to finish out # the previous UT day imjd = int(t.mjd) t = Time(float(imjd-0.001),format='mjd') # Check if cross-correlation plot already exists f, ax = plt.subplots(1,1)
def findfiles(trange, projid='PHASECAL', srcid=None): '''identify refcal files ***Optional Keywords*** projid: String--PROJECTID in UFBD records. Default is PHASECAL srcid: String--SOURCEID in UFBD records. Can be a string or a list ''' from util import nearest_val_idx import struct, time, glob, sys, socket import dump_tsys fpath = '/data1/eovsa/fits/UDB/' + trange[0].iso[:4] + '/' t1 = trange[0].to_datetime() t2 = trange[1].to_datetime() daydelta = (t2.date() - t1.date()).days tnow = Time.now() if t1.date() != t2.date(): # End day is different than start day, so read and concatenate two fdb files ufdb = dump_tsys.rd_ufdb(trange[0]) for ll in xrange(daydelta): ufdb2 = dump_tsys.rd_ufdb(Time(trange[0].mjd + ll + 1, format='mjd')) if ufdb2: for key in ufdb.keys(): ufdb.update({key: np.append(ufdb[key], ufdb2[key])}) else: # Both start and end times are on the same day ufdb = dump_tsys.rd_ufdb(trange[0]) if srcid: if type(srcid) is str: srcid = [srcid] sidx_ = np.array([]) for sid in srcid: sidx, = np.where((ufdb['PROJECTID'] == projid) & (ufdb['SOURCEID'] == sid)) sidx_ = np.append(sidx_, sidx) scanidx = np.sort(sidx_).astype('int') else: scanidx, = np.where(ufdb['PROJECTID'] == projid) # List of scan start times tslist = Time(ufdb['ST_TS'][scanidx].astype(float).astype(int), format='lv') # List of PHASECAL scan end times telist = Time(ufdb['EN_TS'][scanidx].astype(float).astype(int), format='lv') k = 0 # Number of scans within timerange m = 0 # Pointer to first scan within timerange flist = [] status = [] tstlist = [] tedlist = [] srclist = [] for i in range(len(tslist)): if tslist[i].jd >= trange[0].jd and tslist[i].jd <= trange[1].jd: flist.append(fpath + ufdb['FILE'][scanidx[i]].astype('str')) tstlist.append(tslist[i]) tedlist.append(telist[i]) srclist.append(ufdb['SOURCEID'][scanidx[i]]) k += 1 if k == 0: print 'No scans found within given time range for ' + projid return None else: print 'Found', k, 'scans in timerange.' return {'scanlist': flist, 'srclist': srclist, 'tstlist': tstlist, 'tedlist': tedlist}
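# Usage sketch (illustrative, not part of the original code): list the PHASECAL UDB
# scans found by findfiles() for a time range spanning two days.  The time range and
# helper name are arbitrary examples.
def example_findfiles():
    from util import Time
    trange = Time(['2019-07-01 14:00', '2019-07-02 02:00'])
    out = findfiles(trange, projid='PHASECAL')
    if out is not None:
        for f, src in zip(out['scanlist'], out['srclist']):
            print('%-12s  %s' % (src, f))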
def findfile(trange): from util import nearest_val_idx import struct, time, glob, sys, socket import dump_tsys host = socket.gethostname() if host == 'dpp': fpath = '/data1/IDB/' else: fpath = '/data1/eovsa/fits/IDB/' t1 = str(trange[0].mjd) t2 = str(trange[1].mjd) tnow = Time.now() if t1[:5] != t2[:5]: # End day is different than start day, so read and concatenate two fdb files fdb = {} fdb1 = dump_tsys.rd_fdb(trange[0]) fdb2 = dump_tsys.rd_fdb(trange[1]) for key in fdb1.keys(): fdb.update({key:np.append(fdb1[key],fdb2[key])}) else: # Both start and end times are on the same day fdb = dump_tsys.rd_fdb(trange[0]) scanidx, = np.where(fdb['PROJECTID'] == 'PHASECAL') scans,sidx = np.unique(fdb['SCANID'][scanidx],return_index=True) eidx = np.append(sidx[1:],len(scanidx)) - 1 # List of PHASECAL scan start times tslist = Time(fdb['ST_TS'][scanidx[sidx]].astype(float).astype(int),format='lv') # List of PHASECAL scan end times telist = Time(fdb['EN_TS'][scanidx[eidx]].astype(float).astype(int),format='lv') # Remove any bad values (i.e. those with ST_SEC = 0) good, = np.where(fdb['ST_SEC'][scanidx[sidx]] != '0') tslist = tslist[good] telist = telist[good] k = 0 # Number of scans within timerange m = 0 # Pointer to first scan within timerange flist = [] status = [] tstlist = [] for i in range(len(tslist)): if tslist[i].jd >= trange[0].jd and telist[i].jd <= trange[1].jd: # Time is in range, so add it k += 1 else: # Time is too early, so skip it m += 1 if k == 0: print 'No phase calibration data within given time range' return None else: print 'Found',k,'scans in timerange.' for i in range(k): f1 = fdb['FILE'][np.where(fdb['SCANID'] == scans[m+i])].astype('str') f2 = [fpath + f for f in f1] flist.append(f2) tstlist.append(tslist[m+i]) ted = telist[m+i] # Mark all files done except possibly the last fstatus = ['done']*len(f1) # Check if last file end time is less than 10 min ago if (tnow.jd - ted.jd) < (600./86400): # Current time is less than 10 min after this scan fstatus[-1] = 'undone' status.append(fstatus) return {'scanlist':flist,'status':status,'tstlist':tstlist}
def TPcal(x, y, calfac, offsun): ''' Writes Total Power calibration factors and offsun IF level to SQL database (caltype = 1) ''' # ******* # Version has to be updated at same time as xml description in cal_header.py # ******* version = 1.0 fghz = x['fghz'] nf = len(fghz) tstamp = x['ut'][0] # Start time of SOLPNTCAL dims = calfac.shape buf = '' if nf == 448: buf = struct.pack('d',tstamp) buf += struct.pack('d',version) # Case of 448 frequencies only # Array dimension for frequency list buf += struct.pack('I',448) # Frequency list buf += struct.pack('448f',*fghz) # Polarization array (dimension, then two states--XX, YY) buf += struct.pack('Iii',*[2,-5,-6]) # Array dimension for Antenna cluster (2.1 m ants only) buf += struct.pack('I',13) # Empty array for filling in for missing antennas empty = np.zeros(448,'float') for i in range(dims[2]): # Array dimensions for freq/poln for this antenna buf += struct.pack('2I',*[448,2]) # Cal factors for the two polarizations buf += struct.pack('448f',*calfac[0,:,i]) buf += struct.pack('448f',*calfac[1,:,i]) # Array dimensions for freq/poln for this antenna buf += struct.pack('2I',*[448,2]) # Offsun IF level for the two polarizations buf += struct.pack('448f',*offsun[0,:,i]) buf += struct.pack('448f',*offsun[1,:,i]) for i in range(dims[2],13): # Same as above for missing antennas buf += struct.pack('2I',*[448,2]) buf += struct.pack('448f',*empty) buf += struct.pack('448f',*empty) buf += struct.pack('2I',*[448,2]) buf += struct.pack('448f',*empty) buf += struct.pack('448f',*empty) t = Time.now() timestamp = t.lv cursor = dbutil.get_cursor() cursor.execute('insert into aBin (Timestamp,Version,Description,Bin) values (?,?,?,?)', timestamp,1.0+version/10.,'Total Power Calibration',dbutil.stateframedef.pyodbc.Binary(buf)) # ******* # NB! To retrieve these large binary data strings, one must declare text size on select, e.g. # cursor.execute('set textsize 100000 select * from aBin where version = 1.1 ') # where the given size is greater than the size desired. # ******* # Temporarily store in disk file for checking format... #f = open('/tmp/tpcal.dat','wb') #f.write(buf) #f.close() cursor.commit() cursor.close()
def capture_fig(useroach=[1, 2], print_attn=False): import matplotlib.pyplot as plt from util import Time # Select which pair of ROACH boards is used for plot # Grab 1.02 s of data (19200 + 384 packets)*2 to make sure we have at least 1 s of good data if useroach == [1, 2]: iface = "eth2" else: iface = "eth3" ret = sendcmd("/usr/sbin/tcpdump -i " + iface + " -c 39168 -w /home/user/Python/" + iface + ".pcap -s 2000") out = rd_spec(iface + ".pcap", boardID=useroach[0] - 1) # out.shape = (100,4096,8) out2 = rd_spec(iface + ".pcap", boardID=useroach[1] - 1) # out2.shape = (100,4096,8) lines = np.array(list_header(iface + ".pcap", boardID=useroach[0] - 1)) idx1 = np.where(np.char.startswith(lines, "Acc"))[0][0] lines = np.append(lines[idx1 + 1 :], lines[:idx1]) if idx1 == 0: out = out[0:50, :, :] else: out = out[50 - idx1 : 100 - idx1, :, :] ovfl = [] for line in lines: ovfl.append(int(line[30:36])) lines = np.array(list_header(iface + ".pcap", boardID=useroach[1] - 1)) idx2 = np.where(np.char.startswith(lines, "Acc"))[0][0] lines = np.append(lines[idx2 + 1 :], lines[:idx2]) if idx2 == 0: out2 = out2[0:50, :, :] else: out2 = out2[50 - idx2 : 100 - idx2, :, :] ovfl2 = [] for line in lines: ovfl2.append(int(line[30:36])) # Reorganize data order to correspond to the 34 bands of solar.fsq, # putting the repeated bands 1,2,3,4 together for averaging idx = np.array( [ 0, 10, 20, 30, 40, 1, 11, 21, 31, 41, 2, 12, 22, 32, 42, 3, 13, 23, 33, 43, 4, 5, 6, 7, 8, 9, 14, 15, 16, 17, 18, 19, 24, 25, 26, 27, 28, 29, 34, 35, 36, 37, 38, 39, 44, 45, 46, 47, 48, 49, ] ) out = out[(idx + idx1) % 50, :, :] out2 = out2[(idx + idx2) % 50, :, :] # Put board n+1 P data into last four (P-squared) slots of board n, for convenience out[:, :, 4:8] = out2[:, :, 0:4] # Average repeated channels and place into slots 16-19 out[19, :, :] = out[15:20, :, :].sum(0) / 5.0 out[18, :, :] = out[10:15, :, :].sum(0) / 5.0 out[17, :, :] = out[5:10, :, :].sum(0) / 5.0 out[16, :, :] = out[0:5, :, :].sum(0) / 5.0 # Truncate to slots 16-49 (34 slots) out = out[16:, :, :] if print_attn: # At this point, for solar sequence, out[0:34,:,0:8] corresponds to # the power in each antenna/feed on these two ROACHes. Print out the sum # in a nice tabular format: attn = [] for i in range(8): attn_list = (10 * np.log10(out[:, :, i].sum(1) / 7.0)).astype("int") / 2 * 2 # bad = np.where(attn_list < 0)[0] # if len(bad) > 0: # attn_list[bad] = 0 # Append list of 34 attenuations attn.append(attn_list) attn = np.array(attn) attn.shape = (8, 34) return attn ovfl = np.array(ovfl)[idx] ovfl2 = np.array(ovfl2)[idx] ovfl[19] = ovfl[15:20].sum() / 5.0 ovfl[18] = ovfl[10:15].sum() / 5.0 ovfl[17] = ovfl[5:10].sum() / 5.0 ovfl[16] = ovfl[0:5].sum() / 5.0 ovfl2[19] = ovfl2[15:20].sum() / 5.0 ovfl2[18] = ovfl2[10:15].sum() / 5.0 ovfl2[17] = ovfl2[5:10].sum() / 5.0 ovfl2[16] = ovfl2[0:5].sum() / 5.0 return Time.now(), out, ovfl, ovfl2
def rd_ACCfile(): '''Reads key variables from ACC.ini file on ACC (using urllib2) ''' # List of strings to search for s0 = '[Stateframe]' s1 = 'bin size = ' s2 = 'template path = ' n0 = '[Network]' n1 = 'TCP.schedule.port = ' n2 = 'TCP.stateframe.port = ' n3 = 'TCP.schedule.stateframe.port = ' r0 = '[ROACH]' r1 = 'boffile = ' userpass = '******' ACCfile = None if socket.getfqdn().find('solar.pvt') != -1: try: ACCfile = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/ni-rt/startup/acc.ini',timeout=0.5) except: # Timeout error print Time.now().iso,'FTP connection to ACC timed out' # Since this is the HELIOS machine, make a disk copy of ACC.ini in the # current (dropbox) directory. This will be used by other instances of # sf_display() on other machines that do not have access to acc.solar.pvt. try: lines = ACCfile.readlines() o = open('acc.ini','w') for line in lines: o.write(line+'\n') o.close() ACCfile.close() ACCfile = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/ni-rt/startup/acc.ini',timeout=0.5) # Also read XML file for stateframe from ACC, and decode template for later use sf, version = xml_ptrs() except: pass if ACCfile is None: # ACC not reachable? Try reading static files. print 'Cannot ftp ACC.ini. Reading static acc.ini and stateframe.xml from current directory instead.' ACCfile = open('acc.ini','r') # Also read XML file for stateframe from static file, and decode template for later use sf, version = xml_ptrs('stateframe.xml') for line in ACCfile: if s0 in line: # String s0 ([Stateframe]) found for line in ACCfile: if s1 in line: binsize = int(line[len(s1):]) elif s2 in line: xmlpath = line[len(s2):] break elif line == '': break if n0 in line: # String n0 ([Network]) found for line in ACCfile: if n1 in line: scdport = int(line[len(n1):]) elif n2 in line: sfport = int(line[len(n2):]) print '\nConnecting to ACC at port:',sfport elif n3 in line: scdsfport = int(line[len(n3):]) break elif not line: break if r0 in line: # String r0 ([ROACH]) found for line in ACCfile: if r1 in line: boffile = line[len(r1):].strip() elif not line: break ACCfile.close() accdict = {'host':'acc.solar.pvt','binsize':binsize,'xmlpath':xmlpath, 'scdport':scdport,'sfport':sfport,'scdsfport':scdsfport,'sf':sf,'version':version,'boffile':boffile} #if socket.gethostname() != 'helios': # The host is not OVSA, so assume port forwarding of stateframe port # to localhost port 6341 #accdict['host'] = 'localhost' return accdict
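# Usage sketch (illustrative, not part of the original code): combine rd_ACCfile() with
# rd_stateframe() (defined earlier in this collection) to read one stateframe from the
# ACC.  Host, port, and expected size all come from the returned accdict; stateframe
# number 1 is the normal choice per the rd_stateframe() docstring.
def example_read_stateframe():
    import socket
    accdict = rd_ACCfile()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((accdict['host'], accdict['sfport']))
        buf = rd_stateframe(s, 1, accdict['binsize'])
        print('Read %d of %d expected bytes' % (len(buf), accdict['binsize']))
    finally:
        s.close()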
def rd_ACCfile():
    """Reads key variables from ACC.ini file on ACC (using urllib2)
    """
    # List of strings to search for
    s0 = "[Stateframe]"
    s1 = "bin size = "
    s2 = "template path = "
    n0 = "[Network]"
    n1 = "TCP.schedule.port = "
    n2 = "TCP.stateframe.port = "
    n3 = "TCP.schedule.stateframe.port = "
    r0 = "[ROACH]"
    r1 = "boffile = "
    userpass = "******"
    ACCfile = None
    if socket.getfqdn().find("solar.pvt") != -1:
        try:
            ACCfile = urllib2.urlopen(
                "ftp://" + userpass + "acc.solar.pvt/ni-rt/startup/acc.ini", timeout=0.5
            )
        except:
            # Timeout error
            print Time.now().iso, "FTP connection to ACC timed out"
        # Since this is the HELIOS machine, make a disk copy of ACC.ini in the
        # current (dropbox) directory.  This will be used by other instances of
        # sf_display() on other machines that do not have access to acc.solar.pvt.
        try:
            lines = ACCfile.readlines()
            o = open("acc.ini", "w")
            for line in lines:
                o.write(line + "\n")
            o.close()
            ACCfile.close()
            ACCfile = urllib2.urlopen(
                "ftp://" + userpass + "acc.solar.pvt/ni-rt/startup/acc.ini", timeout=0.5
            )
            # Also read XML file for stateframe from ACC, and decode template for later use
            sf, version = xml_ptrs()
        except:
            pass
    if ACCfile is None:
        # ACC not reachable?  Try reading static files.
        print "Cannot ftp ACC.ini.  Reading static acc.ini and stateframe.xml from current directory instead."
        ACCfile = open("acc.ini", "r")
        # Also read XML file for stateframe from static file, and decode template for later use
        sf, version = xml_ptrs("stateframe.xml")
    for line in ACCfile:
        if s0 in line:  # String s0 ([Stateframe]) found
            for line in ACCfile:
                if s1 in line:
                    binsize = int(line[len(s1) :])
                elif s2 in line:
                    xmlpath = line[len(s2) :]
                    break
                elif line == "":
                    break
        if n0 in line:  # String n0 ([Network]) found
            for line in ACCfile:
                if n1 in line:
                    scdport = int(line[len(n1) :])
                elif n2 in line:
                    sfport = int(line[len(n2) :])
                    print "\nConnecting to ACC at port:", sfport
                elif n3 in line:
                    scdsfport = int(line[len(n3) :])
                    break
                elif not line:
                    break
        if r0 in line:  # String r0 ([ROACH]) found
            for line in ACCfile:
                if r1 in line:
                    boffile = line[len(r1) :].strip()
                elif not line:
                    break
    ACCfile.close()
    accdict = {
        "host": "acc.solar.pvt",
        "binsize": binsize,
        "xmlpath": xmlpath,
        "scdport": scdport,
        "sfport": sfport,
        "scdsfport": scdsfport,
        "sf": sf,
        "version": version,
        "boffile": boffile,
    }
    # if socket.gethostname() != 'helios':
    #     The host is not OVSA, so assume port forwarding of stateframe port
    #     to localhost port 6341
    #     accdict['host'] = 'localhost'
    return accdict
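# --- Usage sketch (not part of the original module) ---------------------------
# One way the dictionary returned by rd_ACCfile() above might be combined with
# rd_stateframe() to fetch a single stateframe from the ACC.  It assumes the
# ACC (or the static acc.ini/stateframe.xml copies) is reachable and that
# 'binsize' is the number of bytes in one stateframe; the key names are those
# set in rd_ACCfile().
def _example_read_one_stateframe():
    from util import Time
    accini = rd_ACCfile()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((accini["host"], accini["sfport"]))
        data = rd_stateframe(s, 1, accini["binsize"])
    finally:
        s.close()
    print Time.now().iso, "Read", len(data), "of", accini["binsize"], "stateframe bytes"
    return data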
def rd_rstnflux(t=None,f=None,recur=False):
    ''' Reads the RSTN/Penticton quiet Sun solar flux density for the date specified
        in the Time() object t.  Reads from file handle f, if supplied, or else attempts
        to retrieve the data from NOAA if the date is within 45 days of today.  Otherwise,
        defaults to archive file /common/tmp/txt/radioflux.noa.  Under certain conditions,
        the routine calls itself after setting recur to True.

        On success, returns a 9-element list of frequencies and a 9-element list of
        corresponding flux densities.
        On failure, returns None, None.

        Although this routine could work with only the archive file, accessing the NOAA
        database is retained since it allows the routine to be used anywhere, not just
        on machines with access to the archfile location.
    '''
    # Update these if locations change
    archfile = '/common/tmp/txt/radioflux.noa'
    #noaa_url = 'http://legacy_www.swpc.noaa.gov/ftpdir/lists/radio/45day_rad.txt'
    noaa_url = 'ftp://ftp.swpc.noaa.gov/pub/lists/radio/45day_rad.txt'
    if t is None:
        t = Time.now()
    datstr = t.datetime.strftime("%Y %b %d")
    if f is None:
        today = Time.now()
        if today.mjd - t.mjd > 44:
            recur = True   # Set this to prevent unneeded recursive call
        else:
            try:
                f = urllib2.urlopen(noaa_url)
                print 'Data will be retrieved from NOAA 45-day file.'
                lines = f.readlines()
                if datstr[9] == '0':
                    datstr = datstr[:9]+datstr[10:]
            except:
                print 'NOAA 45-day file not reachable at',noaa_url
                recur = True   # Set this to prevent unneeded recursive call
        if recur:
            try:
                f = open(archfile,'r')
                print 'Data will be retrieved from archive file',archfile,'.'
                lines = f.readlines()
                f.close()
            except:
                print 'Error: Archive file',archfile,'not found.'
                return None, None
    else:
        lines = f.readlines()
        f.close()
    frq = np.zeros(9,'int')
    flux = np.zeros(9,'float')
    for i,line in enumerate(lines):
        if line.find(datstr) != -1:
            for j in range(9):
                dat = lines[i+j+1].split()
                frq[j] = int(dat.pop(0))
                flxarr = np.array(dat,'int')
                good = np.where(flxarr != -1)[0]
                if len(good) != 0:
                    flux[j] = np.median(flxarr[good])
                else:
                    flux[j] = np.nan
            break
    if frq[0] == 0:
        if recur:
            print 'Error: Date',datstr,'not found.'
            return None, None
        else:
            print 'Warning: Date',datstr,'not found in 45-day file at',noaa_url
            print 'Data will be retrieved from archive file.'
            try:
                f = open(archfile,'r')
                frq, flux = rd_rstnflux(t,f,recur=True)
            except:
                print 'Error: File',archfile,'not found.'
                return None, None
    return frq, flux
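# --- Usage sketch (not part of the original module) ---------------------------
# The frequency and flux-density arrays returned by rd_rstnflux() above can be
# interpolated onto other frequencies.  The date and the output frequencies
# below are arbitrary examples; RSTN frequencies are tabulated in MHz and the
# fluxes in sfu.  The log-log interpolation is just one reasonable choice, not
# part of the original routine.
def _example_rstn_interp():
    from util import Time
    frq, flux = rd_rstnflux(Time('2015-06-21'))
    if frq is None:
        print 'No RSTN data available for the requested date'
        return None
    good = ~np.isnan(flux)
    fghz = frq[good]/1000.                      # convert MHz to GHz
    f_out = np.array([1.5, 3.0, 6.0, 9.0])      # arbitrary sample frequencies in GHz
    # Simple log-log interpolation of flux density onto the sample frequencies
    s_out = 10**np.interp(np.log10(f_out), np.log10(fghz), np.log10(flux[good]))
    for f, s in zip(f_out, s_out):
        print '%5.2f GHz: %7.1f sfu' % (f, s)
    return f_out, s_out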
def get_chanmask(fsequence,t=None):
    ''' Given a frequency tuning sequence (specifies which bands are used in each
        of the 50 time slots of the 1-s cycle), read the appropriate RFI-survey file
        and generate the channel mask for flagging.  Optionally provide a Time()
        object with the desired date.  Not sure why this would be wanted, but just
        in case we need to go back to older data...

        Returns a 204800-byte array (50*4096) with 1's where channels are kept, and
        0's where channels are to be flagged.
    '''
    import urllib2, copy
    from util import Time
    userpass = '******'
    if t is None:
        # Get current date
        t = Time.now()
    now = t.iso[:10].replace('-','')
    f = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/parm',timeout=0.5)
    files = f.readlines()
    f.close()
    goodfile = ''
    flist = []
    # Find files that start with 'rfi'
    for file in files:
        fname = file.strip().split()[-1]
        if fname.find('rfi') != -1:
            flist.append(fname)
    flist.sort()
    for file in flist:
        # This line starts with rfi, so interpret rest as a date
        datstr = file[3:-4]
        try:
            idat = int(datstr)
            if int(now) >= int(datstr):
                # Current date is same or later than file date, so this is potentially the file we want
                goodfile = file
        except:
            # Filename began with 'rfi', but date could not be converted to integer, so skip this file
            pass
    bandmask = np.ones((34,4096),'byte')   # start with no flagged channels
    if goodfile != '':
        # Got an appropriate file (last one prior to current date)
        # Format of the file is BAND: List, where BAND is integer 1-34, followed by a colon ':',
        # and List is a comma-separated list of integers or ranges, e.g. 385, 861, 2945-2946, etc.
        # Both bands and channel lists can be in any order.
        RFIfile = urllib2.urlopen('ftp://'+userpass+'acc.solar.pvt/parm/'+goodfile,timeout=0.5)
        lines = RFIfile.readlines()
        for line in lines:
            band, rest = line.split(':')
            chanlist = rest.split(',')
            for chaninfo in chanlist:
                if chaninfo.find('-') == -1:
                    # This is a single channel
                    bandmask[int(band)-1,int(chaninfo)] = 0
                else:
                    # This is a range of channels
                    chans = chaninfo.split('-')
                    bandmask[int(band)-1,np.arange(int(chans[0]),int(chans[1])+1)] = 0
    # We have bandmask, the channel mask as a function of band.  Now use the supplied fsequence
    # to generate chanmask, the mask for the 1-s cycle.  NB: ifbw below is expected to be a
    # module-level variable giving the IF bandwidth in MHz (400. or 600.).
    chanmask = np.ones((50,4096),'byte')
    bands = np.array(fsequence.split(',')).astype('int')-1
    for i,band in enumerate(bands):
        # Transfers the 4096 values for band into the ith slot of chanmask
        chanmask[i,:] = copy.copy(bandmask[band,:])
        if ifbw == 400.:
            chanmask[i,:2148] = 0   # Temporary--flag all overlapped channels for 800 MHz clock
    chanmask.shape = (204800)
    # NB: This line overrides everything done above!  I.e. there are no masked channels.
    if ifbw == 600.:
        chanmask = np.ones(204800,'byte')
    return chanmask
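# --- Usage sketch (not part of the original module) ---------------------------
# A minimal illustration of applying the mask returned by get_chanmask() above
# to one 1-s cycle of data.  The spectra argument and its (50, 4096) shape are
# assumptions for the sake of the example; fsequence is the same comma-separated
# 50-band string passed to get_chanmask().
def _example_apply_chanmask(spectra, fsequence):
    chanmask = get_chanmask(fsequence)
    chanmask.shape = (50, 4096)       # undo the flattening done in get_chanmask()
    return spectra*chanmask           # flagged channels are zeroed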
def capture_fig(useroach=[1,2,5],print_attn=False):
    ''' Three-ROACH variant of capture_fig().  Grabs 1 s of packet data for the
        ROACH boards given in useroach (a three-element list is required, since
        all three boardIDs are read) and returns (Time, out, ovfl), where out is
        a (34, 4096, 12) array ordered by the 34 bands of solar.fsq and ovfl is
        a (50, 3) array of per-slot overflow counts, one column per board.
        If print_attn is True, instead returns a (12, 34) table of suggested
        attenuation values derived from the total power.
    '''
    import matplotlib.pyplot as plt
    from util import Time
    # Select which set of ROACH boards is used for plot
    # Grab 1.02 s of data (19200 + 384 packets)*3 to make sure we have at least 1 s of good data
    npkts = (19200 + 384)*3
    if useroach == [1,2,5]:
        iface = 'eth2'
    else:
        iface = 'eth3'
    ret = sendcmd('/usr/sbin/tcpdump -i '+iface+' -c '+str(npkts)+' -w /home/user/Python/'+iface+'.pcap -s 2000')
    out1 = rd_spec(iface+'.pcap',boardID=useroach[0]-1)
    out2 = rd_spec(iface+'.pcap',boardID=useroach[1]-1)
    out3 = rd_spec(iface+'.pcap',boardID=useroach[2]-1)
    lines = np.array(list_header(iface+'.pcap',boardID=useroach[0]-1))
    idx1 = np.where(np.char.startswith(lines,'Acc'))[0][0]
    lines = np.append(lines[idx1+1:],lines[:idx1])
    if idx1 == 0:
        out1 = out1[0:50,:,:]
    else:
        out1 = out1[50-idx1:100-idx1,:,:]
    ovfl1 = []
    for line in lines:
        ovfl1.append(int(line[30:36]))
    lines = np.array(list_header(iface+'.pcap',boardID=useroach[1]-1))
    idx2 = np.where(np.char.startswith(lines,'Acc'))[0][0]
    lines = np.append(lines[idx2+1:],lines[:idx2])
    if idx2 == 0:
        out2 = out2[0:50,:,:]
    else:
        out2 = out2[50-idx2:100-idx2,:,:]
    ovfl2 = []
    for line in lines:
        ovfl2.append(int(line[30:36]))
    lines = np.array(list_header(iface+'.pcap',boardID=useroach[2]-1))
    idx3 = np.where(np.char.startswith(lines,'Acc'))[0][0]
    lines = np.append(lines[idx3+1:],lines[:idx3])
    if idx3 == 0:
        out3 = out3[0:50,:,:]
    else:
        out3 = out3[50-idx3:100-idx3,:,:]
    ovfl3 = []
    for line in lines:
        ovfl3.append(int(line[30:36]))
    # Reorganize data order to correspond to the 34 bands of solar.fsq,
    # putting the repeated bands 1,2,3,4 together for averaging
    idx = np.array([0,10,20,30,40,1,11,21,31,41,2,12,22,32,42,3,13,23,33,43,
                    4,5,6,7,8,9,14,15,16,17,18,19,24,25,26,27,28,29,34,35,36,
                    37,38,39,44,45,46,47,48,49])
    # Each board's data is shifted by its own starting index (idx1, idx2, idx3)
    out = np.concatenate((out1[(idx+idx1)%50,:,0:4],out2[(idx+idx2)%50,:,0:4],out3[(idx+idx3)%50,:,0:4]),axis=2)
    # Average repeated channels and place into slots 16-19
    out[19,:,:] = out[15:20,:,:].sum(0)/5.
    out[18,:,:] = out[10:15,:,:].sum(0)/5.
    out[17,:,:] = out[5:10,:,:].sum(0)/5.
    out[16,:,:] = out[0:5,:,:].sum(0)/5.
    # Truncate to slots 16-49 (34 slots)
    out = out[16:,:,:]
    if print_attn:
        # At this point, for solar sequence, out[0:34,:,0:12] corresponds to
        # the power in each antenna/feed on these three ROACHes.  Print out the sum
        # in a nice tabular format:
        attn = []
        for i in range(12):
            attn_list = (10*np.log10(out[:,:,i].sum(1)/7.0)).astype('int')/2 * 2
            #bad = np.where(attn_list < 0)[0]
            #if len(bad) > 0:
            #    attn_list[bad] = 0
            # Append list of 34 attenuations
            attn.append(attn_list)
        attn = np.array(attn)
        attn.shape = (12,34)
        return attn
    ovfl1 = np.array(ovfl1)[idx]
    ovfl2 = np.array(ovfl2)[idx]
    ovfl3 = np.array(ovfl3)[idx]
    ovfl1[19] = ovfl1[15:20].sum()/5.
    ovfl1[18] = ovfl1[10:15].sum()/5.
    ovfl1[17] = ovfl1[5:10].sum()/5.
    ovfl1[16] = ovfl1[0:5].sum()/5.
    ovfl2[19] = ovfl2[15:20].sum()/5.
    ovfl2[18] = ovfl2[10:15].sum()/5.
    ovfl2[17] = ovfl2[5:10].sum()/5.
    ovfl2[16] = ovfl2[0:5].sum()/5.
    ovfl3[19] = ovfl3[15:20].sum()/5.
    ovfl3[18] = ovfl3[10:15].sum()/5.
    ovfl3[17] = ovfl3[5:10].sum()/5.
    ovfl3[16] = ovfl3[0:5].sum()/5.
    # Stack overflow counts with one column per ROACH board -> shape (50,3)
    ovfl = np.array((ovfl1,ovfl2,ovfl3)).transpose()
    return Time.now(),out,ovfl
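# --- Usage sketch (not part of the original module) ---------------------------
# Checking the per-slot ADC overflow counts returned by the three-ROACH
# capture_fig() above.  The threshold is an arbitrary example value, not an
# operational limit.
def _example_check_overflow():
    t, out, ovfl = capture_fig(useroach=[1, 2, 5])
    nbad = (ovfl > 100).sum()         # hypothetical threshold of 100 counts
    print t.iso, 'slot/board entries with possible ADC overflow:', nbad
    return nbad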