def get_projects_nosql(t):
    ''' Read all projects from the FDB file for the current date and return a summary
        as a dictionary with keys Timestamp, Project, and EOS (another timestamp)
    '''
    import dump_tsys as dt
    # Timerange is 12 UT to 12 UT on the next day, relative to the day in Time() object t
    trange = Time([int(t.mjd) + 12. / 24, int(t.mjd) + 36. / 24], format='mjd')
    tstart = t.iso[2:10].replace('-', '') + '120000'
    t2 = Time(int(t.mjd) + 1, format='mjd')
    tend = t2.iso[2:10].replace('-', '') + '120000'
    fdb = dt.rd_fdb(t)
    fdb2 = dt.rd_fdb(t2)
    if fdb == {}:
        # No FDB file found, so return empty project dictionary
        print 'No Project data [FDB file] found for the given date.'
        return {}
    if fdb2 != {}:
        # An FDB file also exists for the next day, so concatenate the two dicts into one
        fdb = dict([(k, np.concatenate((fdb.get(k, []), fdb2.get(k, []))))
                    for k in set(fdb) | set(fdb2)])
    # Get "good" indexes for times between 12 UT on date and 12 UT on next date
    gidx, = np.where(np.logical_and(fdb['SCANID'] > tstart, fdb['SCANID'] < tend))
    scanid, idx = np.unique(fdb['SCANID'][gidx], return_index=True)
    sidx = gidx[idx]  # Indexes into fdb for the start of each scan
    # Get the project IDs for scans during the period
    projdict = {'Timestamp': fdb['ST_TS'][sidx].astype(float),
                'Project': fdb['PROJECTID'][sidx],
                'EOS': fdb['EN_TS'][sidx].astype(float)}
    return projdict
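
# Minimal usage sketch (the date is hypothetical; assumes this module's usual
# module-level imports, i.e. numpy as np and astropy's Time, plus FDB files on
# disk).  Not part of the pipeline itself.
def _demo_get_projects_nosql():
    projdict = get_projects_nosql(Time('2019-08-01'))   # illustrative date
    if projdict != {}:
        # Timestamp and EOS are LabVIEW times (float seconds), hence format='lv'
        for ts, proj in zip(projdict['Timestamp'], projdict['Project']):
            print Time(ts, format='lv').iso, proj
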
def allday_udb_corr(trange, outpath='./'):
    ''' Perform udb_corr() on all solar scans in the Time() trange given,
        or the observing day of the date given if trange is a single time.

        The output path name can be given, default is the current path.
    '''
    import dump_tsys as dt
    from util import fname2mjd
    if len(trange) == 1:
        mjd = int(trange.mjd)
        t0, t1 = Time([mjd + 0.5, mjd + 1.2], format='mjd')
    else:
        t0 = trange[0]
        t1 = trange[1]
    fdb = dt.rd_fdb(t0)
    flist = fdb['FILE'][np.where(fdb['PROJECTID'] == 'NormalObserving')]
    if int(t1.mjd) != int(t0.mjd):
        fdb = dt.rd_fdb(t1)
        if fdb != {}:
            flist2 = fdb['FILE'][np.where(fdb['PROJECTID'] == 'NormalObserving')]
            flist = np.concatenate((flist, flist2))
    mjd = fname2mjd(flist)
    idx, = np.where(np.logical_and(mjd >= t0.mjd, mjd <= t1.mjd))
    # Assume we are working on Pipeline, and branch according to filename prefix
    if flist[0][:3] == 'UDB':
        year = Time(mjd[0], format='mjd').iso[:4]
        fdir = '/data1/eovsa/fits/UDB/' + year + '/'
        getdate = False
    elif flist[0][:3] == 'IDB':
        # fdir = '/data1/eovsa/fits/IDB/'
        fdir = get_idbdir(t=t0)
        getdate = True
    for i, file in enumerate(flist[idx]):
        if getdate:
            date = Time(mjd[idx[i]], format='mjd').iso[:10].replace('-', '')
            filename = fdir + date + '/' + file
        else:
            filename = fdir + file
        print 'Processing', filename
        try:
            udb_corr(filename, calibrate=True, outpath=outpath)
        except:
            print 'Error processing', filename, ' Skipping...'
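
# Hedged usage sketch for allday_udb_corr(): correct one observing day of
# solar scans into a hypothetical output directory.  Times and the path are
# illustrative only.
def _demo_allday_udb_corr():
    trange = Time(['2019-08-01 14:00', '2019-08-02 02:00'])
    allday_udb_corr(trange, outpath='/tmp/udbcorr/')   # /tmp/udbcorr/ must already exist
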
def findfile(trange):
    from util import nearest_val_idx
    import struct, time, glob, sys, socket
    import dump_tsys
    host = socket.gethostname()
    if host == 'dpp':
        fpath = '/data1/IDB/'
    else:
        fpath = '/data1/eovsa/fits/IDB/'
    t1 = str(trange[0].mjd)
    t2 = str(trange[1].mjd)
    tnow = Time.now()
    if t1[:5] != t2[:5]:
        # End day is different than start day, so read and concatenate two fdb files
        fdb = {}
        fdb1 = dump_tsys.rd_fdb(trange[0])
        fdb2 = dump_tsys.rd_fdb(trange[1])
        for key in fdb1.keys():
            fdb.update({key: np.append(fdb1[key], fdb2[key])})
    else:
        # Both start and end times are on the same day
        fdb = dump_tsys.rd_fdb(trange[0])
    scanidx, = np.where(fdb['PROJECTID'] == 'PHASECAL')
    scans, sidx = np.unique(fdb['SCANID'][scanidx], return_index=True)
    eidx = np.append(sidx[1:], len(scanidx)) - 1
    # List of PHASECAL scan start times
    tslist = Time(fdb['ST_TS'][scanidx[sidx]].astype(float).astype(int), format='lv')
    # List of PHASECAL scan end times
    telist = Time(fdb['EN_TS'][scanidx[eidx]].astype(float).astype(int), format='lv')
    # Remove any bad values (i.e. those with ST_SEC = 0)
    good, = np.where(fdb['ST_SEC'][scanidx[sidx]] != '0')
    tslist = tslist[good]
    telist = telist[good]
    k = 0  # Number of scans within timerange
    m = 0  # Pointer to first scan within timerange
    flist = []
    status = []
    tstlist = []
    for i in range(len(tslist)):
        if tslist[i].jd >= trange[0].jd and telist[i].jd <= trange[1].jd:
            # Time is in range, so add it
            k += 1
        else:
            # Time is too early, so skip it
            m += 1
    if k == 0:
        print 'No phase calibration data within given time range'
        return None
    else:
        print 'Found', k, 'scans in timerange.'
        for i in range(k):
            f1 = fdb['FILE'][np.where(fdb['SCANID'] == scans[m + i])].astype('str')
            f2 = [fpath + f for f in f1]
            flist.append(f2)
            tstlist.append(tslist[m + i])
            ted = telist[m + i]
            # Mark all files done except possibly the last
            fstatus = ['done'] * len(f1)
            # Check if last file end time is less than 10 min ago
            if (tnow.jd - ted.jd) < (600. / 86400):
                # Current time is less than 10 min after this scan
                fstatus[-1] = 'undone'
            status.append(fstatus)
    return {'scanlist': flist, 'status': status, 'tstlist': tstlist}
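
# Sketch of how findfile()'s return value can be consumed (keys as in the code
# above); the time range is illustrative.
def _demo_findfile():
    res = findfile(Time(['2019-08-01 18:00', '2019-08-01 22:00']))
    if res is not None:
        for files, stat, tst in zip(res['scanlist'], res['status'], res['tstlist']):
            print tst.iso, ':', len(files), 'files,', stat.count('undone'), 'not yet complete'
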
def cal_qual(t=None, savfig=True):
    ''' Check the quality of the total power and gain calibrations for a given date
    '''
    import cal_header as ch
    from stateframe import extract
    import dump_tsys as dt
    import pipeline_cal as pc
    import matplotlib.pylab as plt
    import rstn
    from util import get_idbdir
    import socket
    if t is None:
        t = Time.now()
    mjd = t.mjd
    # First check whether the total power calibration is current
    caltype = 10
    xml, buf = ch.read_cal(caltype, t=t)
    tp_mjd = Time(extract(buf, xml['SQL_timestamp']), format='lv').mjd
    if mjd - tp_mjd > 0.5:
        print 'CAL_QUAL: Warning, TP Calibration not (yet) available for this date.'
    # Find GCAL scan for this date
    fdb = dt.rd_fdb(Time(mjd, format='mjd'))
    gcidx, = np.where(fdb['PROJECTID'] == 'GAINCALTEST')
    if len(gcidx) == 1:
        datadir = get_idbdir(t) + fdb['FILE'][gcidx][0][3:11] + '/'
        # List of GCAL files
        gcalfile = [datadir + i for i in fdb['FILE'][gcidx]]
    else:
        print 'CAL_QUAL: Warning, no GAINCALTEST scan for this date. Will try using the GAINCALTEST from the previous day.'
        fdb = dt.rd_fdb(Time(mjd - 1, format='mjd'))
        gcidx, = np.where(fdb['PROJECTID'] == 'GAINCALTEST')
        if len(gcidx) == 1:
            datadir = get_idbdir(t)
            # Add date path if on pipeline
            # if datadir.find('eovsa') != -1: datadir += fdb['FILE'][gcidx][0][3:11]+'/'
            host = socket.gethostname()
            if host == 'pipeline':
                datadir += fdb['FILE'][gcidx][0][3:11] + '/'
            # List of GCAL files
            gcalfile = [datadir + i for i in fdb['FILE'][gcidx]]
        else:
            print 'CAL_QUAL: Error, no GAINCALTEST scan for previous day.'
            return
    # Find SOLPNTCAL scan for this date
    fdb = dt.rd_fdb(Time(mjd, format='mjd'))
    gcidx, = np.where(fdb['PROJECTID'] == 'SOLPNTCAL')
    if len(gcidx) > 0:
        datadir = get_idbdir(t)
        # Add date path if on pipeline
        # if datadir.find('eovsa') != -1: datadir += fdb['FILE'][gcidx][0][3:11]+'/'
        host = socket.gethostname()
        if host == 'pipeline':
            datadir += fdb['FILE'][gcidx][0][3:11] + '/'
        # List of SOLPNTCAL files
        solpntfile = [datadir + i for i in fdb['FILE'][gcidx]]
    else:
        print 'CAL_QUAL: Error, no SOLPNTCAL scan(s) for this date.'
        return
    files = gcalfile + solpntfile
    outnames = []
    for file in files:
        outnames.append(pc.udb_corr(file, calibrate=True, attncal=True, desat=True))
    out = ri.read_idb(outnames, srcchk=False)
    nt = len(out['time'])
    nf = len(out['fghz'])
    tpfac = 500. / nf
    frq, flux = rstn.rd_rstnflux(t)
    s = rstn.rstn2ant(frq, flux, out['fghz'] * 1000., t)
    fluximg = s.repeat(nt).reshape(nf, nt)
    # First figure: total power (out['p']), saved as the "TP" plot
    f, ax = plt.subplots(4, 7)
    f.set_size_inches(16, 7, forward=True)
    f.tight_layout(rect=[0.0, 0.0, 1, 0.95])
    ax.shape = (2, 14)
    for i in range(13):
        for j in range(2):
            ax[j, i].imshow(out['p'][i, j], aspect='auto', origin='lower', vmax=np.max(s), vmin=0)
            ax[j, i].plot(np.clip(out['p'][i, j, int(nf / 3.)] / tpfac, 0, nf), linewidth=1)
            ax[j, i].plot(np.clip(out['p'][i, j, int(2 * nf / 3.)] / tpfac, 0, nf), linewidth=1)
            ax[j, i].set_title('Ant ' + str(i + 1) + [' X Pol', ' Y Pol'][j], fontsize=10)
    for j in range(2):
        ax[j, 13].imshow(fluximg, aspect='auto', origin='lower', vmax=np.max(s), vmin=0)
        ax[j, 13].set_title('RSTN Flux', fontsize=10)
    for i in range(13):
        for j in range(2):
            ax[j, i].plot(np.clip(fluximg[int(nf / 3.)] / tpfac, 0, nf), '--', linewidth=1, color='C0')
            ax[j, i].plot(np.clip(fluximg[int(2 * nf / 3.)] / tpfac, 0, nf), '--', linewidth=1, color='C1')
    f.suptitle('Total Power Calibration Quality for ' + t.iso[:10])
    date = t.iso[:10].replace('-', '')
    if savfig:
        try:
            plt.savefig('/common/webplots/flaremon/daily/' + date[:4] + '/QUAL_' + date + 'TP.png')
        except:
            plt.savefig('/tmp/' + date[:4] + '/QUAL_' + date + 'TP.png')
            print 'The .png file could not be created in the /common/webplots/flaremon/daily/ folder.'
            print 'A copy was created in /tmp/.'
    # Second figure: same display for the real part of out['a'], saved as the "XP" plot
    f, ax = plt.subplots(4, 7)
    f.set_size_inches(16, 7, forward=True)
    f.tight_layout(rect=[0.0, 0.0, 1, 0.95])
    ax.shape = (2, 14)
    for i in range(13):
        for j in range(2):
            ax[j, i].imshow(np.real(out['a'][i, j]), aspect='auto', origin='lower', vmax=np.max(s), vmin=0)
            ax[j, i].plot(np.clip(np.real(out['a'][i, j, int(nf / 3.)] / tpfac), 0, nf), linewidth=1)
            ax[j, i].plot(np.clip(np.real(out['a'][i, j, int(2 * nf / 3.)] / tpfac), 0, nf), linewidth=1)
            ax[j, i].set_title('Ant ' + str(i + 1) + [' X Pol', ' Y Pol'][j], fontsize=10)
    for j in range(2):
        ax[j, 13].imshow(fluximg, aspect='auto', origin='lower', vmax=np.max(s), vmin=0)
        ax[j, 13].set_title('RSTN Flux', fontsize=10)
    for i in range(13):
        for j in range(2):
            ax[j, i].plot(np.clip(fluximg[int(nf / 3.)] / tpfac, 0, nf), '--', linewidth=1, color='C0')
            ax[j, i].plot(np.clip(fluximg[int(2 * nf / 3.)] / tpfac, 0, nf), '--', linewidth=1, color='C1')
    f.suptitle('Cross-Power Calibration Quality for ' + t.iso[:10])
    date = t.iso[:10].replace('-', '')
    if savfig:
        try:
            plt.savefig('/common/webplots/flaremon/daily/' + date[:4] + '/QUAL_' + date + 'XP.png')
        except:
            plt.savefig('/tmp/' + date[:4] + '/QUAL_' + date + 'XP.png')
            print 'The .png file could not be created in the /common/webplots/flaremon/daily/ folder.'
            print 'A copy was created in /tmp/.'
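
# Usage sketch: run the calibration-quality check for an illustrative date,
# suppressing the .png output so the figures are only displayed interactively.
def _demo_cal_qual():
    cal_qual(t=Time('2019-08-01'), savfig=False)
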
def trange2filelist(trange=[], verbose=False):
    '''This finds all solar IDB files within a timerange.
       Required inputs:
       trange - can be 1) a single string or Time() object in UTC: use the entire day,
                          e.g., '2017-08-01' or Time('2017-08-01').
                          If just a date, find all scans within the same date in local time.
                          If a complete time stamp, find the local date first (which may be
                          different from that provided), and return all scans within that day.
                       2) a range of Time(), e.g., Time(['2017-08-01 00:00','2017-08-01 23:00'])
                       3) None -- use the current date Time.now()
    '''
    import dump_tsys as dtsys
    if trange:
        if type(trange) == list or type(trange) == str:
            try:
                trange = Time(trange)
            except:
                print('trange format not recognised. Abort....')
                return None
    else:
        # No time range given, so default to the current date (per the docstring)
        trange = Time.now()
    # if type(trange) == Time:
    try:
        # if single Time object, the following line would report an error
        nt = len(trange)
        if len(trange) > 1:
            # more than one value
            trange = Time([trange[0], trange[-1]])
        else:
            # single value in a list
            trange = Time(np.array([-1.0, 1.0]) * 5 / 24. / 60. + trange[0].mjd, format='mjd')
    except:
        trange = Time(np.array([-1.0, 1.0]) * 5 / 24. / 60. + trange.mjd, format='mjd')
    t1 = trange[0].datetime
    t2 = trange[1].datetime
    daydelta = (t2.date() - t1.date()).days
    if t1.date() != t2.date():
        # End day is different than start day, so read and concatenate multiple fdb files
        info = dtsys.rd_fdb(trange[0])
        for ll in range(daydelta):
            info2 = dtsys.rd_fdb(Time(trange[0].mjd + ll + 1, format='mjd'))
            if info2:
                for key in info.keys():
                    info.update({key: np.append(info[key], info2[key])})
    else:
        # Both start and end times are on the same day
        info = dtsys.rd_fdb(trange[0])
    sidx = np.where(
        np.logical_and(info['SOURCEID'] == 'Sun', info['PROJECTID'] == 'NormalObserving') &
        np.logical_and(info['ST_TS'].astype(np.float) >= trange[0].lv,
                       info['ST_TS'].astype(np.float) <= trange[1].lv))
    filelist = info['FILE'][sidx]
    if verbose:
        print('{} files found in the time range from {} to {}'.format(
            len(filelist), t1.strftime('%Y-%m-%d %H:%M:%S UT'), t2.strftime('%Y-%m-%d %H:%M:%S UT')))
    idbdir = util.get_idbdir(t1.strftime('%Y-%m-%d'))
    inpath = '{}/{}/'.format(idbdir, trange[0].datetime.strftime("%Y%m%d"))
    filelist = [inpath + ll for ll in filelist]
    return filelist
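
# The accepted trange forms from the docstring above, as a sketch (dates are
# taken from the docstring; a working IDB archive is assumed).
def _demo_trange2filelist():
    files1 = trange2filelist('2017-08-01')                                    # form 1: single date string
    files2 = trange2filelist(Time(['2017-08-01 00:00', '2017-08-01 23:00']))  # form 2: explicit Time() range
    files3 = trange2filelist()                                                # form 3: default to current date
    print len(files2 or []), 'files in the explicit range'
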
def xdata_display(t, ax=None):
    ''' Given the time as a Time object, search the FDB file for files
        associated with the scan for that time and create a dynamic spectrogram
        on the axis specified by ax, or on a new plot if no ax.  If the
        requested time is more than 20 minutes after the last file of that
        scan, returns None to indicate no plot.
    '''
    import time
    import dump_tsys
    import get_X_data as gd
    import spectrogram_fit as sp
    fdb = dump_tsys.rd_fdb(t)
    # Get files from next day, in case scan extends past current day
    t1 = Time(t.mjd + 1, format='mjd')
    fdb1 = dump_tsys.rd_fdb(t1)
    # Concatenate the two days (if the second day exists)
    if fdb1 != {}:
        for key in fdb.keys():
            fdb[key] = np.concatenate((fdb[key], fdb1[key]))
    # Find unique scan IDs
    scans, idx = np.unique(fdb['SCANID'], return_index=True)
    # Limit to scans in 'NormalObserving' mode
    good, = np.where(fdb['PROJECTID'][idx] == 'NormalObserving')
    if len(good) > 0:
        scans = scans[good]
    else:
        print 'No NormalObserving scans found.'
        return None, None, None, None
    # Find scanID that starts earlier than, but closest to, the current time
    for i, scan in enumerate(scans):
        dt = t - Time(time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(scan, '%y%m%d%H%M%S')))
        if dt.sec > 0.:
            iout = i
    scan = scans[iout]
    # Find files for this scan
    fidx, = np.where(fdb['SCANID'] == scan)
    tlevel = None
    bflag = None
    times = None
    if len(fidx) > 0:
        files = fdb['FILE'][fidx]
        # Find out how old last file of this scan is, and proceed only if less than 20 minutes
        # earlier than the time given in t.
        try:
            dt = t - Time(time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(files[-1], 'IDB%Y%m%d%H%M%S')))
        except:
            from astropy.time import TimeDelta
            dt = TimeDelta(10000., format='sec')  # Forces skip of plot creation
            print 'Unexpected FDB file format.'
            scan = None
        if dt.sec < 1200.:
            # This is a currently active scan, so create the figure
            for i in range(len(files)):
                files[i] = '/data1/IDB/' + files[i]
            data, uvw, fghz, times = gd.get_X_data(files)
            if ax is not None:
                datstr = times[0].iso[:10]
                ax.set_xlabel('Time [UT on ' + datstr + ']')
                ax.set_ylabel('Frequency [GHz]')
                ax.set_title('EOVSA Summed Cross-Correlation Amplitude for ' + datstr)
            sp.plot_spectrogram(fghz, times, sum(sum(abs(data[:, :]), 1), 0),
                                ax=ax, logsample=None, xdata=True, cbar=True)
            tlevel, bflag = flaremeter(data)
        else:
            print 'Time', dt.sec, 'is > 1200 s after last file of last NormalObserving scan. No plot created.'
            scan = None
    else:
        print 'No files found for this scan ID', scan
        scan = None
    return scan, tlevel, bflag, times
def get_attncal(trange, do_plot=False):
    ''' Finds GAINCALTEST scans from FDB files corresponding to the days
        present in trange Time() object (can be multiple days), calculates
        the attenuation differences for the various FEMATTN states 1-8
        relative to FEMATTN state 0, and optionally plots the results for
        states 1 and 2 (the most commonly used).  To analyze only a single
        day, trange Time() object can have the same time repeated, or can
        be a single time.

        Returns a list of dictionaries, each pertaining to one of the days
        in trange, with keys defined as follows:
           'time': The start time of the GAINCALTEST scan, as a Time() object
           'fghz': The list of frequencies [GHz] at which attenuations are measured
           'attn': The array of attenuations [dB] of size (nattn, nant, npol, nf),
                     where nattn = 8, nant = 13, npol = 2, and nf is variable
           'rcvr': The array of receiver noise level (raw units) of size
                     (nant, npol, nf), where nant = 13, npol = 2, and nf is variable

        N.B.: Ignores days with other than one GAINCALTEST measurement, e.g. 0 or 2,
              the first is obvious, while the second is because there is no way to
              tell which of the 2 are good.
    '''
    if type(trange.mjd) == np.float:
        # Interpret single time as both start and end time
        mjd1 = int(trange.mjd)
        mjd2 = mjd1
    else:
        mjd1, mjd2 = trange.mjd.astype(int)
    if do_plot:
        import matplotlib.pylab as plt
        f, ax = plt.subplots(4, 13)
        f.set_size_inches((14, 5))
        ax[0, 0].set_ylabel('Atn1X [dB]')
        ax[1, 0].set_ylabel('Atn1Y [dB]')
        ax[2, 0].set_ylabel('Atn2X [dB]')
        ax[3, 0].set_ylabel('Atn2Y [dB]')
        for i in range(13):
            ax[0, i].set_title('Ant ' + str(i + 1))
            ax[3, i].set_xlabel('Freq [GHz]')
            for j in range(2):
                ax[j, i].set_ylim(1, 3)
                ax[j + 2, i].set_ylim(3, 5)
    outdict = []
    for mjd in range(mjd1, mjd2 + 1):
        fdb = dt.rd_fdb(Time(mjd, format='mjd'))
        gcidx, = np.where(fdb['PROJECTID'] == 'GAINCALTEST')
        if len(gcidx) == 1:
            print fdb['FILE'][gcidx]
            file = '/data1/eovsa/fits/IDB/' + fdb['FILE'][gcidx][0][3:11] + '/' + fdb['FILE'][gcidx][0]
            out = ri.read_idb([file])
            # Receiver noise level (raw units), from records 6-11
            vx = np.mean(out['p'][:13, :, :, 6:12], 3)
            # Background-subtracted median power for FEMATTN states 0 through 8
            val0 = np.median(out['p'][:13, :, :, 16:22], 3) - vx
            val1 = np.median(out['p'][:13, :, :, 26:32], 3) - vx
            val2 = np.median(out['p'][:13, :, :, 36:42], 3) - vx
            val3 = np.median(out['p'][:13, :, :, 46:52], 3) - vx
            val4 = np.median(out['p'][:13, :, :, 56:62], 3) - vx
            val5 = np.median(out['p'][:13, :, :, 66:72], 3) - vx
            val6 = np.median(out['p'][:13, :, :, 76:82], 3) - vx
            val7 = np.median(out['p'][:13, :, :, 86:92], 3) - vx
            val8 = np.median(out['p'][:13, :, :, 96:102], 3) - vx
            attn1 = np.log10(val0 / val1) * 10.
            attn2 = np.log10(val0 / val2) * 10.
            attn3 = np.log10(val0 / val3) * 10.
            attn4 = np.log10(val0 / val4) * 10.
            attn5 = np.log10(val0 / val5) * 10.
            attn6 = np.log10(val0 / val6) * 10.
            attn7 = np.log10(val0 / val7) * 10.
            attn8 = np.log10(val0 / val8) * 10.
            if do_plot:
                for i in range(13):
                    for j in range(2):
                        ax[j, i].plot(out['fghz'], attn1[i, j], '.')
                        ax[j + 2, i].plot(out['fghz'], attn2[i, j], '.')
            outdict.append({'time': Time(out['time'][0], format='jd'),
                            'fghz': out['fghz'],
                            'rcvr': vx,
                            'attn': np.array([attn1, attn2, attn3, attn4,
                                              attn5, attn6, attn7, attn8])})
    return outdict
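
# Worked sketch of the attenuation arithmetic used above: with background-
# subtracted power val0 at FEMATTN state 0 and val1 at state 1, the inserted
# attenuation is 10*log10(val0/val1) dB.  The numbers here are made up.
def _demo_attn_arithmetic():
    val0, val1 = 400., 254.                # hypothetical raw power levels
    attn1 = np.log10(val0 / val1) * 10.    # ~1.97 dB for this made-up pair
    print 'attn1 = %.2f dB' % attn1
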
def xdata_display(t, ax=None):
    ''' Given the time as a Time object, search the FDB file for files
        associated with the scan for that time and create a dynamic spectrogram
        on the axis specified by ax, or on a new plot if no ax.  If the
        requested time is more than 20 minutes after the last file of that
        scan, returns None to indicate no plot.

        Skip SK flagging [2017-Mar-20  DG]
    '''
    import time, os
    import dump_tsys
    #import get_X_data2 as gd
    import read_idb as ri
    import spectrogram_fit as sp
    fdb = dump_tsys.rd_fdb(t)
    # Get files from next day, in case scan extends past current day
    t1 = Time(t.mjd + 1, format='mjd')
    fdb1 = dump_tsys.rd_fdb(t1)
    # Concatenate the two days (if the second day exists)
    if fdb1 != {}:
        for key in fdb.keys():
            fdb[key] = np.concatenate((fdb[key], fdb1[key]))
    # Find unique scan IDs
    scans, idx = np.unique(fdb['SCANID'], return_index=True)
    # Limit to scans in 'NormalObserving' mode
    good, = np.where(fdb['PROJECTID'][idx] == 'NormalObserving')
    if len(good) > 0:
        scans = scans[good]
    else:
        print 'No NormalObserving scans found.'
        return None, None, None, None
    # Find scanID that starts earlier than, but closest to, the current time
    for i, scan in enumerate(scans):
        print scan
        dt = t - Time(time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(scan, '%y%m%d%H%M%S')))
        if dt.sec > 0.:
            iout = i
    scan = scans[iout]
    # Find files for this scan
    fidx, = np.where(fdb['SCANID'] == scan)
    tlevel = None
    bflag = None
    times = None
    if len(fidx) > 0:
        files = fdb['FILE'][fidx]
        # Find out how old last file of this scan is, and proceed only if less than 20 minutes
        # earlier than the time given in t.
        try:
            dt = t - Time(time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(files[-1], 'IDB%Y%m%d%H%M%S')))
        except:
            from astropy.time import TimeDelta
            dt = TimeDelta(10000., format='sec')  # Forces skip of plot creation
            print 'Unexpected FDB file format.'
            scan = None
        if dt.sec < 1200.:
            # This is a currently active scan, so create the figure
            path = '/data1/IDB/'
            if not os.path.isdir(path + files[0]):
                # Look in /dppdata1
                datstr = t.iso[:10].replace('-', '')
                # path = '/data1/eovsa/fits/IDB/'+datstr+'/'
                path = get_idbdir(t) + datstr + '/'
                if not os.path.isdir(path + files[0]):
                    print 'No files found for this scan ID', scan
                    scan = None
                    times = None
                    return scan, tlevel, bflag, times
            filelist = files
            files = []
            for i, file in enumerate(filelist):
                files.append(path + file)
            # data, uvw, fghz, times = gd.get_X_data(files)
            out = ri.read_idb(files)
            #out = ri.flag_sk(out)   # Skip flagging for sk
            fghz = out['fghz']
            times = Time(out['time'], format='jd')
            data = out['x']
            if ax is not None:
                datstr = times[0].iso[:10]
                ax.set_xlabel('Time [UT on ' + datstr + ']')
                ax.set_ylabel('Frequency [GHz]')
                ax.set_title('EOVSA Summed Cross-Correlation Amplitude for ' + datstr)
            pdata = np.sum(np.sum(np.abs(data[0:11, :]), 1), 0)  # Spectrogram to plot
            X = np.sort(pdata.flatten())  # Sorted, flattened array
            dmax = X[int(len(X) * 0.95)]  # Clip at 5% of points
            sp.plot_spectrogram(fghz, times, pdata, ax=ax,
                                logsample=None, xdata=True, cbar=True, dmax=dmax)
            #tlevel, bflag = flaremeter(data)
        else:
            print 'Time', dt.sec, 'is > 1200 s after last file of last NormalObserving scan. No plot created.'
            scan = None
            times = None
    else:
        print 'No files found for this scan ID', scan
        scan = None
    return scan, tlevel, bflag, times
def xdata_display(t, ax=None):
    """ Given the time as a Time object, search the FDB file for files
        associated with the scan for that time and create a dynamic spectrogram
        on the axis specified by ax, or on a new plot if no ax.  If the
        requested time is more than 20 minutes after the last file of that
        scan, returns None to indicate no plot.
    """
    import time
    import dump_tsys
    # import get_X_data2 as gd
    import read_idb as ri
    import spectrogram_fit as sp

    fdb = dump_tsys.rd_fdb(t)
    # Get files from next day, in case scan extends past current day
    t1 = Time(t.mjd + 1, format="mjd")
    fdb1 = dump_tsys.rd_fdb(t1)
    # Concatenate the two days (if the second day exists)
    if fdb1 != {}:
        for key in fdb.keys():
            fdb[key] = np.concatenate((fdb[key], fdb1[key]))
    # Find unique scan IDs
    scans, idx = np.unique(fdb["SCANID"], return_index=True)
    # Limit to scans in 'NormalObserving' mode
    good, = np.where(fdb["PROJECTID"][idx] == "NormalObserving")
    if len(good) > 0:
        scans = scans[good]
    else:
        print "No NormalObserving scans found."
        return None, None, None, None
    # Find scanID that starts earlier than, but closest to, the current time
    for i, scan in enumerate(scans):
        dt = t - Time(time.strftime("%Y-%m-%d %H:%M:%S", time.strptime(scan, "%y%m%d%H%M%S")))
        if dt.sec > 0.0:
            iout = i
    scan = scans[iout]
    # Find files for this scan
    fidx, = np.where(fdb["SCANID"] == scan)
    tlevel = None
    bflag = None
    times = None
    if len(fidx) > 0:
        files = fdb["FILE"][fidx]
        # Find out how old last file of this scan is, and proceed only if less than 20 minutes
        # earlier than the time given in t.
        try:
            dt = t - Time(time.strftime("%Y-%m-%d %H:%M:%S", time.strptime(files[-1], "IDB%Y%m%d%H%M%S")))
        except:
            from astropy.time import TimeDelta
            dt = TimeDelta(10000.0, format="sec")  # Forces skip of plot creation
            print "Unexpected FDB file format."
            scan = None
        if dt.sec < 1200.0:
            # This is a currently active scan, so create the figure
            for i in range(len(files)):
                files[i] = "/data1/IDB/" + files[i]
            # data, uvw, fghz, times = gd.get_X_data(files)
            out = ri.read_idb(files)
            out = ri.flag_sk(out)
            fghz = out["fghz"]
            times = Time(out["time"], format="jd")
            data = out["x"]
            if ax is not None:
                datstr = times[0].iso[:10]
                ax.set_xlabel("Time [UT on " + datstr + "]")
                ax.set_ylabel("Frequency [GHz]")
                ax.set_title("EOVSA Summed Cross-Correlation Amplitude for " + datstr)
            sp.plot_spectrogram(
                fghz, times, sum(sum(abs(data[0:11, :]), 1), 0),
                ax=ax, logsample=None, xdata=True, cbar=True
            )
            tlevel, bflag = flaremeter(data)
        else:
            print "Time", dt.sec, "is > 1200 s after last file of last NormalObserving scan. No plot created."
            scan = None
    else:
        print "No files found for this scan ID", scan
        scan = None
    return scan, tlevel, bflag, times
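
# Usage sketch for xdata_display() (the variants above share this calling
# convention): plot the currently active scan on a fresh axis.
def _demo_xdata_display():
    import matplotlib.pylab as plt
    f, ax = plt.subplots(1, 1)
    scan, tlevel, bflag, times = xdata_display(Time.now(), ax=ax)
    if scan is None:
        print 'No active NormalObserving scan; nothing plotted.'
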
def get_attncal(trange, do_plot=False, dataonly=False):
    ''' Finds GAINCALTEST scans from FDB files corresponding to the days
        present in trange Time() object (can be multiple days), calculates
        the attenuation differences for the various FEMATTN states 1-8
        relative to FEMATTN state 0, and optionally plots the results for
        states 1 and 2 (the most commonly used).  To analyze only a single
        day, trange Time() object can have the same time repeated, or can
        be a single time.

        Returns a list of dictionaries, each pertaining to one of the days
        in trange, with keys defined as follows:
           'time':      The start time of the GAINCALTEST scan, as a Time() object
           'fghz':      The list of frequencies [GHz] at which attenuations are measured
           'attn':      The array of attenuations [dB] of size (nattn, nant, npol, nf),
                          where nattn = 8, nant = 13, npol = 2, and nf is variable
           'rcvr':      The array of receiver noise level (raw units) of size
                          (nant, npol, nf), where nant = 13, npol = 2, and nf is variable
           'rcvr_auto': Same as rcvr, but for auto-correlation (hence it is complex)

        N.B.: Ignores days with other than one GAINCALTEST measurement, e.g. 0 or 2,
              the first is obvious, while the second is because there is no way to
              tell which of the 2 are good.

        The dataonly parameter tells the routine to skip calculating the attenuation
        and only return the IDB data from the (first) gaincal.
    '''
    from util import get_idbdir, fname2mjd, nearest_val_idx
    import socket
    import dbutil
    if type(trange.mjd) == np.float:
        # Interpret single time as both start and end time
        mjd1 = int(trange.mjd)
        mjd2 = mjd1
    else:
        mjd1, mjd2 = trange.mjd.astype(int)
    if do_plot:
        import matplotlib.pylab as plt
        f, ax = plt.subplots(4, 13)
        f.set_size_inches((14, 5))
        ax[0, 0].set_ylabel('Atn1X [dB]')
        ax[1, 0].set_ylabel('Atn1Y [dB]')
        ax[2, 0].set_ylabel('Atn2X [dB]')
        ax[3, 0].set_ylabel('Atn2Y [dB]')
        for i in range(13):
            ax[0, i].set_title('Ant ' + str(i + 1))
            ax[3, i].set_xlabel('Freq [GHz]')
            for j in range(2):
                ax[j, i].set_ylim(1, 3)
                ax[j + 2, i].set_ylim(3, 5)
    outdict = []
    for mjd in range(mjd1, mjd2 + 1):
        fdb = dt.rd_fdb(Time(mjd, format='mjd'))
        gcidx, = np.where(fdb['PROJECTID'] == 'GAINCALTEST')
        if len(gcidx) == 1:
            print fdb['FILE'][gcidx]
            gcidx = gcidx[0]
        else:
            for i, fname in enumerate(fdb['FILE'][gcidx]):
                print str(i) + ': GAINCALTEST File', fname
            idex = input('There is more than one GAINCALTEST. Select: ' + str(np.arange(len(gcidx))) + ':')
            gcidx = gcidx[idex]
        datadir = get_idbdir(Time(mjd, format='mjd'))
        # Add date path if on pipeline
        # if datadir.find('eovsa') != -1: datadir += fdb['FILE'][gcidx][3:11]+'/'
        host = socket.gethostname()
        if host == 'pipeline':
            datadir += fdb['FILE'][gcidx][3:11] + '/'
        file = datadir + fdb['FILE'][gcidx]
        out = ri.read_idb([file])
        if dataonly:
            return out
        # Get time from filename and read 120 records of attn state from SQL database
        filemjd = fname2mjd(fdb['FILE'][gcidx])
        cursor = dbutil.get_cursor()
        d15 = dbutil.get_dbrecs(cursor, dimension=15, timestamp=Time(filemjd, format='mjd'), nrecs=120)
        cursor.close()
        # Find time indexes of the 62 dB attn state
        # Uses only ant 1 assuming all are the same
        dtot = (d15['Ante_Fron_FEM_HPol_Atte_Second'] + d15['Ante_Fron_FEM_HPol_Atte_First'])[:, 0]
        # Use system clock day number to identify bad SQL entries and eliminate them
        good, = np.where(d15['Ante_Cont_SystemClockMJDay'][:, 0] != 0)
        #import pdb; pdb.set_trace()
        # Indexes into SQL records where a transition occurred.
        transitions, = np.where(dtot[good] - np.roll(dtot[good], 1) != 0)
        # Eliminate any zero-index transition (if it exists)
        if transitions[0] == 0:
            transitions = transitions[1:]
        # These now have to be translated into indexes into the data, using the times
        idx = nearest_val_idx(d15['Timestamp'][good, 0][transitions], Time(out['time'], format='jd').lv)
        #import pdb; pdb.set_trace()
        vx = np.nanmedian(out['p'][:13, :, :, np.arange(idx[0] + 1, idx[1] - 1)], 3)
        va = np.mean(out['a'][:13, :2, :, np.arange(idx[0] + 1, idx[1] - 1)], 3)
        vals = []
        attn = []
        for i in range(1, 10):
            vals.append(np.nanmedian(out['p'][:13, :, :, np.arange(idx[i] + 1, idx[i + 1] - 1)], 3) - vx)
            attn.append(np.log10(vals[0] / vals[-1]) * 10.)
        #vals = []
        #attna = []
        #for i in range(1,10):
        #    vals.append(np.median(out['a'][:13,:2,:,np.arange(idx[i],idx[i+1])],3) - va)
        #    attna.append(np.log10(vals[0]/vals[-1])*10.)
        if do_plot:
            for i in range(13):
                for j in range(2):
                    ax[j, i].plot(out['fghz'], attn[1][i, j], '.', markersize=3)
                    #ax[j,i].plot(out['fghz'],attna[1][i,j],'.',markersize=1)
                    ax[j + 2, i].plot(out['fghz'], attn[2][i, j], '.', markersize=3)
                    #ax[j+2,i].plot(out['fghz'],attna[2][i,j],'.',markersize=1)
        outdict.append({'time': Time(out['time'][0], format='jd'),
                        'fghz': out['fghz'],
                        'rcvr_auto': va,  # 'attna': np.array(attna[1:]),
                        'rcvr': vx,
                        'attn': np.array(attn[1:])})
    return outdict
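
# Sketch of the transition-finding idiom used above: a state change is any
# index where the summed attenuation differs from its predecessor (np.roll
# shifts the array by one).  The dtot values below are made up.
def _demo_find_transitions():
    dtot = np.array([62, 62, 0, 0, 0, 1, 1, 2, 2])
    transitions, = np.where(dtot - np.roll(dtot, 1) != 0)
    if transitions[0] == 0:
        # Drop the spurious wrap-around "transition" at index 0
        transitions = transitions[1:]
    print transitions   # -> [2 5 7]
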
def calpnt_multi(trange, ant_str='ant1-13', do_plot=True, outfile=None):
    ''' Runs calpntanal() for all scans within the given time range.

        trange   Time object with start and end time over which scans
                   should be identified and analyzed.

        Returns a list of dictionaries containing
           Source name, Time, HA, Dec, RA offset and Dec offset
    '''
    import read_idb
    fdb = dump_tsys.rd_fdb(trange[0])
    scanidx, = np.where(fdb['PROJECTID'] == 'CALPNTCAL')
    calpnt2m = False  # Flag to indicate whether we are doing CALPNT2M analysis
    if scanidx.size == 0:
        # No CALPNTCAL scans, so search for CALPNT2M
        scanidx, = np.where(fdb['PROJECTID'] == 'CALPNT2M')
        if scanidx.size == 0:
            print 'No CALPNTCAL or CALPNT2M project IDs found for date ' + trange[0].iso[:10]
            return {}
        calpnt2m = True
    scans, sidx = np.unique(fdb['SCANID'][scanidx], return_index=True)
    tlist = Time(fdb['ST_TS'][scanidx[sidx]].astype(float).astype(int), format='lv')
    telist = Time(fdb['EN_TS'][scanidx[sidx]].astype(float).astype(int), format='lv')
    idx = read_idb.p.ant_str2list(ant_str)
    out = []
    if outfile is None:
        outfile = '/common/tmp/Pointing/' + tlist[0].iso[:10].replace('-', '') + '_calpnt.txt'
    k = -1
    # First find out how many pointings there will be
    npt = 0
    for i, t in enumerate(tlist):
        if t.jd >= trange[0].jd and telist[i].jd <= trange[1].jd:
            npt += 1
    for i, t in enumerate(tlist):
        if t.jd >= trange[0].jd and telist[i].jd <= trange[1].jd:
            k += 1  # Plot-axis row pointer
            if do_plot:
                if k % 6 == 0:
                    nrows = min(npt - k, 6)  # Number of rows in this plot--up to 6
                    if calpnt2m:
                        f, ax = plt.subplots(nrows, 2 * idx.size)
                        f.set_size_inches(2.5 * idx.size, 1.5 * nrows, forward=True)
                        plt.subplots_adjust(left=0.03, right=0.97, top=0.89, bottom=0.3,
                                            wspace=0.1, hspace=0.3)
                    else:
                        f, ax = plt.subplots(nrows, 2)
                        f.set_size_inches(2.5, 1.5 * nrows, forward=True)
                    k = 0  # Reset row pointer to top of new plot
                else:
                    # Kill previous row's xaxis tick labels so that title is visible
                    if calpnt2m:
                        for j in range(idx.size * 2):
                            ax[k - 1, j].xaxis.set_ticklabels([])
                            ax[k - 1, j].set_xlabel('')
                    else:
                        for j in range(2):
                            ax[k - 1, j].xaxis.set_ticklabels([])
                            ax[k - 1, j].set_xlabel('')
                try:
                    out.append(calpntanal(t, ant_str=ant_str, do_plot=do_plot, ax=ax[k]))
                except:
                    out.append({})
            else:
                try:
                    out.append(calpntanal(t, ant_str=ant_str))
                except:
                    out.append({})
            src = out[-1]
            if src == {}:
                print 'Scan starting at', t.iso, 'failed.'
            else:
                if calpnt2m:
                    if k == 0:
                        # Print heading to screen and to file
                        heading = ' Source Date Time HA Dec '
                        for iant in idx:
                            heading += ' Ant {:2d} '.format(iant + 1)
                        print heading
                        # Write heading to file only if the file has not been opened/created yet.
                        if not os.path.isfile(outfile):
                            f = open(outfile, 'w')
                            f.write(heading + '\n')
                            f.close()
                    rao_deco = ''
                    for j in range(idx.size):
                        rao_deco += ' {:7.3f} {:7.3f}'.format(src['rao'][j], src['deco'][j])
                    line = '{:s} {:s} {:6.2f} {:6.2f}'.format(src['source'], src['time'].iso[:19],
                                                              src['ha'], src['dec'])
                    line += rao_deco
                else:
                    if k == 0:
                        # Print heading to screen and to file
                        heading = ' Source Date Time HA Dec Ant 14'
                        print heading
                        # Write heading to file only if the file has not been opened/created yet.
                        if not os.path.isfile(outfile):
                            f = open(outfile, 'w')
                            f.write(heading + '\n')
                            f.close()
                    line = '{:s} {:s} {:6.2f} {:6.2f} {:7.3f} {:7.3f}'.format(
                        src['source'], src['time'].iso[:19],
                        src['ha'], src['dec'], src['rao'], src['deco'])
                print line
                f = open(outfile, 'a')  # This should create the file if it does not exist, or append otherwise.
                f.write(line + '\n')
                f.close()
    return out
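
# Usage sketch for calpnt_multi(), with an illustrative time range and a
# hypothetical output file path.
def _demo_calpnt_multi():
    trange = Time(['2019-08-01 18:00', '2019-08-01 23:00'])
    results = calpnt_multi(trange, ant_str='ant1-13', do_plot=False,
                           outfile='/tmp/20190801_calpnt.txt')
    good = [r for r in results if r != {}]
    print len(good), 'of', len(results), 'pointing scans analyzed successfully.'
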
def calpntanal(t, ant_str='ant1-13', do_plot=True, ax=None):
    ''' Does a complete analysis of CALPNTCAL, reading information from the SQL
        database, finding the corresponding Miriad IDB data, and doing the
        gaussian fit to the beam, to return the beam and offset parameters.

        t    Time object with a time near the start of the desired scan (finds
               the scan that starts closest time to the given time)

        ax   If specified as a two-element array of axes, the plots will be
               placed in an already existing window, allowing reuse of the same
               window.  Otherwise, a new figure is created (if do_plot is True).

        Returns a dictionary containing
           Source name, Time, HA, Dec, RA offset and Dec offset
    '''
    import matplotlib.pyplot as plt
    from matplotlib.transforms import Bbox
    import read_idb
    import dbutil as db
    bl2ord = read_idb.bl2ord
    tdate = t.iso.replace('-', '')[:8]
    fdir = '/data1/eovsa/fits/IDB/' + tdate + '/'
    fdb = dump_tsys.rd_fdb(t)
    scanidx, = np.where(fdb['PROJECTID'] == 'CALPNTCAL')
    # Set offset coordinates appropriate to the type of PROJECTID found
    if scanidx.size == 0:
        # No CALPNTCAL scans, so search for CALPNT2M
        scanidx, = np.where(fdb['PROJECTID'] == 'CALPNT2M')
        if scanidx.size == 0:
            print 'No CALPNTCAL or CALPNT2M project IDs found for date ' + t.iso[:10]
            return {}
        else:
            # Found CALPNT2M, so set offset coordinates to match calpnt2m.trj
            rao = np.array([-5.00, -2.0, -1.0, -0.5, 0.00, 0.5, 1.0, 2.0])
            deco = np.array([-2.0, -1.0, -0.5, 0.00, 0.5, 1.0, 2.0, 5.00])
            pltfac = 10.
    else:
        # Found CALPNTCAL, so set offset coordinates to match calpnt.trj
        rao = np.array([-1.00, -0.20, -0.10, -0.05, 0.00, 0.05, 0.10, 0.20])
        deco = np.array([-0.20, -0.10, -0.05, 0.00, 0.05, 0.10, 0.20, 1.00])
        pltfac = 1.
    scans, sidx = np.unique(fdb['SCANID'][scanidx], return_index=True)
    tlist = Time(fdb['ST_TS'][scanidx[sidx]].astype(float).astype(int), format='lv')
    idx, = nearest_val_idx([t.jd], tlist.jd)
    filelist = [fdir + f for f in fdb['FILE'][np.where(fdb['SCANID'] == scans[idx])]]
    # Read pointing data (timerange t must be accurate)
    out = read_idb.read_idb(filelist, navg=30)
    # Determine wanted baselines with ant 14 from ant_str
    idx = read_idb.p.ant_str2list(ant_str)
    idx1 = idx[idx > 7]  # Ants > 8
    idx2 = idx[idx < 8]  # Ants <= 8
    # Determine parallactic angle for azel antennas (they are all the same, so find median).
    #   If  0 < abs(chi) < 30, use channel XX
    #   If 30 < abs(chi) < 60, use sum of channels XX and XY
    #   If 60 < abs(chi) < 90, use channel XY
    midtime = Time((out['time'][0] + out['time'][-1]) / 2., format='jd')
    times, chi = db.get_chi(Time([midtime.lv + 1, midtime.lv + 10], format='lv'))
    abschi = abs(lobe(np.median(chi[0, 0:8])))
    if pltfac == 1.:
        # Case of 27m antenna pointing
        # Do appropriate sums over frequency, polarization and baseline
        if abschi >= 0 and abschi < np.pi / 6:
            pntdata = np.sum(np.abs(np.sum(out['x'][bl2ord[idx, 13], 0, :, :48], 1)), 0)  # Use only XX
        elif abschi >= np.pi / 6 and abschi < np.pi / 3:
            pntdata1 = np.sum(np.abs(np.sum(out['x'][bl2ord[idx1, 13], 0, :, :48], 1)), 0)  # Use XX only for ants > 8
            pntdata2 = np.sum(np.abs(np.sum(out['x'][bl2ord[idx2, 13], :, :, :48], 2)), 0)  # Use sum of XX and XY for ants <= 8
            pntdata = pntdata1 + np.sum(pntdata2[np.array([0, 2])], 0)
        else:
            pntdata1 = np.sum(np.abs(np.sum(out['x'][bl2ord[idx1, 13], 0, :, :48], 1)), 0)  # Use XX only for ants > 8
            pntdata2 = np.sum(np.abs(np.sum(out['x'][bl2ord[idx2, 13], 2, :, :48], 1)), 0)  # Use XY for ants <= 8
            pntdata = pntdata1 + pntdata2
        # Measurements are 90 s long, hence 3 consecutive 30 s points, so do final
        # sum over these
        pntdata.shape = (16, 3)
        stdev = np.std(pntdata, 1)
        pntdata = np.sum(pntdata, 1)
        radat = pntdata[:8]
        decdat = pntdata[8:]
        plsqr, xr, yr = solpnt.gausfit(rao, radat)
        plsqd, xd, yd = solpnt.gausfit(deco, decdat)
        midtime = Time((out['time'][0] + out['time'][-1]) / 2., format='jd')
        if (do_plot):
            if ax is None:
                f, ax = plt.subplots(1, 2)
                f.set_size_inches(2.5, 1.5, forward=True)
            ax[0].errorbar(rao, radat, yerr=stdev[:8], fmt='.')
            ax[0].plot(xr, yr)
            ax[0].axvline(x=0, color='k')
            ax[0].axvline(x=plsqr[1], linestyle='--')
            ax[1].errorbar(deco, decdat, yerr=stdev[8:], fmt='.')
            ax[1].plot(xd, yd)
            ax[1].axvline(x=0, color='k')
            ax[1].axvline(x=plsqd[1], linestyle='--')
            for j in range(2):
                ax[j].set_xlim(-0.3, 0.3)
                ax[j].grid()
            ax[0].text(0.05, 0.9, 'RAO :' + str(plsqr[1])[:5], transform=ax[0].transAxes)
            ax[0].text(0.55, 0.9, 'FWHM:' + str(plsqr[2])[:5], transform=ax[0].transAxes)
            ax[0].set_xlabel('RA Offset [deg]')
            ax[1].text(0.05, 0.9, 'DECO:' + str(plsqd[1])[:5], transform=ax[1].transAxes)
            ax[1].text(0.55, 0.9, 'FWHM:' + str(plsqd[2])[:5], transform=ax[1].transAxes)
            ax[1].set_xlabel('Dec Offset [deg]')
            if ax is None:
                f.suptitle('Pointing on ' + out['source'] + ' at ' + midtime.iso)
            else:
                ax[0].set_title(out['source'] + ' at')
                ax[1].set_title(midtime.iso[:16])
            plt.pause(0.5)
        return {'source': out['source'], 'ha': out['ha'][24] * 180. / np.pi,
                'dec': out['dec'] * 180 / np.pi, 'rao': plsqr[1], 'deco': plsqd[1],
                'time': midtime, 'antidx': 13}
    else:
        # Case of 2m antenna pointing
        # Do appropriate sum over frequency and polarization but not baseline
        if abschi >= 0 and abschi < np.pi / 6:
            pntdata = np.abs(np.sum(out['x'][bl2ord[idx, 13], 0, :, :48], 1))  # Use only XX
        elif abschi >= np.pi / 6 and abschi < np.pi / 3:
            pntdata1 = np.abs(np.sum(out['x'][bl2ord[idx1, 13], 0, :, :48], 1))  # Use XX only for ants > 8
            pntdata2 = np.abs(np.sum(out['x'][bl2ord[idx2, 13], :, :, :48], 2))  # Use sum of XX and XY for ants <= 8
            pntdata = np.concatenate((pntdata1, np.sum(pntdata2[:, np.array([0, 2])], 1)), 0)
        else:
            pntdata1 = np.abs(np.sum(out['x'][bl2ord[idx1, 13], 0, :, :48], 1))  # Use XX only for ants > 8
            pntdata2 = np.abs(np.sum(out['x'][bl2ord[idx2, 13], 2, :, :48], 1))  # Use XY for ants <= 8
            pntdata = np.concatenate((pntdata1, pntdata2), 0)
        # Measurements are 90 s long, hence 3 consecutive 30 s points, so do final
        # sum over these
        pntdata.shape = (idx.size, 16, 3)
        stdev = np.std(pntdata, 2)
        pntdata = np.sum(pntdata, 2)
        rao_fit = np.zeros(idx.size, float)
        deco_fit = np.zeros(idx.size, float)
        if (do_plot):
            if ax is None:
                f, ax = plt.subplots(1, 2 * idx.size)
                f.set_size_inches(2.5 * idx.size, 1.5, forward=True)
                plt.subplots_adjust(left=0.03, right=0.97, top=0.89, bottom=0.3,
                                    wspace=0.1, hspace=0.1)
        for k in range(idx.size):
            radat = pntdata[k, :8]
            decdat = pntdata[k, 8:]
            plsqr, xr, yr = solpnt.gausfit(rao, radat)
            plsqd, xd, yd = solpnt.gausfit(deco, decdat)
            midtime = Time((out['time'][0] + out['time'][-1]) / 2., format='jd')
            if (do_plot):
                ax[k * 2 + 0].errorbar(rao, radat, yerr=stdev[k, :8], fmt='.')
                ax[k * 2 + 0].plot(xr, yr)
                ax[k * 2 + 0].axvline(x=0, color='k')
                ax[k * 2 + 0].axvline(x=plsqr[1], linestyle='--')
                ax[k * 2 + 1].errorbar(deco, decdat, yerr=stdev[k, 8:], fmt='.')
                ax[k * 2 + 1].plot(xd, yd)
                ax[k * 2 + 1].axvline(x=0, color='k')
                ax[k * 2 + 1].axvline(x=plsqd[1], linestyle='--')
                for j in range(2):
                    ax[k * 2 + j].set_xlim(-3.0, 3.0)
                    ax[k * 2 + j].grid()
                ax[k * 2 + 0].text(0.05, 0.7, 'RAO :' + str(plsqr[1])[:5], transform=ax[k * 2 + 0].transAxes, fontsize=9)
                ax[k * 2 + 0].text(0.05, 0.5, 'FWHM:' + str(plsqr[2])[:5], transform=ax[k * 2 + 0].transAxes, fontsize=9)
                ax[k * 2 + 0].set_xlabel('RAO [deg]', fontsize=9)
                ax[k * 2 + 1].text(-0.2, 0.85, 'Ant :' + str(idx[k] + 1), transform=ax[k * 2 + 1].transAxes, fontsize=9)
                ax[k * 2 + 1].text(0.05, 0.7, 'DECO:' + str(plsqd[1])[:5], transform=ax[k * 2 + 1].transAxes, fontsize=9)
                ax[k * 2 + 1].text(0.05, 0.5, 'FWHM:' + str(plsqd[2])[:5], transform=ax[k * 2 + 1].transAxes, fontsize=9)
                ax[k * 2 + 1].set_yticklabels([])
                ax[k * 2 + 1].set_xlabel('DECO [deg]', fontsize=9)
                if k == idx.size - 1:
                    ax[k + 0].set_title(out['source'] + ' at', fontsize=9)
                    ax[k + 1].set_title(midtime.iso[:16], fontsize=9)
                # Adjust the pairs of subplots to group RA/Dec for each antenna together
                ax[k * 2 + 0].set_position(Bbox(ax[k * 2 + 0].get_position().get_points() + np.array([[0.0025, 0], [0.0025, 0]])))
                ax[k * 2 + 1].set_position(Bbox(ax[k * 2 + 1].get_position().get_points() + np.array([[-0.0025, 0], [-0.0025, 0]])))
                # Set to the same amplitude scale
                ymin = min(ax[k * 2 + 0].get_ylim()[0], ax[k * 2 + 1].get_ylim()[0])
                ymax = max(ax[k * 2 + 0].get_ylim()[1], ax[k * 2 + 1].get_ylim()[1])
                ax[k * 2 + 0].set_ylim(ymin, ymax)
                ax[k * 2 + 1].set_ylim(ymin, ymax)
                plt.pause(0.5)
            rao_fit[k] = plsqr[1]
            deco_fit[k] = plsqd[1]
        return {'source': out['source'], 'ha': out['ha'][24] * 180. / np.pi,
                'dec': out['dec'] * 180 / np.pi, 'rao': rao_fit, 'deco': deco_fit,
                'time': midtime, 'antidx': idx}
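
# Usage sketch for calpntanal(): analyze the pointing scan nearest an
# illustrative time and report the fitted offsets ('rao'/'deco' are scalars
# for the 27m case and per-antenna arrays for the 2m case).
def _demo_calpntanal():
    res = calpntanal(Time('2019-08-01 20:00'), ant_str='ant1-13', do_plot=False)
    if res != {}:
        print res['source'], 'at', res['time'].iso
        print 'RA offset(s) [deg]: ', res['rao']
        print 'Dec offset(s) [deg]:', res['deco']
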