def time2filename(msfile, timerange='', spw=''):
    from astropy.time import Time
    tb.open(msfile)
    starttim = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd')
    endtim = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd')
    tb.close()
    datstr = starttim.iso[:10]
    ms.open(msfile)
    metadata = ms.metadata()
    observatory = metadata.observatorynames()[0]
    ms.close()
    if timerange is None or timerange == '':
        starttim1 = starttim
        endtim1 = endtim
    else:
        (tstart, tend) = timerange.split('~')
        if tstart[2] == ':':
            starttim1 = Time(datstr + 'T' + tstart)
            endtim1 = Time(datstr + 'T' + tend)
        else:
            starttim1 = Time(qa.quantity(tstart, 'd')['value'], format='mjd')
            endtim1 = Time(qa.quantity(tend, 'd')['value'], format='mjd')
    midtime = Time((starttim1.mjd + endtim1.mjd) / 2., format='mjd')
    tstr = midtime.to_datetime().strftime('{}_%Y%m%dT%H%M%S'.format(observatory))
    if spw:
        spstr = 'spw{}'.format(spw.replace('~', '-'))
        filename = '.'.join([tstr, spstr])
    else:
        filename = tstr
    return filename
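
# Example usage of time2filename (a sketch, assuming a CASA session where the tb,
# ms and qa tools are available and 'IDB20170906.ms' is a hypothetical EOVSA dataset):
#     fname = time2filename('IDB20170906.ms', timerange='19:00:00~19:10:00', spw='5~10')
#     # -> something like 'EOVSA_20170906T190500.spw5-10'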
def get_trange(msfile):
    from astropy.time import Time
    tb.open(msfile)
    tr = np.array([tb.getcell('TIME', 0), tb.getcell('TIME', tb.nrows() - 1)]) / 24. / 3600.
    tb.close()
    return Time(tr, format='mjd')
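
# Example usage of get_trange (a sketch; returns an astropy Time array holding the
# first and last integration times of the hypothetical 'IDB20170906.ms'):
#     trange = get_trange('IDB20170906.ms')
#     print(trange.iso)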
def concateovsa(msname, msfiles, visprefix='./', doclearcal=True, keep_orig_ms=False, cols2rm=["MODEL_DATA", "CORRECTED_DATA"]): concatvis = visprefix + msname msfiles_ = [] for idx, ll in enumerate(msfiles): if str(ll).endswith('/'): msfiles[idx] = str(ll)[:-1] if doclearcal: print 'Warning: Corrected column in the input ms file will be cleared!!!' for ll in msfiles: clearcal(vis=str(ll), addmodel=True) else: try: tmpdir = visprefix + '/tmp_ms/' if not os.path.exists(tmpdir): os.makedirs(tmpdir) for ll in msfiles: msfile_ = tmpdir + os.path.basename(str(ll)) msfiles_.append(msfile_) split(vis=str(ll), outputvis=msfile_, datacolumn='corrected') clearcal(vis=msfile_, addmodel=True) except: print 'Warning: Corrected column not found in the input ms file.' msfiles_ = msfiles if msfiles_: concat(vis=msfiles_, concatvis=concatvis, timesort=True) else: concat(vis=msfiles, concatvis=concatvis, timesort=True) # Change all observation ids to be the same (zero) tb.open(concatvis + '/OBSERVATION', nomodify=False) nobs = tb.nrows() tim0 = tb.getcell('TIME_RANGE', 0)[0] tim1 = tb.getcell('TIME_RANGE', nobs - 1)[1] tb.removerows([i + 1 for i in range(nobs - 1)]) tb.putcell('TIME_RANGE', 0, [tim0, tim1]) tb.close() tb.open(concatvis, nomodify=False) obsid = tb.getcol('OBSERVATION_ID') newobsid = np.zeros(len(obsid), dtype='int') tb.putcol('OBSERVATION_ID', newobsid) colnames = tb.colnames() for l in range(len(cols2rm)): if cols2rm[l] in colnames: try: tb.removecols(cols2rm[l]) except: pass tb.close() if msfiles_ != [] and msfiles_ != msfiles: for ll in msfiles_: os.system('rm -rf {}'.format(ll)) if not keep_orig_ms: for ll in msfiles: os.system('rm -rf {}'.format(ll))
def verifyMS(msname, expnumspws, expnumchan, inspw, expchanfreqs=[], ignoreflags=False):
    '''Function to verify spw and channels information in an MS
    msname --> name of MS to verify
    expnumspws --> expected number of SPWs in the MS
    expnumchan --> expected number of channels in spw
    inspw --> SPW ID
    expchanfreqs --> numpy array with expected channel frequencies
    ignoreflags --> do not check the FLAG column
    Returns a list with True or False and a state message'''
    msg = ''
    tb.open(msname + '/SPECTRAL_WINDOW')
    nc = tb.getcell("NUM_CHAN", inspw)
    nr = tb.nrows()
    cf = tb.getcell("CHAN_FREQ", inspw)
    tb.close()
    # After channel selection/average, need to know the exact row number to check;
    # ignore this check in these cases.
    if not ignoreflags:
        tb.open(msname)
        dimdata = tb.getcell("FLAG", 0)[0].size
        tb.close()
    if not (nr == expnumspws):
        msg = "Found " + str(nr) + ", expected " + str(expnumspws) + " spectral windows in " + msname
        return [False, msg]
    if not (nc == expnumchan):
        msg = "Found " + str(nc) + ", expected " + str(expnumchan) + " channels in spw " + str(inspw) + " in " + msname
        return [False, msg]
    if not ignoreflags and (dimdata != expnumchan):
        msg = "Found " + str(dimdata) + ", expected " + str(expnumchan) + " channels in FLAG column in " + msname
        return [False, msg]
    if not (expchanfreqs == []):
        print "Testing channel frequencies ..."
        # print cf
        # print expchanfreqs
        if not (expchanfreqs.size == expnumchan):
            msg = "Internal error: array of expected channel freqs should have dimension " + str(expnumchan)
            return [False, msg]
        df = (cf - expchanfreqs) / expchanfreqs
        if not (abs(df) < 1E-8).all():
            msg = "channel frequencies in spw " + str(inspw) + " differ from expected values by (relative error) " + str(df)
            return [False, msg]
    return [True, msg]
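
# Example usage of verifyMS (a sketch, assuming a hypothetical MS with 31 spectral
# windows and 4 channels in spw 5; the expected frequencies are illustrative only):
#     ok, msg = verifyMS('IDB20170906.ms', expnumspws=31, expnumchan=4, inspw=5,
#                        expchanfreqs=np.array([3.4e9, 3.6e9, 3.8e9, 4.0e9]))
#     if not ok:
#         print msg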
def splitX(vis, datacolumn2='MODEL_DATA', **kwargs):
    '''Split a measurement set while also carrying over a second data column
    (e.g., MODEL_DATA) that a plain split would otherwise drop.
    :param vis: input measurement set
    :param datacolumn2: name of the additional column to copy into the output MS
    :param kwargs: keyword arguments passed on to split (must include outputvis)
    :return: name of the output measurement set
    '''
    import os
    from tqdm import tqdm
    from clearcal_cli import clearcal_cli as clearcal
    from split_cli import split_cli as split
    kwargs2 = kwargs.copy()
    datacolumn2 = datacolumn2.upper()
    outmsfile = kwargs['outputvis']
    if outmsfile.endswith('/'):
        outmsfile = outmsfile[:-1]
    if os.path.exists(outmsfile):
        os.system('rm -rf {}'.format(outmsfile))
    if os.path.exists('{}.flagversions'.format(outmsfile)):
        os.system('rm -rf {}.flagversions'.format(outmsfile))
    split(vis, **kwargs)
    for k in ['datacolumn', 'outputvis']:
        if k in kwargs2:
            kwargs2.pop(k)
    kwargs2['outputvis'] = 'tmpms.ms'
    kwargs2['datacolumn'] = datacolumn2.replace('_DATA', '')
    if os.path.exists('tmpms.ms'):
        os.system('rm -rf tmpms.ms')
    split(vis, **kwargs2)
    tb.open('tmpms.ms')
    nrows = tb.nrows()
    data = []
    for row in tqdm(range(nrows), desc='getting {} column'.format(datacolumn2), ascii=True):
        data.append(tb.getcell('DATA', row))
    tb.close()
    clearcal(outmsfile, addmodel=True)
    tb.open(outmsfile, nomodify=False)
    for row in tqdm(range(nrows), desc='writing {} column'.format(datacolumn2), ascii=True):
        tb.putcell(datacolumn2, row, data[row])
    tb.close()
    os.system('rm -rf tmpms.ms')
    return outmsfile
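
# Example usage of splitX (a sketch; 'in.ms' and 'out.ms' are hypothetical). Unlike a
# plain split, the MODEL_DATA values of the selected data are copied into the output MS:
#     splitX('in.ms', outputvis='out.ms', datacolumn='corrected',
#            timerange='19:00:00~19:10:00', spw='5~10')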
def get_channel_freqs_widths(msname, spwid):
    ''' Get frequencies and widths of all the channels for an spw ID
    msname --> name of MS
    spwid --> spw ID
    Return two numpy arrays (frequencies, widths), each of the same length
    as the number of channels'''
    try:
        spw_table = os.path.join(msname, 'SPECTRAL_WINDOW')
        try:
            tb.open(spw_table)
        except RuntimeError:
            print 'Cannot open table: {0}'.format(spw_table)
        freqs = tb.getcell("CHAN_FREQ", spwid)
        widths = tb.getcell("CHAN_WIDTH", spwid)
    finally:
        tb.close()
    return freqs, widths
def getObservatoryName(ms):
    """
    Returns the observatory name in the specified ms, using the tb tool.
    -- Todd Hunter
    """
    antTable = ms + '/OBSERVATION'
    try:
        tb.open(antTable)
        myName = tb.getcell('TELESCOPE_NAME')
        tb.close()
    except:
        print("Could not open OBSERVATION table to get the telescope name: {}".format(antTable))
        myName = ''
    return (myName)
def getChannels(msname, spwid, chanlist):
    '''From a list of channel indices, return their frequencies
    msname --> name of MS
    spwid --> spw ID
    chanlist --> list of channel indices
    Return a numpy array, the same size of chanlist, with the frequencies'''
    try:
        try:
            tb.open(msname + '/SPECTRAL_WINDOW')
        except:
            print 'Cannot open table ' + msname + '/SPECTRAL_WINDOW'
        cf = tb.getcell("CHAN_FREQ", spwid)
        # Get only the requested channels
        b = [cf[i] for i in chanlist]
        selchans = np.array(b)
    finally:
        tb.close()
    return selchans
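
# Example usage of the channel helpers above (a sketch for a hypothetical MS):
#     freqs, widths = get_channel_freqs_widths('IDB20170906.ms', 5)
#     subset = getChannels('IDB20170906.ms', 5, [0, 2, 3])  # frequencies of channels 0, 2 and 3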
def read_horizons(t0=None, dur=None, vis=None, observatory=None, verbose=False): import urllib2 import ssl if not t0 and not vis: t0 = Time.now() if not dur: dur = 1. / 60. / 24. # default to 2 minutes if t0: try: btime = Time(t0) except: print('input time ' + str(t0) + ' not recognized') return -1 if vis: if not os.path.exists(vis): print 'Input ms data ' + vis + ' does not exist! ' return -1 try: # ms.open(vis) # summary = ms.summary() # ms.close() # btime = Time(summary['BeginTime'], format='mjd') # etime = Time(summary['EndTime'], format='mjd') ## alternative way to avoid conflicts with importeovsa, if needed -- more time consuming if observatory == 'geocentric': observatory = '500' else: ms.open(vis) metadata = ms.metadata() if metadata.observatorynames()[0] == 'EVLA': observatory = '-5' elif metadata.observatorynames()[0] == 'EOVSA': observatory = '-81' elif metadata.observatorynames()[0] == 'ALMA': observatory = '-7' ms.close() tb.open(vis) btime_vis = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') etime_vis = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() if verbose: print "Beginning time of this scan " + btime_vis.iso print "End time of this scan " + etime_vis.iso # extend the start and end time for jpl horizons by 0.5 hr on each end btime = Time(btime_vis.mjd - 0.5 / 24., format='mjd') dur = etime_vis.mjd - btime_vis.mjd + 1.0 / 24. except: print 'error in reading ms file: ' + vis + ' to obtain the ephemeris!' return -1 # default the observatory to VLA, if none provided if not observatory: observatory = '-5' etime = Time(btime.mjd + dur, format='mjd') try: cmdstr = "https://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=1&TABLE_TYPE='OBSERVER'&QUANTITIES='1,17,20'&CSV_FORMAT='YES'&ANG_FORMAT='DEG'&CAL_FORMAT='BOTH'&SOLAR_ELONG='0,180'&CENTER='{}@399'&COMMAND='10'&START_TIME='".format( observatory ) + btime.iso.replace( ' ', ',' ) + "'&STOP_TIME='" + etime.iso[:-4].replace( ' ', ',' ) + "'&STEP_SIZE='1m'&SKIP_DAYLT='NO'&EXTRA_PREC='YES'&APPARENT='REFRACTED'" cmdstr = cmdstr.replace("'", "%27") try: context = ssl._create_unverified_context() f = urllib2.urlopen(cmdstr, context=context) except: f = urllib2.urlopen(cmdstr) lines = f.readlines() f.close() except: #todo use geocentric coordinate for the new VLA data import requests, collections params = collections.OrderedDict() params['batch'] = '1' params['TABLE_TYPE'] = "'OBSERVER'" params['QUANTITIES'] = "'1,17,20'" params['CSV_FORMAT'] = "'YES'" params['ANG_FORMAT'] = "'DEG'" params['CAL_FORMAT'] = "'BOTH'" params['SOLAR_ELONG'] = "'0,180'" if observatory == '500': params['CENTER'] = "'500'" else: params['CENTER'] = "'{}@399'".format(observatory) params['COMMAND'] = "'10'" params['START_TIME'] = "'{}'".format(btime.iso[:-4].replace(' ', ',')) params['STOP_TIME'] = "'{}'".format(etime.iso[:-4].replace(' ', ',')) params['STEP_SIZE'] = "'1m'" params['SKIP_DAYLT'] = "'NO'" params['EXTRA_PREC'] = "'YES'" params['APPAENT'] = "'REFRACTED'" results = requests.get("https://ssd.jpl.nasa.gov/horizons_batch.cgi", params=params) lines = [ll for ll in results.iter_lines()] nline = len(lines) istart = 0 for i in range(nline): line = lines[i] if line[0:5] == '$$SOE': # start recording istart = i + 1 if line[0:5] == '$$EOE': # end recording iend = i newlines = lines[istart:iend] nrec = len(newlines) ephem_ = [] t = [] ra = [] dec = [] p0 = [] delta = [] for line in newlines: items = line.split(',') t.append(Time(float(items[1]), format='jd').mjd) ra.append(np.radians(float(items[4]))) 
dec.append(np.radians(float(items[5]))) p0.append(float(items[6])) delta.append(float(items[8])) # convert list of dictionary to a dictionary of arrays ephem = {'time': t, 'ra': ra, 'dec': dec, 'p0': p0, 'delta': delta} return ephem
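
# Example usage of read_horizons (a sketch, assuming astropy's Time is imported; the
# observatory code '-81' is EOVSA in JPL Horizons, and the returned dictionary holds
# lists keyed by 'time' (MJD), 'ra'/'dec' (radians), 'p0' (deg) and 'delta' (AU)):
#     eph = read_horizons(t0=Time('2017-09-06 19:00:00'), observatory='-81')
#     print(eph['ra'][0], eph['dec'][0])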
def compVarColTables(referencetab, testtab, varcol, tolerance=0.): '''Compare a variable column of two tables. referencetab --> a reference table testtab --> a table to verify varcol --> the name of a variable column (str) Returns True or False. ''' retval = True tb2 = casac.table() tb.open(referencetab) cnames = tb.colnames() tb2.open(testtab) col = varcol if tb.isvarcol(col) and tb2.isvarcol(col): try: # First check if tb.nrows() != tb2.nrows(): print 'Length of %s differ from %s, %s!=%s' % ( referencetab, testtab, len(rk), len(tk)) retval = False else: for therow in xrange(tb.nrows()): rdata = tb.getcell(col, therow) tdata = tb2.getcell(col, therow) # if not (rdata==tdata).all(): if not rdata.all() == tdata.all(): if (tolerance > 0.): differs = False for j in range(0, len(rdata)): ### if (type(rdata[j])==float or type(rdata[j])==int): if ((isinstance(rdata[j], float)) or (isinstance(rdata[j], int))): if (abs(rdata[j] - tdata[j]) > tolerance * abs(rdata[j] + tdata[j])): # print 'Column ', col,' differs in tables ', referencetab, ' and ', testtab # print therow, j # print rdata[j] # print tdata[j] differs = True ### elif (type(rdata[j])==list or type(rdata[j])==np.ndarray): elif (isinstance(rdata[j], list)) or (isinstance( rdata[j], np.ndarray)): for k in range(0, len(rdata[j])): if (abs(rdata[j][k] - tdata[j][k]) > tolerance * abs(rdata[j][k] + tdata[j][k])): # print 'Column ', col,' differs in tables ', referencetab, ' and ', testtab # print therow, j, k # print rdata[j][k] # print tdata[j][k] differs = True if differs: print 'ERROR: Column %s of %s and %s do not agree within tolerance %s' % ( col, referencetab, testtab, tolerance) retval = False break else: print 'ERROR: Column %s of %s and %s do not agree.' % ( col, referencetab, testtab) print 'ERROR: First row to differ is row=%s' % therow retval = False break finally: tb.close() tb2.close() else: print 'Columns are not varcolumns.' retval = False if retval: print 'Column %s of %s and %s agree' % (col, referencetab, testtab) return retval
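
# Example usage of compVarColTables (a sketch comparing the DATA column of two
# hypothetical measurement sets within a 1e-6 relative tolerance):
#     same = compVarColTables('reference.ms', 'test.ms', 'DATA', tolerance=1e-6)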
def get_dspec(vis=None, savespec=True, specfile=None, bl='', uvrange='', field='', scan='', datacolumn='data', domedian=False, timeran=None, spw=None, timebin='0s', regridfreq=False, fillnan=None, verbose=False): # from split_cli import split_cli as split if vis.endswith('/'): vis = vis[:-1] msfile = vis if not spw: spw = '' if not timeran: timeran = '' if not bl: bl = '' if domedian: if not uvrange: uvrange = '0.2~0.8km' bl = '' else: uvrange = '' # Open the ms and plot dynamic spectrum if verbose: print('Splitting selected data...') vis_spl = './tmpms.splitted' if os.path.exists(vis_spl): os.system('rm -rf ' + vis_spl) # split(vis=msfile, outputvis=vis_spl, timerange=timeran, antenna=bl, field=field, scan=scan, spw=spw, # uvrange=uvrange, timebin=timebin, datacolumn=datacolumn) ms.open(msfile, nomodify=False) ms.split(outputms=vis_spl, whichcol=datacolumn, time=timeran, spw=spw, baseline=bl, field=field, scan=scan, uvrange=uvrange, timebin=timebin) ms.close() if verbose: print('Regridding into a single spectral window...') # print('Reading data spw by spw') try: tb.open(vis_spl + '/POLARIZATION') corrtype = tb.getcell('CORR_TYPE', 0) pols = [stokesenum[p] for p in corrtype] tb.close() except: pols = [] if regridfreq: ms.open(vis_spl, nomodify=False) ms.cvel(outframe='LSRK', mode='frequency', interp='nearest') ms.selectinit(datadescid=0, reset=True) data = ms.getdata(['amplitude', 'time', 'axis_info'], ifraxis=True) specamp = data['amplitude'] freq = data['axis_info']['freq_axis']['chan_freq'] else: ms.open(vis_spl) ms.selectinit(datadescid=0, reset=True) spwinfo = ms.getspectralwindowinfo() specamp = [] freq = [] time = [] for descid in range(len(spwinfo.keys())): ms.selectinit(datadescid=0, reset=True) ms.selectinit(datadescid=descid) data = ms.getdata(['amplitude', 'time', 'axis_info'], ifraxis=True) specamp_ = data['amplitude'] freq_ = data['axis_info']['freq_axis']['chan_freq'] time_ = data['time'] if fillnan is not None: flag_ = ms.getdata(['flag', 'time', 'axis_info'], ifraxis=True)['flag'] if type(fillnan) in [int, float, long]: specamp_[flag_] = float(fillnan) else: specamp_[flag_] = 0.0 specamp.append(specamp_) freq.append(freq_) time.append(time_) specamp = np.concatenate(specamp, axis=1) freq = np.concatenate(freq, axis=0) ms.selectinit(datadescid=0, reset=True) ms.close() os.system('rm -rf ' + vis_spl) (npol, nfreq, nbl, ntim) = specamp.shape freq = freq.reshape(nfreq) if verbose: print('npol, nfreq, nbl, ntime:', (npol, nfreq, nbl, ntim)) spec = np.swapaxes(specamp, 2, 1) tim = data['time'] if domedian: if verbose: print('doing median of all the baselines') # mask zero values before median spec_masked = np.ma.masked_where(spec < 1e-9, spec) spec_med = np.ma.filled(np.ma.median(spec_masked, axis=1), fill_value=0.) nbl = 1 ospec = spec_med.reshape((npol, nbl, nfreq, ntim)) else: ospec = spec # Save the dynamic spectral data if savespec: if not specfile: specfile = msfile + '.dspec.npz' if os.path.exists(specfile): os.system('rm -rf ' + specfile) np.savez(specfile, spec=ospec, tim=tim, freq=freq, timeran=timeran, spw=spw, bl=bl, uvrange=uvrange, pol=pols) if verbose: print('Median dynamic spectrum saved as: ' + specfile) return {'spec': ospec, 'tim': tim, 'freq': freq, 'timeran': timeran, 'spw': spw, 'bl': bl, 'uvrange': uvrange, 'pol': pols}
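
# Example usage of get_dspec (a sketch; plots the baseline-median dynamic spectrum of
# the first polarization from a hypothetical MS):
#     d = get_dspec(vis='IDB20170906.ms', domedian=True, spw='0~30')
#     import matplotlib.pyplot as plt
#     plt.pcolormesh(d['tim'], d['freq'] / 1e9, d['spec'][0, 0], shading='auto')
#     plt.xlabel('Time (s, MJD)')
#     plt.ylabel('Frequency (GHz)')
#     plt.show()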
visa.append('/srg/ywei/data/eovsa/sep6_dofinal/slfcal/IDB20170906T190319-195320.ms.corrected.xx.slfcal')
visa.append('/srg/ywei/data/eovsa/sep6_dofinal/slfcal/IDB20170906T190319-195320.ms.corrected.xx.slfcal0')
visa.append('/srg/ywei/data/eovsa/sep6_dofinal/slfcal/IDB20170906T190319-195320.ms.corrected.xx.slfcal01')
visa.append('/srg/ywei/data/eovsa/sep6_dofinal/slfcal/IDB20170906T190319-195320.ms.corrected.xx.slfcaled')
tb.open('/srg/ywei/data/eovsa/sep6_dofinal/slfcal/IDB20170906T190319-195320.ms.corrected.xx.slfcal')
starttim = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd')
endtim = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd')
tb.close()
trange = '{0}~{1}'.format(starttim.iso.replace('-', '/').replace(' ', '/'),
                          endtim.iso.replace('-', '/').replace(' ', '/'))
workdir = '/srg/ywei/data/eovsa/sep6_dofinal'
for index in range(4):
    imgprefix = workdir + '/slfcal/images/' + str(index) + 'testing_slf_192920'
    img_final = imgprefix + '_final'
    spws = [str(s + 1) for s in range(30)]
    slfcaledms = visa[index]
    tb.open(slfcaledms + '/SPECTRAL_WINDOW')
    reffreqs = tb.getcol('REF_FREQUENCY')
    bdwds = tb.getcol('TOTAL_BANDWIDTH')
    cfreqs = reffreqs + bdwds / 2.
def mk_qlook_image(trange, doimport=False, docalib=False, ncpu=10, twidth=12, stokes=None, antenna='0~12', lowcutoff_freq=3.7, imagedir=None, spws=['1~5', '6~10', '11~15', '16~25'], toTb=True, overwrite=True, doslfcal=False, verbose=False): ''' trange: can be 1) a single Time() object: use the entire day 2) a range of Time(), e.g., Time(['2017-08-01 00:00','2017-08-01 23:00']) 3) a single or a list of UDBms file(s) 4) None -- use current date Time.now() ''' antenna0 = antenna if type(trange) == Time: mslist = trange2ms(trange=trange, doimport=doimport) vis = mslist['ms'] tsts = [l.to_datetime() for l in mslist['tstlist']] if type(trange) == str: try: date = Time(trange) mslist = trange2ms(trange=trange, doimport=doimport) vis = mslist['ms'] tsts = [l.to_datetime() for l in mslist['tstlist']] except: vis = [trange] tsts = [] for v in vis: tb.open(v + '/OBSERVATION') tsts.append( Time(tb.getcell('TIME_RANGE')[0] / 24 / 3600, format='mjd').datetime) tb.close() subdir = [tst.strftime("%Y/%m/%d/") for tst in tsts] for idx, f in enumerate(vis): if f[-1] == '/': vis[idx] = f[:-1] if not stokes: stokes = 'XX' if not imagedir: imagedir = './' imres = { 'Succeeded': [], 'BeginTime': [], 'EndTime': [], 'ImageName': [], 'Spw': [], 'Vis': [], 'Synoptic': { 'Succeeded': [], 'BeginTime': [], 'EndTime': [], 'ImageName': [], 'Spw': [], 'Vis': [] } } for n, msfile in enumerate(vis): msfilebs = os.path.basename(msfile) imdir = imagedir + subdir[n] if not os.path.exists(imdir): os.makedirs(imdir) if doslfcal: slfcalms = './' + msfilebs + '.xx' split(msfile, outputvis=slfcalms, datacolumn='corrected', correlation='XX') cfreqs = getspwfreq(msfile) for spw in spws: antenna = antenna0 if spw == '': continue spwran = [s.zfill(2) for s in spw.split('~')] freqran = [cfreqs[int(s)] for s in spw.split('~')] cfreq = np.mean(freqran) bmsz = max(150. / cfreq, 20.) 
uvrange = '<10klambda' if doslfcal: slfcal_img = './' + msfilebs + '.slf.spw' + spw.replace( '~', '-') + '.slfimg' slfcal_tb = './' + msfilebs + '.slf.spw' + spw.replace( '~', '-') + '.slftb' try: clean(vis=slfcalms, antenna=antenna, imagename=slfcal_img, spw=spw, mode='mfs', timerange='', imagermode='csclean', psfmode='clark', imsize=[512, 512], cell=['5arcsec'], niter=100, gain=0.05, stokes='I', weighting='natural', restoringbeam=[str(bmsz) + 'arcsec'], pbcor=False, interactive=False, usescratch=True) except: print('error in cleaning spw: ' + spw) break gaincal(vis=slfcalms, refant='0', antenna=antenna, caltable=slfcal_tb, spw=spw, uvrange='', gaintable=[], selectdata=True, timerange='', solint='600s', gaintype='G', calmode='p', combine='', minblperant=3, minsnr=2, append=False) if not os.path.exists(slfcal_tb): print('No solution found in spw: ' + spw) break else: clearcal(slfcalms) delmod(slfcalms) applycal(vis=slfcalms, gaintable=[slfcal_tb], spw=spw, selectdata=True, antenna=antenna, interp='nearest', flagbackup=False, applymode='calonly', calwt=False) msfile = slfcalms imsize = 512 cell = ['5arcsec'] if len(spwran) == 2: spwstr = spwran[0] + '~' + spwran[1] else: spwstr = spwran[0] restoringbeam = ['{0:.1f}arcsec'.format(bmsz)] imagesuffix = '.spw' + spwstr.replace('~', '-') if cfreq > 10.: antenna = antenna + ';!0&1;!0&2' # deselect the shortest baselines # else: # antenna = antenna + ';!0&1' # deselect the shortest baselines res = ptclean3(vis=msfile, imageprefix=imdir, imagesuffix=imagesuffix, twidth=twidth, uvrange=uvrange, spw=spw, ncpu=ncpu, niter=1000, gain=0.05, antenna=antenna, imsize=imsize, cell=cell, stokes=stokes, doreg=True, usephacenter=False, overwrite=overwrite, toTb=toTb, restoringbeam=restoringbeam, specmode="mfs", deconvolver="hogbom", datacolumn='data', pbcor=True) if res: imres['Succeeded'] += res['Succeeded'] imres['BeginTime'] += res['BeginTime'] imres['EndTime'] += res['EndTime'] imres['ImageName'] += res['ImageName'] imres['Spw'] += [spwstr] * len(res['ImageName']) imres['Vis'] += [msfile] * len(res['ImageName']) else: continue if len(vis) == 1: # produce the band-by-band whole-day images ms.open(msfile) ms.selectinit() timfreq = ms.getdata(['time', 'axis_info'], ifraxis=True) tim = timfreq['time'] ms.close() cfreqs = getspwfreq(msfile) imdir = imagedir + subdir[0] if not os.path.exists(imdir): os.makedirs(imdir) for spw in spws: antenna = antenna0 if spw == '': spw = '{:d}~{:d}'.format( next(x[0] for x in enumerate(cfreqs) if x[1] > lowcutoff_freq), len(cfreqs) - 1) spwran = [s.zfill(2) for s in spw.split('~')] freqran = [cfreqs[int(s)] for s in spw.split('~')] cfreq = np.mean(freqran) bmsz = max(150. / cfreq, 20.) 
uvrange = '' imsize = 512 cell = ['5arcsec'] if len(spwran) == 2: spwstr = spwran[0] + '~' + spwran[1] else: spwstr = spwran[0] restoringbeam = ['{0:.1f}arcsec'.format(bmsz)] imagesuffix = '.synoptic.spw' + spwstr.replace('~', '-') antenna = antenna + ';!0&1' # deselect the shortest baselines res = ptclean3(vis=msfile, imageprefix=imdir, imagesuffix=imagesuffix, twidth=len(tim), uvrange=uvrange, spw=spw, ncpu=1, niter=0, gain=0.05, antenna=antenna, imsize=imsize, cell=cell, stokes=stokes, doreg=True, usephacenter=False, overwrite=overwrite, toTb=toTb, restoringbeam=restoringbeam, specmode="mfs", deconvolver="hogbom", datacolumn='data', pbcor=True) if res: imres['Synoptic']['Succeeded'] += res['Succeeded'] imres['Synoptic']['BeginTime'] += res['BeginTime'] imres['Synoptic']['EndTime'] += res['EndTime'] imres['Synoptic']['ImageName'] += res['ImageName'] imres['Synoptic']['Spw'] += [spwstr] * len(res['ImageName']) imres['Synoptic']['Vis'] += [msfile] * len(res['ImageName']) else: continue # save it for debugging purposes np.savez('imres.npz', imres=imres) return imres
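
# Example usage of mk_qlook_image (a sketch; builds quick-look EOVSA images for a
# whole day in the four default spectral-window groups, assuming the supporting
# tasks such as trange2ms and ptclean3 are importable in the session):
#     from astropy.time import Time
#     imres = mk_qlook_image(Time('2017-09-06'), doimport=True, ncpu=10,
#                            imagedir='./qlookimgs/')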
def WEI_plot(dofile=True, vis=None, timerange=None, spw='', aiafits='', imagehead='', workdir='', spwCol=3, phasecenter='J2000 11h00m48 06d14m60'): if vis[-1] == '/': vis = vis[:-1] ms.open(vis) spwInfo = ms.getspectralwindowinfo() ms.close() tb.open(vis) starttim = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') endtim = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() tb.open(vis + '/SPECTRAL_WINDOW') reffreqs = tb.getcol('REF_FREQUENCY') bdwds = tb.getcol('TOTAL_BANDWIDTH') cfreqs = reffreqs + bdwds / 2. tb.close() sbeam = 35. #get timerange from vis file if not timerange: timerange = '{0}~{1}'.format( starttim.iso.replace('-', '/').replace(' ', '/'), endtim.iso.replace('-', '/').replace(' ', '/')) nspw = len(spwInfo) #draw plot aia fig = plt.figure(figsize=(12, 7), dpi=100) gs1 = gridspec.GridSpec(4, 3) gs1.update(left=0.08, right=0.32, wspace=0.05) ax1 = plt.subplot(gs1[11]) aiamap = smap.Map(aiafits) aiamap.plot(axes=ax1) #do clean spwCol by spwCol for cur_spwCol in range(0, np.floor_divide(nspw, spwCol)): if ((cur_spwCol + 1) * spwCol) < nspw: cur_spwRange = str(cur_spwCol * spwCol + 1) + '~' + str( (cur_spwCol + 1) * spwCol) else: cur_spwRange = str(cur_spwCol * spwCol + 1) + '~' + '31' imagename = imagehead + cur_spwRange + 'SPWs' cur_eovsaFits = imagename + '.fits' if cur_spwCol < 6: cur_mask = '/srg/ywei/data/eovsa/mask/sep_6mask_' + str( cur_spwCol + 1) + '.rgn' else: cur_mask = '/srg/ywei/data/eovsa/mask/sep_6/mask_6.rgn' if dofile: #clean(vis=vis, spw=cur_spwRange, timerange=timerange, imagename=imagename, imsize=[256,256], niter=100, cell=['2arcsec'] ) #clean(vis=vis, spw=cur_spwRange, timerange=timerange, imagename=imagename, imsize=[512,512], niter=1000, cell=['1arcsec'],stokes='XX', gain=0.05,weighting='briggs', mode='mfs',imagermode='csclean',psfmode='clark',robust=0.0,restoringbeam = ['10.0arcsec'], mask=cur_mask,pbcor=True) clean(vis=vis, spw=cur_spwRange, timerange=timerange, imagename=imagename, imsize=[512, 512], niter=1000, cell=['1arcsec'], stokes='XX', gain=0.05, weighting='briggs', mode='mfs', imagermode='csclean', psfmode='clark', robust=0.0, restoringbeam=['10.0arcsec'], mask='', pbcor=True) print 'fits name =' + str(cur_eovsaFits) hf.imreg(vis=vis, imagefile=imagename + '.image', fitsfile=imagename + '.fits', timerange=timerange) #plot eovsa cur_emap = smap.Map(cur_eovsaFits) cur_axname = plt.subplot(gs1[cur_spwCol + 1]) (npol, nf, nx, ny) = cur_emap.data.shape print 'shape = ' + str(cur_emap.data.shape) if npol != 1: print 'To be determined' else: cur_emap.data = cur_emap.data.reshape((512, 512)) cur_emap.plot_settings['cmap'] = plt.get_cmap('jet') cur_emap.plot(axes=cur_axname)
def make_ephem(vis, ephemfile=None): import urllib2, ssl from taskinit import tb quantities = ['1', '14', '15', '17', '19', '20', '24', '32'] quantities = ','.join(quantities) tb.open(vis) btime = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') etime = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() print "Beginning time of this scan " + btime.iso print "End time of this scan " + etime.iso btime = Time((btime.mjd - 1.0 / 60. / 24.), format='mjd') etime = Time((etime.mjd + 1.0 / 60. / 24.), format='mjd') startdate = btime.iso.replace(' ', ',')[:-7] enddate = etime.iso.replace(' ', ',')[:-7] cmd = [ "COMMAND= '10'", "CENTER= '-5@399'", "MAKE_EPHEM= 'YES'", "TABLE_TYPE= 'OBSERVER'", "START_TIME= '%s'" % startdate, "STOP_TIME= '%s'" % enddate, "STEP_SIZE= '1m'", "CAL_FORMAT= 'CAL'", "TIME_DIGITS= 'MINUTES'", "ANG_FORMAT= 'DEG'", "OUT_UNITS= 'KM-S'", "RANGE_UNITS= 'AU'", "APPARENT= 'AIRLESS'", "SOLAR_ELONG= '0,180'", "SUPPRESS_RANGE_RATE= 'NO'", "SKIP_DAYLT= 'NO'", "EXTRA_PREC= 'NO'", "R_T_S_ONLY= 'NO'", "REF_SYSTEM= 'J2000'", "CSV_FORMAT= 'YES'", "OBJ_DATA= 'YES'", "TIME_DIGITS ='MIN'", "QUANTITIES= '{}'".format(quantities) ] cmdstr = "http://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l&" + '&'.join( cmd) try: context = ssl._create_unverified_context() f = urllib2.urlopen(cmdstr, context=context) except: f = urllib2.urlopen(cmdstr) lines = f.readlines() f.close() istart = 0 for i, l in enumerate(lines): if l[0:5] == '$$SOE': # start recording istart = i + 1 if l[0:5] == '$$EOE': # end recording iend = i if not ephemfile: ephemfile = 'sun-ephem-geo.txt' with open(ephemfile, 'w') as fb: for i, l in enumerate(lines): if i == istart - 3: fb.write( ' Date__(UT)__HR:MN R.A.___(J2000.0)___DEC. Ob-lon Ob-lat Sl-lon Sl-lat NP.ang NP.dist r rdot delta deldot S-T-O' ) if i >= istart and i < iend: l_s = l.split(',') l_s.pop(1) l_s.pop(1) fb.write(' '.join(l_s)) else: fb.write(l)
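
# Example usage of make_ephem (a sketch; queries JPL Horizons for a solar ephemeris
# covering the time span of a hypothetical MS, padded by one minute on each side,
# and writes it to a text file):
#     make_ephem('IDB20170906.ms', ephemfile='sun-ephem.txt')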
def calc_phasecenter_from_solxy(vis, timerange='', xycen=None, usemsphacenter=True): ''' return the phase center in RA and DEC of a given solar coordinates :param vis: input measurement sets file :param timerange: can be a string or astropy.time.core.Time object, or a 2-element list of string or Time object :param xycen: solar x-pos and y-pos in arcsec :param usemsphacenter: :return: phasecenter midtim: mid time of the given timerange ''' tb.open(vis + '/POINTING') tst = Time(tb.getcell('TIME_ORIGIN', 0) / 24. / 3600., format='mjd') ted = Time(tb.getcell('TIME_ORIGIN', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() datstr = tst.iso[:10] if isinstance(timerange, Time): try: (sttim, edtim) = timerange except: sttim = timerange edtim = sttim else: if timerange == '': sttim = tst edtim = ted else: try: (tstart, tend) = timerange.split('~') if tstart[2] == ':': sttim = Time(datstr + 'T' + tstart) edtim = Time(datstr + 'T' + tend) # timerange = '{0}/{1}~{0}/{2}'.format(datstr.replace('-', '/'), tstart, tend) else: sttim = Time(qa.quantity(tstart, 'd')['value'], format='mjd') edtim = Time(qa.quantity(tend, 'd')['value'], format='mjd') except: try: if timerange[2] == ':': sttim = Time(datstr + 'T' + timerange) edtim = sttim else: sttim = Time(qa.quantity(timerange, 'd')['value'], format='mjd') edtim = sttim except ValueError: print("keyword 'timerange' in wrong format") ms.open(vis) metadata = ms.metadata() observatory = metadata.observatorynames()[0] ms.close() midtim_mjd = (sttim.mjd + edtim.mjd) / 2. midtim = Time(midtim_mjd, format='mjd') eph = read_horizons(t0=midtim) if observatory == 'EOVSA' or (not usemsphacenter): print('This is EOVSA data') # use RA and DEC from FIELD ID 0 tb.open(vis + '/FIELD') phadir = tb.getcol('PHASE_DIR').flatten() tb.close() ra0 = phadir[0] dec0 = phadir[1] else: ra0 = eph['ra'][0] dec0 = eph['dec'][0] if not xycen: # use solar disk center as default phasecenter = 'J2000 ' + str(ra0) + 'rad ' + str(dec0) + 'rad' else: x0 = np.radians(xycen[0] / 3600.) y0 = np.radians(xycen[1] / 3600.) p0 = np.radians(eph['p0'][0]) # p angle in radians raoff = -((x0) * np.cos(p0) - y0 * np.sin(p0)) / np.cos(eph['dec'][0]) decoff = (x0) * np.sin(p0) + y0 * np.cos(p0) newra = ra0 + raoff newdec = dec0 + decoff phasecenter = 'J2000 ' + str(newra) + 'rad ' + str(newdec) + 'rad' return phasecenter, midtim
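
# The helioprojective-to-equatorial offset used in calc_phasecenter_from_solxy can be
# checked in isolation. A minimal, self-contained sketch (the solar x/y offsets, the
# p-angle and the declination below are made-up illustrative values):
def _example_solxy_to_radec_offset(x_asec=500., y_asec=200., p0_deg=20., dec0_rad=0.1):
    import numpy as np
    # convert arcsec offsets on the solar disk to radians
    x0 = np.radians(x_asec / 3600.)
    y0 = np.radians(y_asec / 3600.)
    p0 = np.radians(p0_deg)
    # rotate by the solar p-angle and scale RA by cos(dec), as done above
    raoff = -(x0 * np.cos(p0) - y0 * np.sin(p0)) / np.cos(dec0_rad)
    decoff = x0 * np.sin(p0) + y0 * np.cos(p0)
    return raoff, decoff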
def read_horizons(t0=None, dur=None, vis=None, observatory=None, verbose=False): ''' This function visits JPL Horizons to retrieve J2000 topocentric RA and DEC of the solar disk center as a function of time. Keyword arguments: t0: Referece time in astropy.Time format dur: duration of the returned coordinates. Default to 2 minutes vis: CASA visibility dataset (in measurement set format). If provided, use entire duration from the visibility data observatory: observatory code (from JPL Horizons). If not provided, use information from visibility. if no visibility found, use earth center (code=500) verbose: True to provide extra information Usage: >>> from astropy.time import Time >>> out = read_horizons(t0=Time('2017-09-10 16:00:00'), observatory='-81') >>> out = read_horizons(vis = 'mydata.ms') History: BC (sometime in 2014): function was first wrote, followed by a number of edits by BC and SY BC (2019-07-16): Added docstring documentation ''' try: # For Python 3.0 and later from urllib.request import urlopen except ImportError: # Fall back to Python 2's urllib2 from urllib2 import urlopen import ssl if not t0 and not vis: t0 = Time.now() if not dur: dur = 1. / 60. / 24. # default to 2 minutes if t0: try: btime = Time(t0) except: print('input time ' + str(t0) + ' not recognized') return -1 if vis: if not os.path.exists(vis): print('Input ms data ' + vis + ' does not exist! ') return -1 try: # ms.open(vis) # summary = ms.summary() # ms.close() # btime = Time(summary['BeginTime'], format='mjd') # etime = Time(summary['EndTime'], format='mjd') ## alternative way to avoid conflicts with importeovsa, if needed -- more time consuming if observatory == 'geocentric': observatory = '500' else: ms.open(vis) metadata = ms.metadata() if metadata.observatorynames()[0] == 'EVLA': observatory = '-5' elif metadata.observatorynames()[0] == 'EOVSA': observatory = '-81' elif metadata.observatorynames()[0] == 'ALMA': observatory = '-7' ms.close() tb.open(vis) btime_vis = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') etime_vis = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() if verbose: print("Beginning time of this scan " + btime_vis.iso) print("End time of this scan " + etime_vis.iso) # extend the start and end time for jpl horizons by 0.5 hr on each end btime = Time(btime_vis.mjd - 0.5 / 24., format='mjd') dur = etime_vis.mjd - btime_vis.mjd + 1.0 / 24. 
except: print('error in reading ms file: ' + vis + ' to obtain the ephemeris!') return -1 # default the observatory to geocentric, if none provided if not observatory: observatory = '500' etime = Time(btime.mjd + dur, format='mjd') try: cmdstr = "https://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=1&TABLE_TYPE='OBSERVER'&QUANTITIES='1,17,20'&CSV_FORMAT='YES'&ANG_FORMAT='DEG'&CAL_FORMAT='BOTH'&SOLAR_ELONG='0,180'&CENTER='{}@399'&COMMAND='10'&START_TIME='".format( observatory ) + btime.iso.replace( ' ', ',' ) + "'&STOP_TIME='" + etime.iso[:-4].replace( ' ', ',' ) + "'&STEP_SIZE='1m'&SKIP_DAYLT='NO'&EXTRA_PREC='YES'&APPARENT='REFRACTED'" cmdstr = cmdstr.replace("'", "%27") try: context = ssl._create_unverified_context() f = urlopen(cmdstr, context=context) except: f = urlopen(cmdstr) lines = f.readlines() f.close() except: # todo use geocentric coordinate for the new VLA data import requests, collections params = collections.OrderedDict() params['batch'] = '1' params['TABLE_TYPE'] = "'OBSERVER'" params['QUANTITIES'] = "'1,17,20'" params['CSV_FORMAT'] = "'YES'" params['ANG_FORMAT'] = "'DEG'" params['CAL_FORMAT'] = "'BOTH'" params['SOLAR_ELONG'] = "'0,180'" if observatory == '500': params['CENTER'] = "'500'" else: params['CENTER'] = "'{}@399'".format(observatory) params['COMMAND'] = "'10'" params['START_TIME'] = "'{}'".format(btime.iso[:-4].replace(' ', ',')) params['STOP_TIME'] = "'{}'".format(etime.iso[:-4].replace(' ', ',')) params['STEP_SIZE'] = "'1m'" params['SKIP_DAYLT'] = "'NO'" params['EXTRA_PREC'] = "'YES'" params['APPAENT'] = "'REFRACTED'" results = requests.get("https://ssd.jpl.nasa.gov/horizons_batch.cgi", params=params) lines = [ll for ll in results.iter_lines()] # add a check for python 3 if py3: lines = [l.decode('utf-8', 'backslashreplace') for l in lines] nline = len(lines) istart = 0 for i in range(nline): if lines[i][0:5] == '$$SOE': # start recording istart = i + 1 if lines[i][0:5] == '$$EOE': # end recording iend = i newlines = lines[istart:iend] nrec = len(newlines) ephem_ = [] t = [] ra = [] dec = [] p0 = [] delta = [] for line in newlines: items = line.split(',') t.append(Time(float(items[1]), format='jd').mjd) ra.append(np.radians(float(items[4]))) dec.append(np.radians(float(items[5]))) p0.append(float(items[6])) delta.append(float(items[8])) # convert list of dictionary to a dictionary of arrays ephem = {'time': t, 'ra': ra, 'dec': dec, 'p0': p0, 'delta': delta} return ephem
def concateovsa(vis, concatvis, datacolumn='corrected', keep_orig_ms=True, cols2rm="model,corrected", freqtol="", dirtol="", respectname=False, timesort=True, copypointing=True, visweightscale=[], forcesingleephemfield=""): if concatvis[-1] == os.path.sep: concatvis = concatvis[:-1] if os.path.sep not in concatvis: visprefix = './' else: visprefix = os.path.dirname(concatvis) + os.path.sep msfiles = vis msfiles_ = [] for idx, ll in enumerate(msfiles): if str(ll).endswith('/'): msfiles[idx] = str(ll)[:-1] datacolumn = datacolumn.lower() if datacolumn == 'data': print 'DATA columns will be concatenated.' for ll in msfiles: clearcal(vis=str(ll), addmodel=True) elif datacolumn == 'corrected': # try: print 'CORRECTED columns will be concatenated.' tmpdir = os.path.join(visprefix, 'tmp_ms') + os.path.sep if not os.path.exists(tmpdir): os.makedirs(tmpdir) for ll in msfiles: msfile_ = os.path.join(tmpdir, os.path.basename(str(ll))) msfiles_.append(msfile_) split(vis=str(ll), outputvis=msfile_, datacolumn='corrected') clearcal(vis=msfile_, addmodel=True) else: raise ValueError('Please set datacolumn to be "data" or "corrected"!') if msfiles_: concat(vis=msfiles_, concatvis=concatvis, freqtol=freqtol, dirtol=dirtol, respectname=respectname, timesort=timesort, copypointing=copypointing, visweightscale=visweightscale, forcesingleephemfield=forcesingleephemfield) os.system('rm -rf {}'.format(tmpdir)) else: concat(vis=msfiles, concatvis=concatvis, freqtol=freqtol, dirtol=dirtol, respectname=respectname, timesort=timesort, copypointing=copypointing, visweightscale=visweightscale, forcesingleephemfield=forcesingleephemfield) # Change all observation ids to be the same (zero) tb.open(concatvis + '/OBSERVATION', nomodify=False) nobs = tb.nrows() tim0 = tb.getcell('TIME_RANGE', 0)[0] tim1 = tb.getcell('TIME_RANGE', nobs - 1)[1] tb.removerows([i + 1 for i in range(nobs - 1)]) tb.putcell('TIME_RANGE', 0, [tim0, tim1]) tb.close() tb.open(concatvis + '/DATA_DESCRIPTION', nomodify=False) nrows = tb.nrows() pol_id = tb.getcol('POLARIZATION_ID') tb.removerows(np.where(pol_id != 0)[0]) tb.close() tb.open(concatvis, nomodify=False) dd_id = tb.getcol('DATA_DESC_ID') idx_dd_id, = np.where(dd_id >= nrows / 2) dd_id[idx_dd_id] = dd_id[idx_dd_id] - nrows / 2 tb.putcol('DATA_DESC_ID', dd_id) tb.close() tb.open(concatvis + '/FIELD', nomodify=False) nobs = tb.nrows() tb.removerows([i + 1 for i in range(nobs - 1)]) tb.close() tb.open(concatvis + '/SOURCE', nomodify=False) nobs = tb.nrows() tb.removerows([i + 1 for i in range(nobs - 1)]) tb.close() tb.open(concatvis, nomodify=False) obsid = tb.getcol('OBSERVATION_ID') newobsid = np.zeros(len(obsid), dtype='int') tb.putcol('OBSERVATION_ID', newobsid) fldid = tb.getcol('FIELD_ID') newfldid = np.zeros(len(fldid), dtype='int') tb.putcol('FIELD_ID', newfldid) colnames = tb.colnames() cols2rm = cols2rm.upper() cols2rm = cols2rm.split(',') for l in range(len(cols2rm)): col = cols2rm[l] + '_DATA' if col in colnames: try: tb.removecols(col) print 'Column {} removed.'.format(col) except: pass tb.close() if msfiles_ != [] and msfiles_ != msfiles: for ll in msfiles_: os.system('rm -rf {}'.format(ll)) if not keep_orig_ms: for ll in msfiles: os.system('rm -rf {}'.format(ll))
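
# Example usage of concateovsa (a sketch concatenating two hypothetical calibrated
# EOVSA measurement sets; the CORRECTED_DATA columns are split out, concatenated,
# and the MODEL/CORRECTED columns are removed from the result):
#     concateovsa(['IDB20170906T1900.ms', 'IDB20170906T1910.ms'],
#                 'IDB20170906_concat.ms', datacolumn='corrected', keep_orig_ms=True)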
def calibeovsa(vis=None, caltype=None, interp=None, docalib=True, doflag=True, flagant=None, doimage=False, imagedir=None, antenna=None, timerange=None, spw=None, stokes=None, doconcat=False, msoutdir=None, keep_orig_ms=True): ''' :param vis: EOVSA visibility dataset(s) to be calibrated :param caltype: :param interp: :param docalib: :param qlookimage: :param flagant: :param stokes: :param doconcat: :return: ''' if type(vis) == str: vis = [vis] for idx, f in enumerate(vis): if f[-1] == '/': vis[idx] = f[:-1] for msfile in vis: casalog.origin('calibeovsa') if not caltype: casalog.post( "Caltype not provided. Perform reference phase calibration and daily phase calibration." ) caltype = [ 'refpha', 'phacal', 'fluxcal' ] ## use this line after the phacal is applied # caltype = ['refcal'] if not os.path.exists(msfile): casalog.post("Input visibility does not exist. Aborting...") continue if msfile.endswith('/'): msfile = msfile[:-1] if not msfile[-3:] in ['.ms', '.MS']: casalog.post( "Invalid visibility. Please provide a proper visibility file ending with .ms" ) # if not caltable: # caltable=[os.path.basename(vis).replace('.ms','.'+c) for c in caltype] # get band information tb.open(msfile + '/SPECTRAL_WINDOW') nspw = tb.nrows() bdname = tb.getcol('NAME') bd_nchan = tb.getcol('NUM_CHAN') bd = [int(b[4:]) - 1 for b in bdname] # band index from 0 to 33 # nchans = tb.getcol('NUM_CHAN') # reffreqs = tb.getcol('REF_FREQUENCY') # cenfreqs = np.zeros((nspw)) tb.close() tb.open(msfile + '/ANTENNA') nant = tb.nrows() antname = tb.getcol('NAME') antlist = [str(ll) for ll in range(len(antname) - 1)] antennas = ','.join(antlist) tb.close() # get time stamp, use the beginning of the file tb.open(msfile + '/OBSERVATION') trs = {'BegTime': [], 'EndTime': []} for ll in range(tb.nrows()): tim0, tim1 = Time(tb.getcell('TIME_RANGE', ll) / 24 / 3600, format='mjd') trs['BegTime'].append(tim0) trs['EndTime'].append(tim1) tb.close() trs['BegTime'] = Time(trs['BegTime']) trs['EndTime'] = Time(trs['EndTime']) btime = np.min(trs['BegTime']) etime = np.max(trs['EndTime']) # ms.open(vis) # summary = ms.summary() # ms.close() # btime = Time(summary['BeginTime'], format='mjd') # etime = Time(summary['EndTime'], format='mjd') ## stop using ms.summary to avoid conflicts with importeovsa t_mid = Time((btime.mjd + etime.mjd) / 2., format='mjd') print "This scan observed from {} to {} UTC".format( btime.iso, etime.iso) gaintables = [] if ('refpha' in caltype) or ('refamp' in caltype) or ('refcal' in caltype): refcal = ra.sql2refcalX(btime) pha = refcal['pha'] # shape is 15 (nant) x 2 (npol) x 34 (nband) pha[np.where(refcal['flag'] == 1)] = 0. amp = refcal['amp'] amp[np.where(refcal['flag'] == 1)] = 1. t_ref = refcal['timestamp'] # find the start and end time of the local day when refcal is registered try: dhr = t_ref.LocalTime.utcoffset().total_seconds() / 60. / 60. except: dhr = -7. bt = Time(np.fix(t_ref.mjd + dhr / 24.) - dhr / 24., format='mjd') et = Time(bt.mjd + 1., format='mjd') (yr, mon, day) = (bt.datetime.year, bt.datetime.month, bt.datetime.day) dirname = caltbdir + str(yr) + str(mon).zfill(2) + '/' if not os.path.exists(dirname): os.mkdir(dirname) # check if there is any ROACH reboot between the reference calibration found and the current data t_rbts = db.get_reboot(Time([t_ref, btime])) if not t_rbts: casalog.post( "Reference calibration is derived from observation at " + t_ref.iso) print "Reference calibration is derived from observation at " + t_ref.iso else: casalog.post( "Oh crap! 
Roach reboot detected between the reference calibration time " + t_ref.iso + ' and the current observation at ' + btime.iso) casalog.post("Aborting...") print "Oh crap! Roach reboot detected between the reference calibration time " + t_ref.iso + ' and the current observation at ' + btime.iso print "Aborting..." para_pha = [] para_amp = [] calpha = np.zeros((nspw, 15, 2)) calamp = np.zeros((nspw, 15, 2)) for s in range(nspw): for n in range(15): for p in range(2): calpha[s, n, p] = pha[n, p, bd[s]] calamp[s, n, p] = amp[n, p, bd[s]] para_pha.append(np.degrees(pha[n, p, bd[s]])) para_amp.append(amp[n, p, bd[s]]) if 'fluxcal' in caltype: calfac = pc.get_calfac(Time(t_mid.iso.split(' ')[0] + 'T23:59:59')) t_bp = Time(calfac['timestamp'], format='lv') if int(t_mid.mjd) == int(t_bp.mjd): accalfac = calfac['accalfac'] # (ant x pol x freq) # tpcalfac = calfac['tpcalfac'] # (ant x pol x freq) caltb_autoamp = dirname + t_bp.isot[:-4].replace( ':', '').replace('-', '') + '.bandpass' if not os.path.exists(caltb_autoamp): bandpass(vis=msfile, caltable=caltb_autoamp, solint='inf', refant='eo01', minblperant=0, minsnr=0, bandtype='B', docallib=False) tb.open(caltb_autoamp, nomodify=False) # (ant x spw) bd_chanidx = np.hstack([[0], bd_nchan.cumsum()]) for ll in range(nspw): antfac = np.sqrt( accalfac[:, :, bd_chanidx[ll]:bd_chanidx[ll + 1]]) # # antfac *= tpcalfac[:, :,bd_chanidx[ll]:bd_chanidx[ll + 1]] antfac = np.moveaxis(antfac, 0, 2) cparam = np.zeros((2, bd_nchan[ll], nant)) cparam[:, :, :-3] = 1.0 / antfac tb.putcol('CPARAM', cparam + 0j, ll * nant, nant) paramerr = tb.getcol('PARAMERR', ll * nant, nant) paramerr = paramerr * 0 tb.putcol('PARAMERR', paramerr, ll * nant, nant) bpflag = tb.getcol('FLAG', ll * nant, nant) bpant1 = tb.getcol('ANTENNA1', ll * nant, nant) bpflagidx, = np.where(bpant1 >= 13) bpflag[:] = False bpflag[:, :, bpflagidx] = True tb.putcol('FLAG', bpflag, ll * nant, nant) bpsnr = tb.getcol('SNR', ll * nant, nant) bpsnr[:] = 100.0 bpsnr[:, :, bpflagidx] = 0.0 tb.putcol('SNR', bpsnr, ll * nant, nant) tb.close() msg_prompt = "Scaling calibration is derived for {}.".format( msfile) casalog.post(msg_prompt) print msg_prompt gaintables.append(caltb_autoamp) else: msg_prompt = "Caution: No TPCAL is available on {}. No scaling calibration is derived for {}.".format( t_mid.datetime.strftime('%b %d, %Y'), msfile) casalog.post(msg_prompt) print msg_prompt if ('refpha' in caltype) or ('refcal' in caltype): # caltb_pha = os.path.basename(vis).replace('.ms', '.refpha') # check if the calibration table already exists caltb_pha = dirname + t_ref.isot[:-4].replace(':', '').replace( '-', '') + '.refpha' if not os.path.exists(caltb_pha): gencal(vis=msfile, caltable=caltb_pha, caltype='ph', antenna=antennas, pol='X,Y', spw='0~' + str(nspw - 1), parameter=para_pha) gaintables.append(caltb_pha) if ('refamp' in caltype) or ('refcal' in caltype): # caltb_amp = os.path.basename(vis).replace('.ms', '.refamp') caltb_amp = dirname + t_ref.isot[:-4].replace(':', '').replace( '-', '') + '.refamp' if not os.path.exists(caltb_amp): gencal(vis=msfile, caltable=caltb_amp, caltype='amp', antenna=antennas, pol='X,Y', spw='0~' + str(nspw - 1), parameter=para_amp) gaintables.append(caltb_amp) # calibration for the change of delay center between refcal time and beginning of scan -- hopefully none! 
xml, buf = ch.read_calX(4, t=[t_ref, btime], verbose=False) if buf: dly_t2 = Time(stf.extract(buf[0], xml['Timestamp']), format='lv') dlycen_ns2 = stf.extract(buf[0], xml['Delaycen_ns'])[:15] xml, buf = ch.read_calX(4, t=t_ref) dly_t1 = Time(stf.extract(buf, xml['Timestamp']), format='lv') dlycen_ns1 = stf.extract(buf, xml['Delaycen_ns'])[:15] dlycen_ns_diff = dlycen_ns2 - dlycen_ns1 for n in range(2): dlycen_ns_diff[:, n] -= dlycen_ns_diff[0, n] print 'Multi-band delay is derived from delay center difference at {} & {}'.format( dly_t1.iso, dly_t2.iso) # print '=====Delays relative to Ant 14=====' # for i, dl in enumerate(dlacen_ns_diff[:, 0] - dlacen_ns_diff[13, 0]): # ant = antlist[i] # print 'Ant eo{0:02d}: x {1:.2f} ns & y {2:.2f} ns'.format(int(ant) + 1, dl # dlacen_ns_diff[i, 1] - dlacen_ns_diff[13, 1]) # caltb_mbd0 = os.path.basename(vis).replace('.ms', '.mbd0') caltb_dlycen = dirname + dly_t2.isot[:-4].replace(':', '').replace( '-', '') + '.dlycen' if not os.path.exists(caltb_dlycen): gencal(vis=msfile, caltable=caltb_dlycen, caltype='mbd', pol='X,Y', antenna=antennas, parameter=dlycen_ns_diff.flatten().tolist()) gaintables.append(caltb_dlycen) if 'phacal' in caltype: phacals = np.array( ra.sql2phacalX([bt, et], neat=True, verbose=False)) if not phacals.any() or len(phacals) == 0: print "Found no phacal records in SQL database, will skip phase calibration" else: # first generate all phacal calibration tables if not already exist t_phas = Time([phacal['t_pha'] for phacal in phacals]) # sort the array in ascending order by t_pha sinds = t_phas.mjd.argsort() t_phas = t_phas[sinds] phacals = phacals[sinds] caltbs_phambd = [] for i, phacal in enumerate(phacals): # filter out phase cals with reference time stamp >30 min away from the provided refcal time if (phacal['t_ref'].jd - refcal['timestamp'].jd) > 30. / 1440.: del phacals[i] del t_phas[i] continue else: t_pha = phacal['t_pha'] phambd_ns = phacal['pslope'] for n in range(2): phambd_ns[:, n] -= phambd_ns[0, n] # set all flagged values to be zero phambd_ns[np.where(phacal['flag'] == 1)] = 0. caltb_phambd = dirname + t_pha.isot[:-4].replace( ':', '').replace('-', '') + '.phambd' caltbs_phambd.append(caltb_phambd) if not os.path.exists(caltb_phambd): gencal(vis=msfile, caltable=caltb_phambd, caltype='mbd', pol='X,Y', antenna=antennas, parameter=phambd_ns.flatten().tolist()) # now decides which table to apply depending on the interpolation method ("neatest" or "linear") if interp == 'nearest': tbind = np.argmin(np.abs(t_phas.mjd - t_mid.mjd)) dt = np.min(np.abs(t_phas.mjd - t_mid.mjd)) * 24. print "Selected nearest phase calibration table at " + t_phas[ tbind].iso gaintables.append(caltbs_phambd[tbind]) if interp == 'linear': # bphacal = ra.sql2phacalX(btime) # ephacal = ra.sql2phacalX(etime,reverse=True) bt_ind, = np.where(t_phas.mjd < btime.mjd) et_ind, = np.where(t_phas.mjd > etime.mjd) if len(bt_ind) == 0 and len(et_ind) == 0: print "No phacal found before or after the ms data within the day of observation" print "Skipping daily phase calibration" elif len(bt_ind) > 0 and len(et_ind) == 0: gaintables.append(caltbs_phambd[bt_ind[-1]]) elif len(bt_ind) == 0 and len(et_ind) > 0: gaintables.append(caltbs_phambd[et_ind[0]]) elif len(bt_ind) > 0 and len(et_ind) > 0: bphacal = phacals[bt_ind[-1]] ephacal = phacals[et_ind[0]] # generate a new table interpolating between two daily phase calibrations t_pha_mean = Time(np.mean( [bphacal['t_pha'].mjd, ephacal['t_pha'].mjd]), format='mjd') phambd_ns = (bphacal['pslope'] + ephacal['pslope']) / 2. 
for n in range(2): phambd_ns[:, n] -= phambd_ns[0, n] # set all flagged values to be zero phambd_ns[np.where(bphacal['flag'] == 1)] = 0. phambd_ns[np.where(ephacal['flag'] == 1)] = 0. caltb_phambd_interp = dirname + t_pha_mean.isot[:-4].replace( ':', '').replace('-', '') + '.phambd' if not os.path.exists(caltb_phambd_interp): gencal(vis=msfile, caltable=caltb_phambd_interp, caltype='mbd', pol='X,Y', antenna=antennas, parameter=phambd_ns.flatten().tolist()) print "Using phase calibration table interpolated between records at " + bphacal[ 't_pha'].iso + ' and ' + ephacal['t_pha'].iso gaintables.append(caltb_phambd_interp) if docalib: clearcal(msfile) applycal(vis=msfile, gaintable=gaintables, applymode='calflag', calwt=False) # delete the interpolated phase calibration table try: caltb_phambd_interp except: pass else: if os.path.exists(caltb_phambd_interp): shutil.rmtree(caltb_phambd_interp) if doflag: # flag zeros and NaNs flagdata(vis=msfile, mode='clip', clipzeros=True) if flagant: try: flagdata(vis=msfile, antenna=flagant) except: print "Something wrong with flagant. Abort..." if doimage: from matplotlib import pyplot as plt from suncasa.utils import helioimage2fits as hf from sunpy import map as smap if not antenna: antenna = '0~12' if not stokes: stokes = 'XX' if not timerange: timerange = '' if not spw: spw = '1~3' if not imagedir: imagedir = '.' #(yr, mon, day) = (bt.datetime.year, bt.datetime.month, bt.datetime.day) #dirname = imagedir + str(yr) + '/' + str(mon).zfill(2) + '/' + str(day).zfill(2) + '/' #if not os.path.exists(dirname): # os.makedirs(dirname) bds = [spw] nbd = len(bds) imgs = [] for bd in bds: if '~' in bd: bdstr = bd.replace('~', '-') else: bdstr = str(bd).zfill(2) imname = imagedir + '/' + os.path.basename(msfile).replace( '.ms', '.bd' + bdstr) print 'Cleaning image: ' + imname try: clean(vis=msfile, imagename=imname, antenna=antenna, spw=bd, timerange=timerange, imsize=[512], cell=['5.0arcsec'], stokes=stokes, niter=500) except: print 'clean not successfull for band ' + str(bd) else: imgs.append(imname + '.image') junks = ['.flux', '.mask', '.model', '.psf', '.residual'] for junk in junks: if os.path.exists(imname + junk): shutil.rmtree(imname + junk) tranges = [btime.iso + '~' + etime.iso] * nbd fitsfiles = [img.replace('.image', '.fits') for img in imgs] hf.imreg(vis=msfile, timerange=tranges, imagefile=imgs, fitsfile=fitsfiles, usephacenter=False) plt.figure(figsize=(6, 6)) for i, fitsfile in enumerate(fitsfiles): plt.subplot(1, nbd, i + 1) eomap = smap.Map(fitsfile) sz = eomap.data.shape if len(sz) == 4: eomap.data = eomap.data.reshape((sz[2], sz[3])) eomap.plot_settings['cmap'] = plt.get_cmap('jet') eomap.plot() eomap.draw_limb() eomap.draw_grid() plt.show() if doconcat: if len(vis) > 1: # from suncasa.eovsa import concateovsa as ce from suncasa.tasks import concateovsa_cli as ce if msoutdir is None: msoutdir = './' concatvis = os.path.basename(vis[0]) concatvis = msoutdir + '/' + concatvis.split('.')[0] + '_concat.ms' ce.concateovsa(vis, concatvis, datacolumn='corrected', keep_orig_ms=keep_orig_ms, cols2rm="model,corrected") return [concatvis] else: return vis
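
# Example usage of calibeovsa (a sketch; applies reference and daily phase calibration
# to hypothetical EOVSA measurement sets and concatenates the results, assuming the
# EOVSA calibration SQL database utilities used above are reachable):
#     calibeovsa(vis=['IDB20170906T1900.ms', 'IDB20170906T1910.ms'],
#                caltype=['refpha', 'phacal'], interp='nearest',
#                docalib=True, doflag=True, doconcat=True, msoutdir='./')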
def read_horizons(vis): import urllib2 import ssl if not os.path.exists(vis): print 'Input ms data ' + vis + ' does not exist! ' return -1 try: #ms.open(vis) #summary = ms.summary() #ms.close() #btime = Time(summary['BeginTime'], format='mjd') #etime = Time(summary['EndTime'], format='mjd') ## alternative way to avoid conflicts with importeovsa, if needed -- more time consuming tb.open(vis) btime = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') etime = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. / 3600., format='mjd') tb.close() print "Beginning time of this scan " + btime.iso print "End time of this scan " + etime.iso try: context = ssl._create_unverified_context() f = urllib2.urlopen( "http://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l&TABLE_TYPE='OBSERVER'&QUANTITIES='1,17,20'&CSV_FORMAT='YES'&ANG_FORMAT='DEG'&CAL_FORMAT='BOTH'&SOLAR_ELONG='0,180'&CENTER='-81@399'&COMMAND='10'&START_TIME='" + btime.iso.replace(' ', ',') + "'&STOP_TIME='" + etime.iso[:-4].replace(' ', ',') + "'&STEP_SIZE='1m'&SKIP_DAYLT='NO'", context=context) except: f = urllib2.urlopen( "http://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l&TABLE_TYPE='OBSERVER'&QUANTITIES='1,17,20'&CSV_FORMAT='YES'&ANG_FORMAT='DEG'&CAL_FORMAT='BOTH'&SOLAR_ELONG='0,180'&CENTER='-81@399'&COMMAND='10'&START_TIME='" + btime.iso.replace(' ', ',') + "'&STOP_TIME='" + etime.iso[:-4].replace(' ', ',') + "'&STEP_SIZE='1m'&SKIP_DAYLT='NO'") except: print 'error in reading ms file: ' + vis + ' to obtain the ephemeris!' return -1 # inputs: # ephemfile: # OBSERVER output from JPL Horizons for topocentric coordinates with for example # target=Sun, observer=VLA=-5 # extra precision, quantities 1,17,20, REFRACTION # routine goes through file to find $$SOE which is start of ephemeris and ends with $$EOE # outputs: a Python dictionary containing the following: # timestr: date and time as a string # time: modified Julian date # ra: right ascention, in rad # dec: declination, in rad # rastr: ra in string # decstr: dec in string # p0: solar p angle, CCW with respect to the celestial north pole # delta: distance from the disk center to the observer, in AU # delta_dot: time derivative of delta, in the light of sight direction. Negative means it is moving toward the observer # # initialize the return dictionary ephem0 = dict.fromkeys(['time', 'ra', 'dec', 'delta', 'p0']) lines = f.readlines() f.close() nline = len(lines) istart = 0 for i in range(nline): line = lines[i] if line[0:5] == '$$SOE': # start recording istart = i + 1 if line[0:5] == '$$EOE': # end recording iend = i newlines = lines[istart:iend] nrec = len(newlines) ephem_ = [] t = [] ra = [] dec = [] p0 = [] delta = [] for line in newlines: items = line.split(',') # t.append({'unit':'mjd','value':Time(float(items[1]),format='jd').mjd}) # ra.append({'unit': 'rad', 'value': np.radians(float(items[4]))}) # dec.append({'unit': 'rad', 'value': np.radians(float(items[5]))}) # p0.append({'unit': 'deg', 'value': float(items[6])}) # delta.append({'unit': 'au', 'value': float(items[8])}) t.append(Time(float(items[1]), format='jd').mjd) ra.append(np.radians(float(items[4]))) dec.append(np.radians(float(items[5]))) p0.append(float(items[6])) delta.append(float(items[8])) # convert list of dictionary to a dictionary of arrays ephem = {'time': t, 'ra': ra, 'dec': dec, 'p0': p0, 'delta': delta} return ephem
def get_dspec(vis=None, savespec=True, specfile=None, bl='', uvrange='', field='', scan='', datacolumn='data', domedian=False, timeran=None, spw=None, timebin='0s', regridfreq=False, fillnan=None, verbose=False, usetbtool=False): # from split_cli import split_cli as split if vis.endswith('/'): vis = vis[:-1] msfile = vis if not spw: spw = '' if not timeran: timeran = '' if domedian: if not uvrange: uvrange = '0.2~0.8km' # bl = '' else: uvrange = '' if not bl: bl = '' else: uvrange = '' # Open the ms and plot dynamic spectrum if verbose: print('Splitting selected data...') if usetbtool: try: tb.open(vis + '/POLARIZATION') corrtype = tb.getcell('CORR_TYPE', 0) pols = [stokesenum[p] for p in corrtype] tb.close() except: pols = [] antmask = [] if uvrange is not '' or bl is not '': ms.open(vis) ms.selectinit(datadescid=0) mdata = ms.metadata() antlist = mdata.antennaids() mdata.done() staql = {'uvdist': uvrange, 'baseline': bl, 'spw': spw, 'field': field, 'scan': scan, 'timerange': timeran} ### todo the selection only works for uvrange and bl. To make the selection of other items works, ## I need to make mask for other items. a = ms.msselect(staql) mdata = ms.metadata() baselines = mdata.baselines() for lidx, l in enumerate(antlist): antmask.append(baselines[l][antlist[lidx:]]) antmask = np.hstack(antmask) mdata.done() ms.close() tb.open(vis) spwtb = tbtool() spwtb.open(vis + '/SPECTRAL_WINDOW') ptb = tbtool() ptb.open(vis + '/POLARIZATION') ms.open(vis) spwlist = [] mdata = ms.metadata() nspw = mdata.nspw() nbl = mdata.nbaselines() + mdata.nantennas() nscans = mdata.nscans() spw_nfrq = [] # List of number of frequencies in each spw for i in range(nspw): spw_nfrq.append(mdata.nchan(i)) spw_nfrq = np.array(spw_nfrq) nf = np.sum(spw_nfrq) smry = mdata.summary() scan_ntimes = [] # List of number of times in each scan for iscan in range(nscans): scan_ntimes.append( smry['observationID=0']['arrayID=0']['scan=' + str(iscan)]['fieldID=0']['nrows'] / nspw / nbl) scan_ntimes = np.array(scan_ntimes) scan_ntimes_integer = scan_ntimes.astype(np.int) if len(np.where(scan_ntimes % scan_ntimes_integer != 0)[0]) != 0: # if True: scan_ntimes = [] # List of number of times in each scan for iscan in range(nscans): scan_ntimes.append( len(smry['observationID=0']['arrayID=0']['scan=' + str(iscan)]['fieldID=0'].keys()) - 6) scan_ntimes = np.array(scan_ntimes) else: scan_ntimes = scan_ntimes_integer nt = np.sum(scan_ntimes) times = tb.getcol('TIME') if times[nbl] - times[0] != 0: # This is frequency/scan sort order order = 'f' elif times[nbl * nspw - 1] - times[0] != 0: # This is time sort order order = 't' npol = ptb.getcol('NUM_CORR', 0, 1)[0] ptb.close() freq = np.zeros(nf, float) times = np.zeros(nt, float) if order == 't': specamp = np.zeros((npol, nf, nbl, nt), np.complex) for j in range(nt): fptr = 0 # Loop over spw for i in range(nspw): # Get channel frequencies for this spw (annoyingly comes out as shape (nf, 1) cfrq = spwtb.getcol('CHAN_FREQ', i, 1)[:, 0] if j == 0: # Only need this the first time through spwlist += [i] * len(cfrq) if i == 0: times[j] = tb.getcol('TIME', nbl * (i + nspw * j), 1) # Get the time spec_ = tb.getcol('DATA', nbl * (i + nspw * j), nbl) # Get complex data for this spw flag = tb.getcol('FLAG', nbl * (i + nspw * j), nbl) # Get flags for this spw nfrq = len(cfrq) # Apply flags if type(fillnan) in [int, float]: spec_[flag] = float(fillnan) else: spec_[flag] = 0.0 # Insert data for this spw into larger array specamp[:, fptr:fptr + nfrq, :, j] = spec_ freq[fptr:fptr + nfrq] = cfrq fptr 
+= nfrq else: specf = np.zeros((npol, nf, nt, nbl), np.complex) # Array indexes are swapped iptr = 0 for j in range(nscans): # Loop over scans for i in range(nspw): # Loop over spectral windows s = scan_ntimes[j] f = spw_nfrq[i] s1 = np.sum(scan_ntimes[:j]) # Start time index s2 = np.sum(scan_ntimes[:j + 1]) # End time index f1 = np.sum(spw_nfrq[:i]) # Start freq index f2 = np.sum(spw_nfrq[:i + 1]) # End freq index spec_ = tb.getcol('DATA', iptr, nbl * s) flag = tb.getcol('FLAG', iptr, nbl * s) if j == 0: cfrq = spwtb.getcol('CHAN_FREQ', i, 1)[:, 0] freq[f1:f2] = cfrq spwlist += [i] * len(cfrq) times[s1:s2] = tb.getcol('TIME', iptr, nbl * s).reshape(s, nbl)[:, 0] # Get the times iptr += nbl * s # Apply flags if type(fillnan) in [int, float]: spec_[flag] = float(fillnan) else: spec_[flag] = 0.0 # Insert data for this spw into larger array specf[:, f1:f2, s1:s2] = spec_.reshape(npol, f, s, nbl) # Swap the array indexes back to the desired order specamp = np.swapaxes(specf, 2, 3) tb.close() spwtb.close() ms.close() if len(antmask) > 0: specamp = specamp[:, :, np.where(antmask)[0], :] (npol, nfreq, nbl, ntim) = specamp.shape tim = times else: # Open the ms and plot dynamic spectrum if verbose: print('Splitting selected data...') vis_spl = './tmpms.splitted' if os.path.exists(vis_spl): os.system('rm -rf ' + vis_spl) # split(vis=msfile, outputvis=vis_spl, timerange=timeran, antenna=bl, field=field, scan=scan, spw=spw, # uvrange=uvrange, timebin=timebin, datacolumn=datacolumn) try: from split_cli import split_cli as split split(vis=msfile, outputvis=vis_spl, datacolumn=datacolumn, timerange=timeran, spw=spw, antenna=bl, field=field, scan=scan, uvrange=uvrange, timebin=timebin) except: ms.open(msfile, nomodify=True) ms.split(outputms=vis_spl, whichcol=datacolumn, time=timeran, spw=spw, baseline=bl, field=field, scan=scan, uvrange=uvrange, timebin=timebin) ms.close() if verbose: print('Regridding into a single spectral window...') # print('Reading data spw by spw') try: tb.open(vis_spl + '/POLARIZATION') corrtype = tb.getcell('CORR_TYPE', 0) pols = [stokesenum[p] for p in corrtype] tb.close() except: pols = [] if regridfreq: ms.open(vis_spl, nomodify=False) ms.cvel(outframe='LSRK', mode='frequency', interp='nearest') ms.selectinit(datadescid=0, reset=True) data = ms.getdata(['amplitude', 'time', 'axis_info'], ifraxis=True) specamp = data['amplitude'] freq = data['axis_info']['freq_axis']['chan_freq'] else: ms.open(vis_spl) ms.selectinit(datadescid=0, reset=True) spwinfo = ms.getspectralwindowinfo() specamp = [] freq = [] time = [] for descid in range(len(spwinfo.keys())): ms.selectinit(datadescid=0, reset=True) ms.selectinit(datadescid=descid) data = ms.getdata(['amplitude', 'time', 'axis_info'], ifraxis=True) specamp_ = data['amplitude'] freq_ = data['axis_info']['freq_axis']['chan_freq'] time_ = data['time'] if fillnan is not None: flag_ = ms.getdata(['flag', 'time', 'axis_info'], ifraxis=True)['flag'] if type(fillnan) in [int, float, long]: specamp_[flag_] = float(fillnan) else: specamp_[flag_] = 0.0 specamp.append(specamp_) freq.append(freq_) time.append(time_) specamp = np.concatenate(specamp, axis=1) freq = np.concatenate(freq, axis=0) ms.selectinit(datadescid=0, reset=True) ms.close() os.system('rm -rf ' + vis_spl) (npol, nfreq, nbl, ntim) = specamp.shape freq = freq.reshape(nfreq) tim = data['time'] if verbose: print('npol, nfreq, nbl, ntime:', (npol, nfreq, nbl, ntim)) spec = np.swapaxes(specamp, 2, 1) if domedian: if verbose: print('doing median of all the baselines') # mask zero values 
before median # spec_masked = np.ma.masked_where(spec < 1e-9, spec) # spec_masked2 = np.ma.masked_invalid(spec) # spec_masked = np.ma.masked_array(spec, mask=np.logical_or(spec_masked.mask, spec_masked2.mask)) # spec_med = np.ma.filled(np.ma.median(spec_masked, axis=1), fill_value=0.) spec = np.abs(spec) spec_med = np.nanmedian(spec, axis=1) nbl = 1 ospec = spec_med.reshape((npol, nbl, nfreq, ntim)) else: ospec = spec # Save the dynamic spectral data if savespec: if not specfile: specfile = msfile + '.dspec.npz' if os.path.exists(specfile): os.system('rm -rf ' + specfile) np.savez(specfile, spec=ospec, tim=tim, freq=freq, timeran=timeran, spw=spw, bl=bl, uvrange=uvrange, pol=pols) if verbose: print('Median dynamic spectrum saved as: ' + specfile) return {'spec': ospec, 'tim': tim, 'freq': freq, 'timeran': timeran, 'spw': spw, 'bl': bl, 'uvrange': uvrange, 'pol': pols}
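

# Usage sketch for get_dspec: the file names are hypothetical placeholders. With
# domedian=True the baseline axis collapses to length 1, and the saved npz can be
# reloaded later without rerunning the (slow) visibility read.
def _example_get_dspec():
    from astropy.time import Time
    d = get_dspec(vis='IDB20170821T1900.ms', specfile='IDB20170821T1900.dspec.npz',
                  uvrange='0.2~0.8km', domedian=True, savespec=True, verbose=True)
    spec = d['spec']  # shape (npol, nbl, nfreq, ntim); nbl == 1 when domedian=True
    tim = Time(np.array(d['tim']) / 24. / 3600., format='mjd')  # TIME column is in seconds
    freqghz = np.array(d['freq']) / 1e9
    print(spec.shape)
    print(tim[0].iso)
    print(freqghz[0])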
def compVarColTables(referencetab, testtab, varcol, tolerance=0.):
    '''Compare a variable column of two tables.
       referencetab --> a reference table
       testtab --> a table to verify
       varcol --> the name of a variable column (str)
       Returns True or False.
    '''
    retval = True
    tb2 = casac.table()
    tb.open(referencetab)
    cnames = tb.colnames()
    tb2.open(testtab)
    col = varcol
    if tb.isvarcol(col) and tb2.isvarcol(col):
        try:
            # First check: the two tables must have the same number of rows
            if tb.nrows() != tb2.nrows():
                print 'Number of rows in %s differs from %s: %s != %s' % (referencetab, testtab, tb.nrows(), tb2.nrows())
                retval = False
            else:
                for therow in xrange(tb.nrows()):
                    rdata = tb.getcell(col, therow)
                    tdata = tb2.getcell(col, therow)
                    # element-wise comparison; fall through to the tolerance check if any cell differs
                    if not np.array_equal(rdata, tdata):
                        if (tolerance > 0.):
                            differs = False
                            for j in range(0, len(rdata)):
                                if (isinstance(rdata[j], float)) or (isinstance(rdata[j], int)):
                                    if (abs(rdata[j] - tdata[j]) > tolerance * abs(rdata[j] + tdata[j])):
                                        differs = True
                                elif (isinstance(rdata[j], list)) or (isinstance(rdata[j], np.ndarray)):
                                    for k in range(0, len(rdata[j])):
                                        if (abs(rdata[j][k] - tdata[j][k]) > tolerance * abs(rdata[j][k] + tdata[j][k])):
                                            differs = True
                            if differs:
                                print 'ERROR: Column %s of %s and %s do not agree within tolerance %s' % (col, referencetab, testtab, tolerance)
                                retval = False
                                break
                        else:
                            print 'ERROR: Column %s of %s and %s do not agree.' % (col, referencetab, testtab)
                            print 'ERROR: First row to differ is row=%s' % therow
                            retval = False
                            break
        finally:
            tb.close()
            tb2.close()
    else:
        print 'Columns are not varcolumns.'
        retval = False
    if retval:
        print 'Column %s of %s and %s agree' % (col, referencetab, testtab)
    return retval
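

# Usage sketch for compVarColTables: the table names are hypothetical placeholders.
# A nonzero tolerance allows small relative numerical differences between cells.
def _example_compare_data_columns():
    ok = compVarColTables('reference.ms', 'test.ms', 'DATA', tolerance=1e-6)
    if not ok:
        print('DATA columns differ beyond the requested tolerance')
    return ok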
def svplot(vis, timerange=None, spw='', workdir='./', specfile=None, bl=None, uvrange=None, stokes='RR,LL', dmin=None, dmax=None, goestime=None, reftime=None, xycen=None, fov=[500.,500.], xyrange=None, restoringbeam=[''], robust=0.0, niter=500, imsize=[512], cell=['5.0arcsec'],interactive=False, usemsphacenter=True, imagefile=None, fitsfile=None, plotaia=True, aiawave=171, aiafits=None, savefig=False, mkmovie=False, overwrite=True, ncpu=10, twidth=1, verbose=True): ''' Required inputs: vis: calibrated CASA measurement set Important optional inputs: timerange: timerange for clean. Standard CASA time selection format. If not provided, use the entire range (*BE CAREFUL, COULD BE VERY SLOW*) spw: spectral window selection following the CASA syntax. Examples: spw='1:2~60' (spw id 1, channel range 2-60); spw='*:1.2~1.3GHz' (selects all channels within 1.2-1.3 GHz; note the *) specfile: supply dynamic spectrum save file (from suncasa.utils.dspec2.get_dspec()). Otherwise generate a median dynamic spectrum on the fly Optional inputs: bl: baseline to generate dynamic spectrum uvrange: uvrange to select baselines for generating dynamic spectrum stokes: polarization of the clean image, can be 'RR,LL' or 'I,V' dmin,dmax: color bar parameter goestime: goes plot time, example ['2016/02/18 18:00:00','2016/02/18 23:00:00'] rhessisav: rhessi savefile reftime: reftime for the image xycen: center of the image in helioprojective coordinates (HPLN/HPLT), in arcseconds. Example: [900, -150.] fov: field of view in arcsecs. Example: [500., 500.] xyrange: field of view in solar XY coordinates. Format: [[x1,x2],[y1,y2]]. Example: [[900., 1200.],[0,300]] ***NOTE: THIS PARAMETER OVERWRITES XYCEN AND FOV*** aiawave: wave length of aia file in a imagefile: if imagefile provided, use it. Otherwise do clean and generate a new one. fitsfile: if fitsfile provided, use it. Otherwise generate a new one savefig: whether to save the figure Example: ''' if xycen: xc, yc = xycen xlen, ylen = fov if parse_version(sunpy.__version__)>parse_version('0.8.0'): xyrange = [[xc - xlen / 2.0, yc - ylen / 2.0], [xc + xlen / 2.0, yc + ylen / 2.0]] else: xyrange = [[xc - xlen / 2.0, xc + xlen / 2.0], [yc - ylen / 2.0, yc + ylen / 2.0]] stokes_allowed = ['RR,LL', 'I,V', 'RRLL', 'IV'] if not stokes in stokes_allowed: print 'wrong stokes parameter ' + str(stokes) + '. Allowed values are ' + ', '.join(stokes_allowed) return -1 if stokes == 'RRLL': stokes = 'RR,LL' if stokes == 'IV': stokes = 'I,V' if vis[-1] == '/': vis = vis[:-1] if not os.path.exists(vis): print 'input measurement not exist' return -1 if aiafits is None: aiafits = '' # split the data # generating dynamic spectrum if not os.path.exists(workdir): os.makedirs(workdir) if specfile: try: specdata = np.load(specfile) except: print('Provided dynamic spectrum file not numpy npz. Generating one from the visibility data') specfile = os.path.join(workdir, os.path.basename(vis) + '.dspec.npz') dspec_external(vis, workdir=workdir, specfile=specfile) specdata = np.load(specfile) # specdata = ds.get_dspec(vis, domedian=True, verbose=True) else: print('Dynamic spectrum file not provided; Generating one from the visibility data') # specdata = ds.get_dspec(vis, domedian=True, verbose=True) specfile = os.path.join(workdir, os.path.basename(vis) + '.dspec.npz') dspec_external(vis, workdir=workdir, specfile=specfile) specdata = np.load(specfile) tb.open(vis) starttim = Time(tb.getcell('TIME', 0) / 24. / 3600., format='mjd') endtim = Time(tb.getcell('TIME', tb.nrows() - 1) / 24. 
/ 3600., format='mjd') tb.close() datstr = starttim.iso[:10] if timerange is None or timerange == '': starttim1 = starttim endtim1 = endtim timerange = '{0}~{1}'.format(starttim.iso.replace('-', '/').replace(' ', '/'), endtim.iso.replace('-', '/').replace(' ', '/')) else: try: (tstart, tend) = timerange.split('~') if tstart[2] == ':': starttim1 = Time(datstr + 'T' + tstart) endtim1 = Time(datstr + 'T' + tend) timerange = '{0}/{1}~{0}/{2}'.format(datstr.replace('-', '/'), tstart, tend) else: starttim1 = Time(qa.quantity(tstart, 'd')['value'], format='mjd') endtim1 = Time(qa.quantity(tend, 'd')['value'], format='mjd') except ValueError: print "keyword 'timerange' in wrong format" midtime_mjd = (starttim1.mjd + endtim1.mjd) / 2. if vis.endswith('/'): vis = vis[:-1] visname = os.path.basename(vis) bt = starttim1.plot_date et = endtim1.plot_date # find out min and max frequency for plotting in dynamic spectrum ms.open(vis) metadata = ms.metadata() observatory = metadata.observatorynames()[0] spwInfo = ms.getspectralwindowinfo() nspw = len(spwInfo) if not spw: spw = '0~' + str(nspw - 1) staql = {'timerange': timerange, 'spw': spw} if ms.msselect(staql, onlyparse=True): ndx = ms.msselectedindices() chan_sel = ndx['channel'] nspw = chan_sel.shape[0] bspw = chan_sel[0, 0] bchan = chan_sel[0, 1] espw = chan_sel[-1, 0] echan = chan_sel[-1, 2] bfreq = spwInfo[str(bspw)]['Chan1Freq'] + spwInfo[str(bspw)]['ChanWidth'] * bchan efreq = spwInfo[str(espw)]['Chan1Freq'] + spwInfo[str(espw)]['ChanWidth'] * echan bfreqghz = bfreq / 1e9 efreqghz = efreq / 1e9 if verbose: print 'selected timerange {}'.format(timerange) print 'selected frequency range {0:6.3f} to {1:6.3f} GHz'.format(bfreqghz, efreqghz) else: print "spw or timerange selection failed. Aborting..." ms.close() return -1 ms.close() if observatory == 'EOVSA': print 'Provide stokes: ' + str(stokes) + '. However EOVSA has linear feeds. Force stokes to be IV' stokes = 'I,V' if mkmovie: plt.ioff() # fig = plt.figure(figsize=(12, 7.5), dpi=100) if fitsfile: pass else: if not imagefile: # from ptclean_cli import ptclean_cli as ptclean eph = hf.read_horizons(t0=Time(midtime_mjd, format='mjd')) if observatory == 'EOVSA' or (not usemsphacenter): phasecenter = '' else: phasecenter = 'J2000 ' + str(eph['ra'][0])[:15] + 'rad ' + str(eph['dec'][0])[:15] + 'rad' print 'use phasecenter: ' + phasecenter qlookfitsdir = os.path.join(workdir, 'qlookfits/') qlookfigdir = os.path.join(workdir, 'qlookimgs/') imresfile = os.path.join(qlookfitsdir, '{}.imres.npz'.format(os.path.basename(vis))) if overwrite: imres = mk_qlook_image(vis, twidth=twidth, ncpu=ncpu, imagedir=qlookfitsdir, phasecenter=phasecenter, stokes=stokes, c_external=True) else: if os.path.exists(imresfile): imres = np.load(imresfile) imres = imres['imres'].item() else: print('Image results file not found; Creating new images.') imres = mk_qlook_image(vis, twidth=twidth, ncpu=ncpu, imagedir=qlookfitsdir, phasecenter=phasecenter, stokes=stokes, c_external=True) if not os.path.exists(qlookfigdir): os.makedirs(qlookfigdir) plt_qlook_image(imres, figdir=qlookfigdir, specdata=specdata, verbose=True, stokes=stokes, fov=xyrange) else: spec = specdata['spec'] (npol, nbl, nfreq, ntim) = spec.shape tidx = range(ntim) fidx = range(nfreq) tim = specdata['tim'] freq = specdata['freq'] freqghz = freq / 1e9 spec_tim = Time(specdata['tim'] / 3600. 
/ 24., format='mjd') timstrr = spec_tim.plot_date plt.ion() fig = plt.figure(figsize=(12, 7), dpi=100) gs1 = gridspec.GridSpec(3, 1) gs1.update(left=0.08, right=0.32, wspace=0.05) gs2 = gridspec.GridSpec(2, 2) gs2.update(left=0.38, right=0.98, hspace=0.02, wspace=0.02) spec_1 = np.absolute(spec[0, 0, :, :]) spec_2 = np.absolute(spec[1, 0, :, :]) if observatory == 'EVLA': # circular feeds polstr = ['RR', 'LL'] if observatory == 'EOVSA' or observatory == 'ALMA': # linear feeds polstr = ['XX', 'YY'] print 'plot the dynamic spectrum in pol ' + ' & '.join(polstr) ax1 = plt.subplot(gs1[0]) ax1.pcolormesh(timstrr, freqghz, spec_1, cmap='jet', vmin=dmin, vmax=dmax) ax1.set_xlim(timstrr[tidx[0]], timstrr[tidx[-1]]) ax1.xaxis_date() ax1.xaxis.set_major_formatter(DateFormatter("%H:%M:%S")) # ax1.set_xticklabels(['']*10) ax1.set_ylim(freqghz[fidx[0]], freqghz[fidx[-1]]) ax1.set_ylabel('Frequency (GHz)', fontsize=10) ax1.set_title(observatory + ' ' + datstr + ' ' + polstr[0] + ' & ' + polstr[1], fontsize=12) ax1.set_autoscale_on(False) ax1.add_patch(patches.Rectangle((bt, bfreqghz), et - bt, efreqghz - bfreqghz, ec='w', fill=False)) ax1.plot([(bt + et) / 2.], [(bfreqghz + efreqghz) / 2.], '*w', ms=12) for tick in ax1.get_xticklabels(): tick.set_fontsize(8) for tick in ax1.get_yticklabels(): tick.set_fontsize(8) ax2 = plt.subplot(gs1[1]) ax2.pcolormesh(timstrr, freqghz, spec_2, cmap='jet', vmin=dmin, vmax=dmax) ax2.set_xlim(timstrr[tidx[0]], timstrr[tidx[-1]]) ax2.xaxis_date() ax2.xaxis.set_major_formatter(DateFormatter("%H:%M:%S")) ax2.set_ylim(freqghz[fidx[0]], freqghz[fidx[-1]]) ax2.set_ylabel('Frequency (GHz)', fontsize=10) for tick in ax2.get_xticklabels(): tick.set_fontsize(8) for tick in ax2.get_yticklabels(): tick.set_fontsize(8) ax2.set_autoscale_on(False) ax2.add_patch(patches.Rectangle((bt, bfreqghz), et - bt, efreqghz - bfreqghz, ec='w', fill=False)) ax2.plot([(bt + et) / 2.], [(bfreqghz + efreqghz) / 2.], '*w', ms=12) # Second part: GOES plot if goestime: btgoes = goestime[0] etgoes = goestime[1] else: datstrg = datstr.replace('-', '/') btgoes = datstrg + ' ' + qa.time(qa.quantity(tim[0] - 1800, 's'), form='clean', prec=9)[0] etgoes = datstrg + ' ' + qa.time(qa.quantity(tim[tidx[-1] - 1] + 1800, 's'), form='clean', prec=9)[0] if verbose: print 'Acquire GOES soft X-ray data in from ' + btgoes + ' to ' + etgoes ax3 = plt.subplot(gs1[2]) try: from sunpy import lightcurve as lc from sunpy.time import TimeRange goest = lc.GOESLightCurve.create(TimeRange(btgoes, etgoes)) except: goesscript = os.path.join(workdir, 'goes.py') goesdatafile = os.path.join(workdir, 'goes.dat') os.system('rm -rf {}'.format(goesscript)) fi = open(goesscript, 'wb') fi.write('import os \n') fi.write('from sunpy.time import TimeRange \n') fi.write('from sunpy import lightcurve as lc \n') fi.write('import pickle \n') fi.write('goesplottim = TimeRange("{0}", "{1}") \n'.format(btgoes, etgoes)) fi.write('goes = lc.GOESLightCurve.create(goesplottim) \n') fi.write('fi2 = open("{}", "wb") \n'.format(goesdatafile)) fi.write('pickle.dump(goes, fi2) \n') fi.write('fi2.close()') fi.close() try: os.system('python {}'.format(goesscript)) os.system('rm -rf {}'.format(goesscript)) except NameError: print "Bad input names" except ValueError: print "Bad input values" except: print "Unexpected error:", sys.exc_info()[0] print "Error in generating GOES light curves. Proceed without GOES..." 
if os.path.exists(goesdatafile): fi1 = file(goesdatafile, 'rb') goest = pickle.load(fi1) fi1.close() try: dates = mpl.dates.date2num(parse_time(goest.data.index)) goesdif = np.diff(goest.data['xrsb']) gmax = np.nanmax(goesdif) gmin = np.nanmin(goesdif) ran = gmax - gmin db = 2.8 / ran goesdifp = goesdif * db + gmin + (-6) ax3.plot_date(dates, np.log10(goest.data['xrsb']), '-', label='1.0--8.0 $\AA$', color='red', lw=2) ax3.plot_date(dates[0:-1], goesdifp, '-', label='derivate', color='blue', lw=0.4) ax3.set_ylim([-7, -3]) ax3.set_yticks([-7, -6, -5, -4, -3]) ax3.set_yticklabels([r'$10^{-7}$', r'$10^{-6}$', r'$10^{-5}$', r'$10^{-4}$', r'$10^{-3}$']) ax3.set_title('Goes Soft X-ray', fontsize=12) ax3.set_ylabel('Watts m$^{-2}$') ax3.set_xlabel(datetime.datetime.isoformat(goest.data.index[0])[0:10]) ax3.axvspan(dates[899], dates[dates.size - 899], alpha=0.2) ax2 = ax3.twinx() # ax2.set_yscale("log") ax2.set_ylim([-7, -3]) ax2.set_yticks([-7, -6, -5, -4, -3]) ax2.set_yticklabels(['B', 'C', 'M', 'X', '']) ax3.yaxis.grid(True, 'major') ax3.xaxis.grid(False, 'major') ax3.legend(prop={'size': 6}) formatter = mpl.dates.DateFormatter('%H:%M') ax3.xaxis.set_major_formatter(formatter) ax3.fmt_xdata = mpl.dates.DateFormatter('%H:%M') except: print 'Error in downloading GOES soft X-ray data. Proceeding with out soft X-ray plot.' # third part # start to download the fits files if plotaia: if not aiafits: newlist = [] items = glob.glob('*.fits') for names in items: str1 = starttim1.iso[:4] + '_' + starttim1.iso[5:7] + '_' + starttim1.iso[8:10] + 't' + starttim1.iso[ 11:13] + '_' + starttim1.iso[14:16] str2 = str(aiawave) if names.endswith(".fits"): if names.find(str1) != -1 and names.find(str2) != -1: newlist.append(names) newlist.append('0') if newlist and os.path.exists(newlist[0]): aiafits = newlist[0] else: print 'downloading the aiafits file' wave1 = aiawave - 3 wave2 = aiawave + 3 t1 = Time(starttim1.mjd - 0.02 / 24., format='mjd') t2 = Time(endtim1.mjd + 0.02 / 24., format='mjd') try: from sunpy.net import vso client = vso.VSOClient() qr = client.query(vso.attrs.Time(t1.iso, t2.iso), vso.attrs.Instrument('aia'), vso.attrs.Wave(wave1 * u.AA, wave2 * u.AA)) res = client.get(qr, path='{file}') except: SdoDownloadscript = os.path.join(workdir, 'SdoDownload.py') os.system('rm -rf {}'.format(SdoDownloadscript)) fi = open(SdoDownloadscript, 'wb') fi.write('from sunpy.net import vso \n') fi.write('from astropy import units as u \n') fi.write('client = vso.VSOClient() \n') fi.write( "qr = client.query(vso.attrs.Time('{0}', '{1}'), vso.attrs.Instrument('aia'), vso.attrs.Wave({2} * u.AA, {3} * u.AA)) \n".format( t1.iso, t2.iso, wave1, wave2)) fi.write("res = client.get(qr, path='{file}') \n") fi.close() try: os.system('python {}'.format(SdoDownloadscript)) except NameError: print "Bad input names" except ValueError: print "Bad input values" except: print "Unexpected error:", sys.exc_info()[0] print "Error in Downloading AIA fits files. Proceed without AIA..." # Here something is needed to check whether it has finished downloading the fits files or not if not aiafits: newlist = [] items = glob.glob('*.fits') for nm in items: str1 = starttim1.iso[:4] + '_' + starttim1.iso[5:7] + '_' + starttim1.iso[8:10] + 't' + starttim1.iso[ 11:13] + '_' + starttim1.iso[14:16] str2 = str(aiawave) if nm.find(str1) != -1 and nm.find(str2) != -1: newlist.append(nm) if newlist: aiafits = newlist[0] print 'AIA fits ' + aiafits + ' selected' else: print 'no AIA fits files found. 
Proceed without AIA' try: aiamap = smap.Map(aiafits) except: print 'error in reading aiafits. Proceed without AIA' # RCP or I ax4 = plt.subplot(gs2[0, 0]) ax5 = plt.subplot(gs2[1, 0]) # LCP or V ax6 = plt.subplot(gs2[0, 1]) ax7 = plt.subplot(gs2[1, 1]) if fitsfile: pass else: if not imagefile: eph = hf.read_horizons(t0=Time(midtime_mjd, format='mjd')) if observatory == 'EOVSA' or (not usemsphacenter): print 'This is EOVSA data' # use RA and DEC from FIELD ID 0 tb.open(vis+'/FIELD') phadir = tb.getcol('PHASE_DIR').flatten() tb.close() ra0 = phadir[0] dec0 = phadir[1] if stokes == 'RRLL' or stokes == 'RR,LL': print 'Provide stokes: ' + str(stokes) + '. However EOVSA has linear feeds. Force stokes to be IV' stokes = 'I,V' else: ra0 = eph['ra'][0] dec0 = eph['dec'][0] if not xycen: # use solar disk center as default phasecenter = 'J2000 ' + str(ra0) + 'rad ' + str(dec0) + 'rad' else: x0 = np.radians(xycen[0]/3600.) y0 = np.radians(xycen[1]/3600.) p0 = np.radians(eph['p0'][0]) # p angle in radians raoff = -((x0) * np.cos(p0) - y0 * np.sin(p0))/np.cos(eph['dec'][0]) decoff = (x0) * np.sin(p0) + y0 * np.cos(p0) newra = ra0 + raoff newdec = dec0 + decoff phasecenter = 'J2000 ' + str(newra) + 'rad ' + str(newdec) + 'rad' imagename = os.path.join(workdir, visname + '.outim') if os.path.exists(imagename + '.image') or os.path.exists(imagename + '.flux'): os.system('rm -rf ' + imagename + '.*') sto = stokes.replace(',', '') print 'do clean for ' + timerange + ' in spw ' + spw + ' stokes ' + sto print 'Original phasecenter: '+ str(ra0) + str(dec0) print 'use phasecenter: ' + phasecenter clean(vis=vis, imagename=imagename, selectdata=True, spw=spw, timerange=timerange, stokes=sto, niter=niter, interactive=interactive, npercycle=50, imsize=imsize, cell=cell, restoringbeam=restoringbeam, weighting='briggs', robust=robust, phasecenter=phasecenter) os.system('rm -rf ' + imagename + '.psf') os.system('rm -rf ' + imagename + '.flux') os.system('rm -rf ' + imagename + '.model') os.system('rm -rf ' + imagename + '.mask') os.system('rm -rf ' + imagename + '.residual') imagefile = imagename + '.image' fitsfile = imagefile + '.fits' hf.imreg(vis=vis, ephem=eph, imagefile=imagefile, timerange=timerange, reftime=reftime, fitsfile=fitsfile, verbose=True, overwrite=True) print 'fits file ' + fitsfile + ' selected' ax4.cla() ax5.cla() ax6.cla() ax7.cla() rfits = fitsfile try: hdulist = fits.open(rfits) hdu = hdulist[0] (npol, nf, nx, ny) = hdu.data.shape rmap = smap.Map(hdu.data[0, 0, :, :], hdu.header) except: print 'radio fits file not recognized by sunpy.map. Aborting...' return -1 if npol > 1: rmap1 = smap.Map(hdu.data[0, 0, :, :], hdu.header) rmap2 = smap.Map(hdu.data[1, 0, :, :], hdu.header) XX, YY = np.meshgrid(np.arange(rmap.data.shape[1]), np.arange(rmap.data.shape[0])) try: rmapx, rmapy = rmap.pixel_to_data(XX * u.pix, YY * u.pix) except: rmapxy = rmap.pixel_to_data(XX * u.pix, YY * u.pix) rmapx = rmapxy.Tx rmapy = rmapxy.Ty if not xyrange: if xycen: x0 = xycen[0] * u.arcsec y0 = xycen[1] * u.arcsec if not xycen: row, col = rmap1.data.shape positon = np.nanargmax(rmap1.data) m, n = divmod(positon, col) x0 = rmap1.xrange[0] + rmap1.scale[1] * (n + 0.5) * u.pix y0 = rmap1.yrange[0] + rmap1.scale[0] * (m + 0.5) * u.pix if len(fov) == 1: fov=[fov]*2 sz_x = fov[0] * u.arcsec sz_y = fov[1] * u.arcsec x1 = x0 - sz_x/2. x2 = x0 + sz_x/2. y1 = y0 - sz_y/2. y2 = y0 + sz_y/2. 
xyrange = [[x1.value, x2.value], [y1.value, y2.value]] else: sz_x = xyrange[0][1] - xyrange[0][0] sz_y = xyrange[1][1] - xyrange[1][0] clevels1 = np.linspace(0.2, 0.9, 5) if stokes.split(',')[1] == 'V': clevels2 = np.array([0.8, -0.6, -0.4, -0.2, 0.2, 0.4, 0.6, 0.8]) else: clevels2 = np.linspace(0.2, 0.9, 5) if 'aiamap' in vars(): aiamap.plot_settings['cmap'] = plt.get_cmap('binary') if rmap: title = 'AIA {0:.0f} + {1} {2:6.3f} GHz'.format(aiamap.wavelength.value, observatory, (bfreqghz + efreqghz) / 2.0) else: title = 'AIA {0:.0f}'.format(aiamap.wavelength.value) aiamap.plot(axes=ax4) ax4.set_title(title + ' ' + stokes.split(',')[0], fontsize=12) aiamap.draw_limb() aiamap.draw_grid() aiamap.draw_rectangle((xyrange[0][0], xyrange[1][0]) * u.arcsec, sz_x, sz_y) aiamap.plot(axes=ax6) ax6.set_title(title + ' ' + stokes.split(',')[1], fontsize=12) aiamap.draw_limb() aiamap.draw_grid() aiamap.draw_rectangle((xyrange[0][0], xyrange[1][0]) * u.arcsec, sz_x, sz_y) if rmap: ax4.contour(rmapx.value, rmapy.value, rmap1.data, levels=clevels1 * np.nanmax(rmap1.data), cmap=cm.jet) ax6.contour(rmapx.value, rmapy.value, rmap2.data, levels=clevels2 * np.nanmax(rmap2.data), cmap=cm.RdBu) ax4.text(0.02, 0.02, 'AIA {0:.0f} '.format(aiamap.wavelength.value) + aiamap.date.strftime('%H:%M:%S'), verticalalignment='bottom', horizontalalignment='left', transform=ax4.transAxes, color='k', fontsize=10) ax6.text(0.02, 0.02, 'AIA {0:.0f} '.format(aiamap.wavelength.value) + aiamap.date.strftime('%H:%M:%S'), verticalalignment='bottom', horizontalalignment='left', transform=ax6.transAxes, color='k', fontsize=10) else: title = '{0} {1:6.3f} GHz'.format(observatory, (bfreqghz + efreqghz) / 2.0) rmap1.plot(axes=ax4, cmap=cm.jet) ax4.set_title(title + ' ' + stokes.split(',')[0], fontsize=12) rmap1.draw_limb() rmap1.draw_grid() rmap1.draw_rectangle((xyrange[0][0], xyrange[1][0]) * u.arcsec, sz_x, sz_y) rmap2.plot(axes=ax6, cmap=cm.RdBu) ax6.set_title(title + ' ' + stokes.split(',')[1], fontsize=12) rmap2.draw_limb() rmap2.draw_grid() # ax4.contour(rmapx.value, rmapy.value, rmap1.data, levels=np.linspace(0.2, 0.9, 5) * np.nanmax(rmap1.data), # cmap=cm.gray) # ax6.contour(rmapx.value, rmapy.value, rmap2.data, levels=np.linspace(0.2, 0.9, 5) * np.nanmax(rmap2.data), # cmap=cm.gray) rmap2.draw_rectangle((xyrange[0][0], xyrange[1][0]) * u.arcsec, sz_x, sz_y) ax4.set_xlim(-1200, 1200) ax4.set_ylim(-1200, 1200) ax6.set_xlim(-1200, 1200) ax6.set_ylim(-1200, 1200) try: subrmap1 = rmap1.submap(xyrange[0] * u.arcsec, xyrange[1] * u.arcsec) subrmap2 = rmap2.submap(xyrange[0] * u.arcsec, xyrange[1] * u.arcsec) except: bl = SkyCoord(xyrange[0][0] * u.arcsec, xyrange[1][0] * u.arcsec, frame=rmap1.coordinate_frame) tr = SkyCoord(xyrange[0][1] * u.arcsec, xyrange[1][1] * u.arcsec, frame=rmap1.coordinate_frame) subrmap1 = rmap1.submap(bl, tr) subrmap2 = rmap2.submap(bl, tr) XX, YY = np.meshgrid(np.arange(subrmap1.data.shape[1]), np.arange(subrmap1.data.shape[0])) try: subrmapx, subrmapy = subrmap1.pixel_to_data(XX * u.pix, YY * u.pix) except: subrmapxy = subrmap1.pixel_to_data(XX * u.pix, YY * u.pix) subrmapx = subrmapxy.Tx subrmapy = subrmapxy.Ty if 'aiamap' in vars(): try: subaiamap = aiamap.submap(xyrange[0] * u.arcsec, xyrange[1] * u.arcsec) except: bl = SkyCoord(xyrange[0][0] * u.arcsec, xyrange[1][0] * u.arcsec, frame=aiamap.coordinate_frame) tr = SkyCoord(xyrange[0][1] * u.arcsec, xyrange[1][1] * u.arcsec, frame=aiamap.coordinate_frame) subaiamap = aiamap.submap(bl, tr) subaiamap.plot(axes=ax5, title='') subaiamap.draw_limb() 
subaiamap.draw_grid() subaiamap.plot(axes=ax7, title='') subaiamap.draw_limb() subaiamap.draw_grid() ax5.contour(subrmapx.value, subrmapy.value, subrmap1.data, levels=clevels1 * np.nanmax(subrmap1.data), cmap=cm.jet) ax7.contour(subrmapx.value, subrmapy.value, subrmap2.data, levels=clevels2 * np.nanmax(subrmap2.data), cmap=cm.RdBu) # subaiamap.draw_rectangle((fov[0][0], fov[1][0]) * u.arcsec, 400 * u.arcsec, 400 * u.arcsec) else: subrmap1.plot(axes=ax5, cmap=cm.jet, title='') subrmap1.draw_limb() subrmap1.draw_grid() subrmap2.plot(axes=ax7, cmap=cm.RdBu, title='') subrmap2.draw_limb() subrmap2.draw_grid() # ax5.contour(subrmapx.value, subrmapy.value, subrmap1.data, # levels=clevels1 * np.nanmax(subrmap1.data), cmap=cm.gray) # ax7.contour(subrmapx.value, subrmapy.value, subrmap2.data, # levels=clevels2 * np.nanmax(subrmap2.data), cmap=cm.gray) # subrmap1.draw_rectangle((fov[0][0], fov[1][0]) * u.arcsec, 400 * u.arcsec, 400 * u.arcsec) # subrmap2.draw_rectangle((fov[0][0], fov[1][0]) * u.arcsec, 400 * u.arcsec, 400 * u.arcsec) ax5.set_xlim(xyrange[0]) ax5.set_ylim(xyrange[1]) ax5.text(0.02, 0.02, observatory + ' ' + rmap.date.strftime('%H:%M:%S.%f')[:-3], verticalalignment='bottom', horizontalalignment='left', transform=ax5.transAxes, color='k', fontsize=10) ax7.set_xlim(xyrange[0]) ax7.set_ylim(xyrange[1]) ax7.text(0.02, 0.02, observatory + ' ' + rmap.date.strftime('%H:%M:%S.%f')[:-3], verticalalignment='bottom', horizontalalignment='left', transform=ax7.transAxes, color='k', fontsize=10) fig.show()
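

# Usage sketch for svplot: the MS path, time range, and image center are hypothetical
# placeholders. This cleans spw 1~3 over the selected time range, registers the image
# to solar coordinates, and overlays the radio contours on an AIA 171 map.
def _example_svplot():
    svplot('IDB20170821T1900.ms', timerange='19:00:00~19:10:00', spw='1~3',
           stokes='I,V', aiawave=171, xycen=[900., -150.], fov=[500., 500.],
           plotaia=True, mkmovie=False, niter=500, imsize=[512], cell=['5.0arcsec'])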
def make_ephem(vis, ephemfile=None):
    ''' Make an ephemeris table from JPL Horizons.
        Caution: the table created under CASA 5+ lacks many columns (e.g., Rho, RadVel)
        that are essential for fixplanets. The issue comes from an update to the JPL reader
        after 2015: some keys are not written to the ephemeris table. This only works with
        CASA 4 (not 5+).
    :param vis: input measurement set
    :param ephemfile: name of the output ephemeris text file
    :return: the name of the ephemeris file written
    '''
    import urllib2, ssl
    from taskinit import tb
    quantities = ['1', '14', '15', '17', '19', '20', '24', '32']
    quantities = ','.join(quantities)
    tb.open(vis + '/OBSERVATION')
    trs = {'BegTime': [], 'EndTime': []}
    for ll in range(tb.nrows()):
        tim0, tim1 = Time(tb.getcell('TIME_RANGE', ll) / 24 / 3600, format='mjd')
        trs['BegTime'].append(tim0)
        trs['EndTime'].append(tim1)
    tb.close()
    trs['BegTime'] = Time(trs['BegTime'])
    trs['EndTime'] = Time(trs['EndTime'])
    btime = np.min(trs['BegTime'])
    etime = np.max(trs['EndTime'])
    print("Beginning time of this scan " + btime.iso)
    print("End time of this scan " + etime.iso)
    btime = Time((btime.mjd - 1.0 / 60 / 24), format='mjd')
    etime = Time((etime.mjd + 1.0 / 60 / 24), format='mjd')
    startdate = btime.iso.replace(' ', ',')[:-7]
    enddate = etime.iso.replace(' ', ',')[:-7]
    cmd = ["COMMAND= '10'", "CENTER= '-5@399'", "MAKE_EPHEM= 'YES'",
           "TABLE_TYPE= 'OBSERVER'", "START_TIME= '%s'" % startdate,
           "STOP_TIME= '%s'" % enddate, "STEP_SIZE= '1m'", "CAL_FORMAT= 'CAL'",
           "TIME_DIGITS= 'MINUTES'", "ANG_FORMAT= 'DEG'", "OUT_UNITS= 'KM-S'",
           "RANGE_UNITS= 'AU'", "APPARENT= 'AIRLESS'", "SOLAR_ELONG= '0,180'",
           "SUPPRESS_RANGE_RATE= 'NO'", "SKIP_DAYLT= 'NO'", "EXTRA_PREC= 'NO'",
           "R_T_S_ONLY= 'NO'", "REF_SYSTEM= 'J2000'", "CSV_FORMAT= 'YES'",
           "OBJ_DATA= 'YES'", "TIME_DIGITS ='MIN'", "QUANTITIES= '{}'".format(quantities)]
    cmdstr = "http://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l&" + '&'.join(cmd)
    try:
        context = ssl._create_unverified_context()
        f = urllib2.urlopen(cmdstr, context=context)
    except:
        f = urllib2.urlopen(cmdstr)
    lines = f.readlines()
    f.close()
    istart = 0
    for i, l in enumerate(lines):
        if l[0:5] == '$$SOE':  # start recording
            istart = i + 1
        if l[0:5] == '$$EOE':  # end recording
            iend = i
    if not ephemfile:
        ephemfile = 'sun-ephem-geo.txt'
    with open(ephemfile, 'w') as fb:
        for i, l in enumerate(lines):
            if i == istart - 3:
                fb.write(
                    ' Date__(UT)__HR:MN R.A.___(J2000.0)___DEC. Ob-lon Ob-lat Sl-lon Sl-lat NP.ang NP.dist r rdot delta deldot S-T-O'
                )
            if i >= istart and i < iend:
                l_s = l.split(',')
                l_s.pop(1)
                l_s.pop(1)
                fb.write(' '.join(l_s))
            else:
                fb.write(l)
    # with open(ephemfile, 'w') as fb:
    #     for i, l in enumerate(lines):
    #         fb.write(l.replace('*m', '').replace('*t', ''))
    return ephemfile
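

# Usage sketch for make_ephem: the MS path is a hypothetical placeholder. The ephemeris
# covers the observation padded by one minute on each side, sampled at 1-minute steps,
# and is written as a plain text file whose name is returned.
def _example_make_ephem():
    ephemfile = make_ephem('IDB20170821T1900.ms', ephemfile='sun-ephem-geo.txt')
    print('ephemeris written to ' + ephemfile)
    return ephemfile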