def read(self, file, nints, nskip, spw, selectpol, scan, datacol):
    """ Reads in Measurement Set data using CASA.
    spw is a list of subbands (zero-based).
    scan is a zero-based selection based on scan order, not the actual scan number.
    selectpol is a list of polarization strings (e.g., ['RR','LL']).
    """

    self.file = file
    self.scan = scan
    self.nints = nints

    # get spw info. either load pickled version (if found) or make new one
    pklname = string.join(file.split(".")[:-1], ".") + "_init.pkl"
    #        pklname = pklname.split('/')[-1]  # hack to remove path and write locally
    if os.path.exists(pklname):
        print "Pickle of initializing info found. Loading..."
        pkl = open(pklname, "r")
        try:
            (self.npol_orig, self.nbl, self.blarr, self.inttime, spwinfo, scansummary) = pickle.load(pkl)
        except EOFError:
            print "Bad pickle file. Exiting..."
            return 1
        # old way, casa 3.3?
        #            scanlist = scansummary['summary'].keys()
        #            starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
        # new way, casa 4.0?
        scanlist = scansummary.keys()
        starttime_mjd = scansummary[scanlist[scan]]["0"]["BeginTime"]
        self.nskip = int(nskip * self.nbl)  # number of iterations to skip (for reading in different parts of buffer)
        self.npol = len(selectpol)
    else:
        print "No pickle of initializing info found. Making anew..."
        pkl = open(pklname, "wb")
        ms.open(self.file)
        spwinfo = ms.getspectralwindowinfo()
        scansummary = ms.getscansummary()

        # original (general version)
        #            scanlist = scansummary['summary'].keys()
        #            starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
        #            starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
        #            stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0.5/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))

        # for casa 4.0 (?) and later
        scanlist = scansummary.keys()

        # set time info
        self.inttime = scansummary[scanlist[scan]]["0"]["IntegrationTime"]
        self.inttime0 = self.inttime
        print "Initializing integration time (s):", self.inttime
        starttime_mjd = scansummary[scanlist[scan]]["0"]["BeginTime"]
        starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd + 0 / (24.0 * 60 * 60), "d"), form=["ymd"], prec=9)[0], "s"))[0]
        stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd + self.inttime / (24.0 * 60 * 60), "d"), form=["ymd"], prec=9)[0], "s"))[0]

        ms.selectinit(datadescid=0)  # initialize params
        selection = {"time": [starttime0, stoptime0]}
        ms.select(items=selection)
        da = ms.getdata([datacol, "axis_info"], ifraxis=True)
        ms.close()

        self.npol_orig = da[datacol].shape[0]
        self.nbl = da[datacol].shape[2]
        print "Initializing nbl:", self.nbl

        # good baselines
        bls = da["axis_info"]["ifr_axis"]["ifr_shortname"]
        self.blarr = n.array([[int(bls[i].split("-")[0]), int(bls[i].split("-")[1])] for i in xrange(len(bls))])
        self.nskip = int(nskip * self.nbl)  # number of iterations to skip (for reading in different parts of buffer)

        pickle.dump((self.npol_orig, self.nbl, self.blarr, self.inttime, spwinfo, scansummary), pkl)
        pkl.close()

    self.ants = n.unique(self.blarr)
    self.nants = len(n.unique(self.blarr))
    self.nants0 = len(n.unique(self.blarr))
    print "Initializing nants:", self.nants
    self.npol = len(selectpol)
    print "Initializing %d of %d polarizations" % (self.npol, self.npol_orig)

    # set desired spw
    if (len(spw) == 1) & (spw[0] == -1):
        #            spwlist = spwinfo['spwInfo'].keys()    # old way
        spwlist = spwinfo.keys()  # new way
    else:
        spwlist = spw

    self.freq_orig = n.array([])
    for spw in spwlist:
        # new way
        nch = spwinfo[str(spw)]["NumChan"]
        ch0 = spwinfo[str(spw)]["Chan1Freq"]
        chw = spwinfo[str(spw)]["ChanWidth"]
        self.freq_orig = n.concatenate((self.freq_orig, (ch0 + chw * n.arange(nch)) * 1e-9))
        # old way
        #            nch = spwinfo['spwInfo'][str(spw)]['NumChan']
        #            ch0 = spwinfo['spwInfo'][str(spw)]['Chan1Freq']
        #            chw = spwinfo['spwInfo'][str(spw)]['ChanWidth']

    self.freq = self.freq_orig[self.chans]
    self.nchan = len(self.freq)
    print "Initializing nchan:", self.nchan

    # set requested time range based on given parameters
    timeskip = self.inttime * nskip
    # new way
    starttime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd + timeskip / (24.0 * 60 * 60), "d"), form=["ymd"], prec=9)[0], "s"))[0]
    stoptime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd + (timeskip + nints * self.inttime) / (24.0 * 60 * 60), "d"), form=["ymd"], prec=9)[0], "s"))[0]
    print "First integration of scan:", qa.time(qa.quantity(starttime_mjd, "d"), form=["ymd"], prec=9)[0]
    print
    # new way
    print "Reading scan", str(scanlist[scan]), "for times", qa.time(qa.quantity(starttime_mjd + timeskip / (24.0 * 60 * 60), "d"), form=["hms"], prec=9)[0], "to", qa.time(qa.quantity(starttime_mjd + (timeskip + nints * self.inttime) / (24.0 * 60 * 60), "d"), form=["hms"], prec=9)[0]

    # read data into data structure
    ms.open(self.file)
    ms.selectinit(datadescid=spwlist[0])  # reset select params for later data selection
    selection = {"time": [starttime, stoptime]}
    ms.select(items=selection)
    print "Reading %s column, SB %d, polarization %s..." % (datacol, spwlist[0], selectpol)
    ms.selectpolarization(selectpol)
    da = ms.getdata([datacol, "axis_info", "u", "v", "w", "flag"], ifraxis=True)
    if da == {}:
        print "No data found."
        return 1
    u = da["u"]
    v = da["v"]
    w = da["w"]
    newda = n.transpose(da[datacol], axes=[3, 2, 1, 0])  # if using multi-pol data.
    flags = n.transpose(da["flag"], axes=[3, 2, 1, 0])
    if len(spwlist) > 1:
        for spw in spwlist[1:]:
            ms.selectinit(datadescid=spw)  # reset select params for later data selection
            ms.select(items=selection)
            print "Reading %s column, SB %d, polarization %s..." % (datacol, spw, selectpol)
            ms.selectpolarization(selectpol)
            da = ms.getdata([datacol, "axis_info", "flag"], ifraxis=True)
            newda = n.concatenate((newda, n.transpose(da[datacol], axes=[3, 2, 1, 0])), axis=2)
            flags = n.concatenate((flags, n.transpose(da["flag"], axes=[3, 2, 1, 0])), axis=2)
    ms.close()

    # Initialize more stuff...
    self.nschan0 = self.nchan

    # set variables for later writing data **some hacks here**
    self.nspect0 = 1
    self.nwide0 = 0
    self.sdf0 = da["axis_info"]["freq_axis"]["resolution"][0][0] * 1e-9
    self.sdf = self.sdf0
    self.ischan0 = 1
    self.sfreq0 = da["axis_info"]["freq_axis"]["chan_freq"][0][0] * 1e-9
    self.sfreq = self.sfreq0
    self.restfreq0 = 0.0
    self.pol0 = -1  # assumes single pol?

    # Assumes MS files store uvw in meters. Scales by the first channel frequency to convert to wavelengths.
    self.u = u.transpose() * self.freq_orig[0] * (1e9 / 3e8)
    self.v = v.transpose() * self.freq_orig[0] * (1e9 / 3e8)
    self.w = w.transpose() * self.freq_orig[0] * (1e9 / 3e8)

    # set integration time and time axis
    ti = da["axis_info"]["time_axis"]["MJDseconds"]
    self.reltime = ti - ti[0]

    # define relative phase center for each integration
    self.l0 = n.zeros(self.nints)
    self.m0 = n.zeros(self.nints)

    self.rawdata = newda
    self.flags = n.invert(flags)  # tests show that the MS has the opposite flag convention to Miriad, so tpipe uses the complement of the MS flag.
    print "Shape of raw data, time:"
    print self.rawdata.shape, self.reltime.shape
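

# --- Illustrative sketch (not part of the original module) ---
# The time-window selection in read() repeats the same qa chain several times:
# MJD start (days) + offset (seconds) -> 'ymd' time string -> value in seconds for ms.select().
# A minimal helper along those lines, assuming the casapy 'qa' (quanta) tool is in scope
# as it is for the rest of this module; the helper name is hypothetical.
def _mjd_offset_to_select_time(starttime_mjd, offset_s):
    """Return the time value (in seconds) used for ms.select(), for an MJD start time
    plus an offset in seconds. Mirrors the qa expressions used in read()."""
    mjd = starttime_mjd + offset_s / (24. * 60 * 60)                       # offset converted to days
    timestr = qa.time(qa.quantity(mjd, 'd'), form=['ymd'], prec=9)[0]      # casa 4.0+ returns a list of strings
    return qa.getvalue(qa.convert(timestr, 's'))[0]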
def __init__(self, file, nints=1000, nskip=0, ddid=-1, selectpol=['RR','LL'], scan=0, datacol='data'):
    """Initializes the class "obs".
    This creates a new object containing data and metadata for an observation.
    It also includes functions to manipulate data and do transients analysis.
    Note that this uses CASA libraries in a way that requires it to be run from within "casapy".
    Use scan, nints, and nskip to control where to start. datacol specifies the column of the MS to read.
    Standard MS data columns are: 'data', 'corrected_data', 'model_data'.
    Examples of usage in python/casapy:
    import vla_tpipe
    obs = vla_tpipe.obs() -- create observation object for first file in a directory.
    print obs.data.shape -- see the structure of data read in. dimensions are (time, baseline, channel, polarization)
    results = obs.bisplc(show=1) -- create a bispectrum lightcurve and show any candidate transients. results are returned in 'return' object
    """

    # critical parameters. may need to edit these
    ants = range(28)   # set what antennas to use. default is to use "range" to specify all antennas with range(n_ant+1)

    self.file = file

    # get spw info. either load pickled version (if found) or make new one
    pklname = string.join(file.split('.')[:-1], '.') + '_init.pkl'
    if os.path.exists(pklname):
        print 'Pickle of initializing info found. Loading...'
        pkl = open(pklname, 'r')
        (self.npol_orig, self.npol, self.nbl, self.blarr, self.ants, self.nants, self.nants0, self.nskip, self.inttime, self.inttime0, spwinfo, scansummary) = pickle.load(pkl)
        scanlist = scansummary['summary'].keys()
        starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
        self.nskip = int(nskip*self.nbl)    # number of iterations to skip (for reading in different parts of buffer)
    else:
        print 'No pickle of initializing info found. Making anew...'
        pkl = open(pklname, 'wb')
        ms.open(self.file)
        spwinfo = ms.getspectralwindowinfo()
        scansummary = ms.getscansummary()
        scanlist = scansummary['summary'].keys()

        starttime_mjd = scansummary['summary'][scanlist[scan]]['0']['BeginTime']
        starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
        stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0.5/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))
        ms.selectinit(datadescid=0)  # initialize params
        selection = {'time': [starttime0, stoptime0], 'antenna1': ants, 'antenna2': ants}
        ms.select(items = selection)
        da = ms.getdata([datacol,'axis_info'], ifraxis=True)
        ms.close()

        self.npol_orig = da[datacol].shape[0]
        self.npol = len(selectpol)
        self.nbl = da[datacol].shape[2]
        print 'Initializing %d of %d polarizations' % (self.npol, self.npol_orig)
        print 'Initializing nbl:', self.nbl

        # good baselines
        bls = da['axis_info']['ifr_axis']['ifr_shortname']
        self.blarr = n.array([[int(bls[i].split('-')[0]), int(bls[i].split('-')[1])] for i in range(len(bls))])
        self.ants = n.unique(self.blarr)
        self.nants = len(self.ants)
        self.nants0 = len(self.ants)
        print 'Initializing nants:', self.nants
        self.nskip = int(nskip*self.nbl)    # number of iterations to skip (for reading in different parts of buffer)

        # set integration time
        ti0 = da['axis_info']['time_axis']['MJDseconds']
        # self.inttime = n.mean([ti0[i+1] - ti0[i] for i in range(len(ti0)-1)])
        self.inttime = scansummary['summary'][scanlist[scan]]['0']['IntegrationTime']
        self.inttime0 = self.inttime
        print 'Initializing integration time (s):', self.inttime

        pickle.dump((self.npol_orig, self.npol, self.nbl, self.blarr, self.ants, self.nants, self.nants0, self.nskip, self.inttime, self.inttime0, spwinfo, scansummary), pkl)
        pkl.close()

    # read in multiple subbands ("data id" in casa parlance).
    if ddid < 0:
        ddidlist = range(len(spwinfo['spwInfo']))
    else:
        ddidlist = [ddid]

    freq = n.array([])
    for ddid in ddidlist:
        nch = spwinfo['spwInfo'][str(ddid)]['NumChan']
        ch0 = spwinfo['spwInfo'][str(ddid)]['Chan1Freq']
        chw = spwinfo['spwInfo'][str(ddid)]['ChanWidth']
        freq = n.concatenate( (freq, (ch0 + chw * n.arange(nch)) * 1e-9) )

    # self.chans = n.array(range(2,62))   # can flag by omitting channels here
    self.chans = n.arange(nch*len(ddidlist))   # default is to take all chans
    self.freq = freq[self.chans]
    self.nchan = len(self.freq)
    self.track0 = [n.zeros(len(self.chans)), list(self.chans)]

    # set requested time range based on given parameters
    timeskip = self.inttime*nskip
    starttime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+timeskip/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
    stoptime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+(timeskip+nints*self.inttime)/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))
    print 'First integration of scan:', qa.time(qa.quantity(starttime_mjd,'d'),form=['ymd'],prec=9)
    print
    print 'Reading from', qa.time(qa.quantity(starttime_mjd+timeskip/(24.*60*60),'d'),form=['hms'], prec=9), 'to', qa.time(qa.quantity(starttime_mjd+(timeskip+nints*self.inttime)/(24.*60*60), 'd'), form=['hms'], prec=9)

    # read data into data structure
    ms.open(self.file)
    ms.selectinit(datadescid=ddidlist[0])  # reset select params for later data selection
    selection = {'time': [starttime, stoptime], 'antenna1': ants, 'antenna2': ants}
    ms.select(items = selection)
    print 'Reading %s, SB %d, polarization %s...' % (datacol, ddidlist[0], selectpol)
    ms.selectpolarization(selectpol)
    da = ms.getdata([datacol,'axis_info'], ifraxis=True)
    if da == {}:
        print 'No data found.'
        return
    newda = n.transpose(da[datacol], axes=[3,2,1,0])  # if using multi-pol data.
    if len(ddidlist) > 1:
        for ddid in ddidlist[1:]:
            ms.selectinit(datadescid=ddid)  # reset select params for later data selection
            ms.select(items = selection)
            print 'Reading %s, SB %d, polarization %s...' % (datacol, ddid, selectpol)
            ms.selectpolarization(selectpol)
            da = ms.getdata([datacol,'axis_info'], ifraxis=True)
            newda = n.concatenate( (newda, n.transpose(da[datacol], axes=[3,2,1,0])), axis=2 )
    ms.close()

    # check pol and baseline dimensions of data
    self.npol_orig = da[datacol].shape[0]
    self.npol = len(selectpol)
    self.nbl = da[datacol].shape[2]
    print 'Initializing %d of %d polarizations' % (self.npol, self.npol_orig)
    print 'Initializing nchan:', self.nchan
    print 'Initializing nbl:', self.nbl
    self.nskip = int(nskip*self.nbl)    # number of iterations to skip (for reading in different parts of buffer)

    # create data structures
    # self.rawdata = newda[len(newda)/2:]    # hack to remove autos
    self.rawdata = newda
    self.data = self.rawdata[:,:,self.chans]   # remove channels ignored earlier
    self.dataph = (self.data.mean(axis=3).mean(axis=1)).real   # create dataph, the mean over polarizations and baselines. akin to calculating tied-array beam (possibly without calibration)
    self.min = self.dataph.min()
    self.max = self.dataph.max()
    print 'Shape of rawdata, data:'
    print self.rawdata.shape, self.data.shape
    print 'Dataph min, max:'
    print self.min, self.max

    # set integration time and time axis
    ti = da['axis_info']['time_axis']['MJDseconds']
    self.reltime = ti - ti[0]
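

# --- Illustrative sketch (not part of the original module) ---
# The per-spw frequency axis above is built from three numbers in the spwinfo dict:
# NumChan, Chan1Freq and ChanWidth. A standalone numpy illustration of that arithmetic,
# with made-up values for a hypothetical 64-channel spw:
def _example_freq_axis():
    import numpy as n
    spw = {'NumChan': 64, 'Chan1Freq': 1.4e9, 'ChanWidth': 1e6}   # hypothetical values (Hz)
    # channel center frequencies in GHz, as in the freq concatenation above
    freq_ghz = (spw['Chan1Freq'] + spw['ChanWidth'] * n.arange(spw['NumChan'])) * 1e-9
    # freq_ghz runs from 1.4 GHz (first channel) to 1.463 GHz (last channel)
    return freq_ghz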
def __init__(self, filename, nints=1, nskip=0, ddid=-1, selectpol=['XX','YY']):
    """Initializes the class "mwa".
    This creates a new object containing data and metadata for a set of files in a directory.
    It also includes functions to manipulate data and do some analysis, like making lightcurves, etc.
    Note that this uses CASA libraries in a way that requires it to be run from within "casapy". Other options exist elsewhere.
    Default is to read in the first file (alphabetically) in the directory. Use nints and nskip to control where to start and how many files to read. This assumes that the alphabetical order is the time order.
    Examples of usage in python/casapy:
    import mwavis
    obs = mwavis.mwa('directory') -- create observation object for first file in a directory.
    print obs.data.shape -- see the structure of data read in. dimensions are (time, baseline, channel, polarization)
    results = obs.bisplc(show=1) -- create a bispectrum lightcurve and show any candidate transients. results are returned in 'return' object
    """

    # critical parameters. need to edit these
    ants = range(64)   # set what antennas to use. default is to use "range" to specify all antennas with range(n_ant+1)
    self.chans = n.array(range(64))   # set what channels to use. default is to use range to select all channels.
    self.track0 = [n.zeros(len(self.chans)), self.chans]

    # open first file and read a bit of data to define data structure
    self.file = filename
    print 'Reading ', self.file
    ms.open(self.file)
    spwinfo = ms.getspectralwindowinfo()
    summary = ms.summary()

    # read in multiple subbands ("data id" in casa parlance). mwa probably doesn't use this.
    if ddid < 0:
        ddidlist = range(len(spwinfo['spwInfo']))
    else:
        ddidlist = [ddid]

    freq = n.array([])
    for ddid in ddidlist:
        nch = spwinfo['spwInfo'][str(ddid)]['NumChan']
        ch0 = spwinfo['spwInfo'][str(ddid)]['Chan1Freq']
        chw = spwinfo['spwInfo'][str(ddid)]['ChanWidth']
        freq = n.concatenate( (freq, (ch0 + chw * n.arange(nch)) * 1e-9) )
    self.freq = freq[self.chans]
    self.nchan = len(self.freq)

    # read data into data structure. start with subband 0, then iterate over higher ones. mwa probably only has 0.
    ms.selectinit(datadescid=0)  # reset select params for later data selection
    selection = {'antenna1': ants, 'antenna2': ants}
    ms.select(items = selection)
    print 'Reading SB %d, polarization %s...' % (0, selectpol)
    ms.selectpolarization(selectpol)
    da = ms.getdata(['data','axis_info'], ifraxis=True)
    if da == {}:
        print 'No data found.'
        return
    newda = n.transpose(da['data'], axes=[3,2,1,0])  # if using multi-pol data.
    if len(ddidlist) > 1:
        for ddid in ddidlist[1:]:
            ms.selectinit(datadescid=ddid)  # reset select params for later data selection
            ms.select(items = selection)
            print 'Reading SB %d, polarization %s...' % (ddid, selectpol)
            ms.selectpolarization(selectpol)
            da = ms.getdata(['data','axis_info'], ifraxis=True)
            newda = n.concatenate( (newda, n.transpose(da['data'], axes=[3,2,1,0])), axis=2 )
    ms.close()
    rawdata = newda   # array for collecting raw data

    # check pol and baseline dimensions of data
    self.npol_orig = da['data'].shape[0]
    self.npol = len(selectpol)
    self.nbl = da['data'].shape[2]
    print 'Initializing %d of %d polarizations' % (self.npol, self.npol_orig)
    print 'Initializing nchan:', self.nchan
    print 'Initializing nbl:', self.nbl
    self.nskip = int(nskip*self.nbl)    # number of iterations to skip (for reading in different parts of buffer)

    # set number of antennas and names of baselines
    bls = da['axis_info']['ifr_axis']['ifr_shortname']
    self.blarr = n.array([[bls[i].split('-')[0], bls[i].split('-')[1]] for i in range(len(bls))])
    self.ants = n.unique(self.blarr)
    self.nants = len(self.ants)
    print 'Initializing nants:', self.nants

    # set integration time and time axis
    ti = da['axis_info']['time_axis']['MJDseconds']
    self.inttime = n.mean([ti[i+1] - ti[i] for i in range(len(ti)-1)])
    print 'Initializing integration time (s):', self.inttime
    self.reltime = ti - ti[0]

    # iterate over any remaining files in the directory.
    # NOTE: msfiles is assumed to be the time-ordered (alphabetical) list of MS files in the directory, defined elsewhere.
    if len(msfiles) > 1:
        for file in msfiles[1:]:
            ms.open(file)
            print 'Reading ', file
            spwinfo = ms.getspectralwindowinfo()

            # read in multiple subbands ("data id" in casa parlance). mwa probably doesn't use this.
            if ddid < 0:
                ddidlist = range(len(spwinfo['spwInfo']))
            else:
                ddidlist = [ddid]

            # read data into data structure. start with subband 0, then iterate over higher ones. mwa probably only has 0.
            ms.selectinit(datadescid=0)  # reset select params for later data selection
            ms.select(items = selection)
            print 'Reading SB %d, polarization %s...' % (0, selectpol)
            ms.selectpolarization(selectpol)
            da = ms.getdata(['data','axis_info'], ifraxis=True)
            if da == {}:
                print 'No data found.'
                return
            newda = n.transpose(da['data'], axes=[3,2,1,0])  # if using multi-pol data.
            if len(ddidlist) > 1:
                for ddid in ddidlist[1:]:
                    ms.selectinit(datadescid=ddid)  # reset select params for later data selection
                    ms.select(items = selection)
                    print 'Reading SB %d, polarization %s...' % (ddid, selectpol)
                    ms.selectpolarization(selectpol)
                    da = ms.getdata(['data','axis_info'], ifraxis=True)
                    newda = n.concatenate( (newda, n.transpose(da['data'], axes=[3,2,1,0])), axis=2 )
            ms.close()
            rawdata = n.concatenate( (rawdata, newda), axis=0)

    # create data structures
    self.rawdata = rawdata
    self.data = rawdata[:,:,self.chans]   # remove channels ignored earlier
    self.dataph = (self.data.mean(axis=3).mean(axis=1)).real   # create dataph, the mean over polarizations and baselines. akin to calculating tied-array beam (possibly without calibration)
    self.min = self.dataph.min()
    self.max = self.dataph.max()
    print 'Shape of rawdata, data:'
    print self.rawdata.shape, self.data.shape
    print 'Dataph min, max:'
    print self.min, self.max
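

# --- Illustrative sketch (not part of the original module) ---
# dataph above is the real part of the data averaged over polarization (axis 3) and
# baseline (axis 1), leaving a (time, channel) dynamic spectrum. A minimal standalone
# numpy version of that reduction, using a hypothetical array of ones:
def _example_dataph():
    import numpy as n
    ntime, nbl, nchan, npol = 4, 6, 8, 2                   # hypothetical sizes
    data = n.ones((ntime, nbl, nchan, npol), dtype=complex)
    dataph = data.mean(axis=3).mean(axis=1).real           # same reduction as in __init__
    # dataph.shape == (ntime, nchan)
    return dataph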
def __init__(self, filename, nints=1, nskip=0, selectpol=['XX','YY'], datacol='corrected'):
    """Initializes the class "lofar".
    This creates a new object containing data and metadata for a set of files in a directory.
    It also includes functions to manipulate data and do some analysis, like making lightcurves, etc.
    Note that this uses CASA libraries in a way that requires it to be run from within "casapy". Other options exist elsewhere.
    Default is to read in the first file (alphabetically) in the directory. Use nints and nskip to control where to start and how many files to read. This assumes that the alphabetical order is the time order.
    Examples of usage in python/casapy:
    import lofarvis
    obs = lofarvis.lofar('directory') -- create observation object for first file in a directory.
    print obs.data.shape -- see the structure of data read in. dimensions are (time, baseline, channel, polarization)
    results = obs.bisplc(show=1) -- create a bispectrum lightcurve and show any candidate transients. results are returned in 'return' object
    """

    # critical parameters. need to edit these
    ants = range(25)   # set what antennas to use. default is to use "range" to specify all antennas with range(n_ant+1)
    self.chans = n.array(range(1))   # set what channels to use. default is to use range to select all channels.
    self.track0 = [n.zeros(len(self.chans)), self.chans]

    # open file and read a bit of data to define data structure
    self.file = filename
    print 'Reading ', self.file
    ms.open(self.file)
    spwinfo = ms.getspectralwindowinfo()
    summary = ms.summary()
    scansummary = ms.getscansummary()

    starttime_mjd = scansummary['summary']['0']['0']['BeginTime']
    starttime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
    stoptime0 = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+0.5/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))
    ms.selectinit(datadescid=0)  # initialize params
    selection = {'time': [starttime0, stoptime0], 'antenna1': ants, 'antenna2': ants}
    da = ms.getdata([datacol,'axis_info', 'u', 'v', 'w'], ifraxis=True)
    ms.close()

    self.npol_orig = da[datacol].shape[0]
    self.npol = len(selectpol)
    self.nbl = da[datacol].shape[2]
    print 'Initializing %d of %d polarizations' % (self.npol, self.npol_orig)
    print 'Initializing nbl:', self.nbl

    # good baselines
    bls = da['axis_info']['ifr_axis']['ifr_shortname']
    self.blarr = n.array([[bls[i].split('-')[0], bls[i].split('-')[1]] for i in range(len(bls))])
    self.ants = n.unique(self.blarr)
    self.nants = len(self.ants)
    self.nants0 = len(self.ants)
    print 'Initializing nants:', self.nants
    self.nskip = int(nskip*self.nbl)    # number of iterations to skip (for reading in different parts of buffer)

    # set integration time
    ti0 = da['axis_info']['time_axis']['MJDseconds']
    self.inttime = scansummary['summary']['0']['0']['IntegrationTime']
    self.inttime0 = self.inttime
    print 'Initializing integration time (s):', self.inttime

    nch = spwinfo['spwInfo']['0']['NumChan']
    ch0 = spwinfo['spwInfo']['0']['Chan1Freq']
    chw = spwinfo['spwInfo']['0']['ChanWidth']
    freq = (ch0 + chw * n.arange(nch)) * 1e-9
    self.freq = freq[self.chans]
    self.nchan = len(self.freq)

    # read data into data structure. start with subband 0, then iterate over higher ones. lofar probably only has 0.
    timeskip = self.inttime*nskip
    starttime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+timeskip/(24.*60*60),'d'),form=['ymd'], prec=9), 's'))
    stoptime = qa.getvalue(qa.convert(qa.time(qa.quantity(starttime_mjd+(timeskip+nints*self.inttime)/(24.*60*60), 'd'), form=['ymd'], prec=9), 's'))
    print 'First integration:', qa.time(qa.quantity(starttime_mjd,'d'),form=['ymd'],prec=9)
    print
    print 'Reading times', qa.time(qa.quantity(starttime_mjd+timeskip/(24.*60*60),'d'),form=['hms'], prec=9), 'to', qa.time(qa.quantity(starttime_mjd+(timeskip+nints*self.inttime)/(24.*60*60), 'd'), form=['hms'], prec=9)

    ms.open(self.file)
    ms.selectinit(datadescid=0)  # reset select params for later data selection
    selection = {'time': [starttime, stoptime], 'antenna1': ants, 'antenna2': ants}
    ms.select(items = selection)
    print 'Reading SB %d, polarization %s...' % (0, selectpol)
    ms.selectpolarization(selectpol)
    da = ms.getdata([datacol,'axis_info', 'u', 'v', 'w'], ifraxis=True)
    if da == {}:
        print 'No data found.'
        return
    u = da['u']; v = da['v']; w = da['w']
    newda = n.transpose(da[datacol], axes=[3,2,1,0])  # if using multi-pol data.
    ms.close()
    rawdata = newda   # array for collecting raw data

    # create data structures
    self.u = u.transpose() * (-self.freq.mean()*1e9/3e8)   # uvw are in m on the ground. scale by -frequency/c (i.e., -1/wavelength) to get projected uvw in wavelengths (as in miriad?)
    self.v = v.transpose() * (-self.freq.mean()*1e9/3e8)
    self.w = w.transpose() * (-self.freq.mean()*1e9/3e8)
    self.rawdata = rawdata
    self.data = rawdata[:,:,self.chans]   # remove channels ignored earlier
    self.dataph = (self.data.mean(axis=3).mean(axis=1)).real   # create dataph, the mean over polarizations and baselines. akin to calculating tied-array beam (possibly without calibration)
    self.min = self.dataph.min()
    self.max = self.dataph.max()
    print 'Shape of rawdata, data:'
    print self.rawdata.shape, self.data.shape
    print 'Dataph min, max:'
    print self.min, self.max

    ti = da['axis_info']['time_axis']['MJDseconds']
    self.reltime = ti - ti[0]
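

# --- Illustrative sketch (not part of the original module) ---
# The MS stores (u, v, w) in meters; the readers above convert to wavelengths by
# multiplying by frequency/c (the lofar reader also flips the sign and uses the mean
# selected frequency, while the VLA reader uses the first channel frequency).
# A standalone numpy sketch of the meters-to-wavelengths step, with hypothetical values:
def _example_uv_to_wavelengths():
    import numpy as n
    u_m = n.array([100.0, -250.0, 30.0])   # hypothetical baseline u coordinates in meters
    freq_hz = 150e6                         # hypothetical observing frequency in Hz
    c = 3e8                                 # speed of light in m/s, as approximated in this module
    u_lambda = u_m * freq_hz / c            # baseline lengths in wavelengths
    return u_lambda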
def read_data_from_ms(msfn, viscol="DATA", noisecol='SIGMA', mode='tot', noise_est=False):
    """
    Reads polarization or total intensity data into visibility and noise arrays.

    Args:
        msfn: Name of the MeasurementSet file from which to read the data
        viscol: A string with the name of the MS column from which to read the data ['DATA']
        noisecol: A string with the name of the MS column from which to read the noise or weights ['SIGMA']
        mode: Flag to set whether the function should read in polarization data ('pol') or total intensity data ('tot')

    Returns:
        vis
        noise
    """

    m = M.Messenger(2)
    if mode == 'pol':
        m.header2("Reading polarization data from the MeasurementSet...")
    if mode == 'tot':
        m.header2("Reading total intensity data from the MeasurementSet...")

    viscol = viscol.lower()
    noisecol = noisecol.lower()

    if noise_est:
        m.message("Performing simple noise estimate")
        computenoise(msfn, viscol, m, minsamp=10)

    ms.open(msfn)
    meta = ms.metadata()

    nspw = range(meta.nspw())
    nchan = []
    nvis = []
    u = []
    v = []
    freq = []
    if mode == 'pol':
        Qvis = []
        Uvis = []
        Qsigma = []
        Usigma = []
        lambs2 = []
    if mode == 'tot':
        vis = []
        sigma = []

    # the Q,U OR the I part of the S Jones matrix (hence Spart)
    # from the Stokes enumeration defined in the casa core libraries
    # http://www.astron.nl/casacore/trunk/casacore/doc/html \
    # /classcasa_1_1Stokes.html#e3cb0ef26262eb3fdfbef8273c455e0c
    # this defines which polarization type the data columns correspond to
    corr = ms.getdata(['axis_info'])['axis_info']['corr_axis']

    corr_announce = "Correlation type detected to be "

    ii = complex(0, 1)

    if mode == 'pol':
        if corr[0] == 'RR':  # RR, RL, LR, LL
            QSpart = np.array([0, 0.5, 0.5, 0])
            USpart = np.array([0, -0.5 * ii, 0.5 * ii, 0])
            corr_announce += "RR, RL, LR, LL"
        elif corr[0] == 'I':  # I, Q, U, V
            QSpart = np.array([0, 1., 0, 0])
            USpart = np.array([0, 0, 1., 0])
            corr_announce += "I, Q, U, V"
        elif corr[0] == 'XX':  # XX, XY, YX, YY
            QSpart = np.array([0.5, 0, 0, -0.5])
            USpart = np.array([0, 0.5, 0.5, 0])
            corr_announce += "XX, XY, YX, YY"
    if mode == 'tot':
        if corr[0] == 'RR':  # RR, RL, LR, LL
            Spart = np.array([0.5, 0, 0, 0.5])
            corr_announce += "RR, RL, LR, LL"
        elif corr[0] == 'I':  # I, Q, U, V
            Spart = np.array([1., 0, 0, 0])
            corr_announce += "I, Q, U, V"
        elif corr[0] == 'XX':  # XX, XY, YX, YY
            Spart = np.array([0.5, 0, 0, 0.5])
            corr_announce += "XX, XY, YX, YY"

    print corr_announce

    for spw in nspw:
        nchan.append(meta.nchan(spw))
        ms.selectinit(datadescid=spw)
        temp = ms.getdata([viscol, "axis_info"], ifraxis=False)
        data_temp = temp[viscol]
        info_temp = temp["axis_info"]
        s_temp = ms.getdata(["sigma"], ifraxis=False)['sigma']
        flags = 1. - ms.getdata(["flag"])['flag']
        if not (np.sum(flags[0]) == np.sum(flags[1]) == np.sum(flags[2]) == np.sum(flags[3])):
            m.warn('Warning: Different flags for different correlations/channels. '
                   'Hard flag is applied: If any correlation is flagged, this gets '
                   'extended to all.')
            maximum = np.ones(np.shape(flags[0]))
            for i in range(4):
                if flags[i].sum() < maximum.sum():
                    maximum = flags[i]
            flag = maximum
        else:
            flag = flags[0]

        # Start reading data.
        if mode == 'tot':
            vis_temp = flag * (Spart[0] * data_temp[0] + Spart[1] * data_temp[1] +
                               Spart[2] * data_temp[2] + Spart[3] * data_temp[3])
            sigma_temp = flag * (Spart[0] * s_temp[0] + Spart[1] * s_temp[1] +
                                 Spart[2] * s_temp[2] + Spart[3] * s_temp[3])
            vis.append(vis_temp)
            sigma.append(sigma_temp)
            nvis.append(len(vis_temp[0]))
        if mode == 'pol':
            Qvis_temp = flag * (QSpart[0] * data_temp[0] + QSpart[1] * data_temp[1] +
                                QSpart[2] * data_temp[2] + QSpart[3] * data_temp[3])
            Qsigma_temp = flag * (QSpart[0] * s_temp[0] + QSpart[1] * s_temp[1] +
                                  QSpart[2] * s_temp[2] + QSpart[3] * s_temp[3])
            Qvis.append(Qvis_temp)
            Qsigma.append(Qsigma_temp)

            Uvis_temp = flag * (USpart[0] * data_temp[0] + USpart[1] * data_temp[1] +
                                USpart[2] * data_temp[2] + USpart[3] * data_temp[3])
            Usigma_temp = flag * (USpart[0] * s_temp[0] + USpart[1] * s_temp[1] +
                                  USpart[2] * s_temp[2] + USpart[3] * s_temp[3])
            Uvis.append(Uvis_temp)
            Usigma.append(Usigma_temp)
            nvis.append(len(Uvis_temp[0]))

        # uflat/vflat give all uv coordinates for the chosen spw in m
        uflat = ms.getdata(['u'])['u']
        vflat = ms.getdata(['v'])['v']
        freqtemp = info_temp["freq_axis"]["chan_freq"]
        freq.append(freqtemp)
        lamb = C / freqtemp
        if mode == 'pol':
            lambs2.append(lamb**2 / PI)
        # calculate uv coordinates per channel in lambda
        utemp = np.array([uflat / k for k in lamb])
        vtemp = np.array([vflat / k for k in lamb])
        # Read the uv coordinates into lists. Delete functions take care of flags.
        u.append(utemp)
        v.append(vtemp)

    try:
        summary = ms.summary()
    except:
        print "Warning: Could not create a summary"
        summary = None

    ms.close()

    if mode == 'tot':
        return vis, sigma, u, v, freq, nchan, nspw, nvis, summary
    if mode == 'pol':
        return Qvis, Qsigma, Uvis, Usigma, freq, lamb, u, v, nchan, nspw, nvis
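

# --- Illustrative sketch (not part of the original module) ---
# The Spart/QSpart/USpart vectors above are weights applied to the four correlation
# products. For circular feeds (RR, RL, LR, LL) the code uses I = 0.5*(RR + LL),
# Q = 0.5*(RL + LR) and U = -0.5j*RL + 0.5j*LR. A tiny standalone check of that
# dot product with made-up visibility values:
def _example_stokes_from_corr():
    import numpy as np
    corr = np.array([1.0 + 0.2j, 0.1 - 0.05j, 0.1 + 0.05j, 0.9 - 0.1j])   # hypothetical RR, RL, LR, LL
    Spart = np.array([0.5, 0, 0, 0.5])
    QSpart = np.array([0, 0.5, 0.5, 0])
    USpart = np.array([0, -0.5j, 0.5j, 0])
    I = np.dot(Spart, corr)    # 0.5*(RR + LL)
    Q = np.dot(QSpart, corr)   # 0.5*(RL + LR)
    U = np.dot(USpart, corr)   # -0.5j*RL + 0.5j*LR
    return I, Q, U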
def read_pyratdata_from_ms(msfn, vis, noise, viscol="DATA", noisecol='SIGMA', mode='pol'):
    """
    Reads polarization or total intensity data into visibility and noise arrays.

    Args:
        msfn: Name of the MeasurementSet file from which to read the data
        vis: Pyrat data object
        noise: Pyrat data object
        viscol: A string with the name of the MS column from which to read the data ['DATA']
        noisecol: A string with the name of the MS column from which to read the noise or weights ['SIGMA']
        mode: Flag to set whether the function should read in polarization data ('pol') or total intensity data ('tot')

    Returns:
        vis
        noise
    """
    # Attention: in the current setting, pyrat can't properly handle different flags on separate channels.

    # Messenger object for displaying messages
    m = vis.m

    if vis._initialized and noise._initialized:
        m.warn("Requested data objects already exist. Using the " +
               "previously parsed data.")
        return

    if mode == 'pol':
        m.header2("Reading polarization data from the MeasurementSet...")
    if mode == 'tot':
        m.header2("Reading total intensity data from the MeasurementSet...")

    viscol = viscol.lower()
    noisecol = noisecol.lower()

    ms.open(msfn)
    meta = ms.metadata()

    nspw = range(meta.nspw())
    nchan = []
    u = []
    v = []
    freq = []
    if mode == 'pol':
        lambs = []

    # the Q,U OR the I part of the S Jones matrix (hence Spart)
    # from the Stokes enumeration defined in the casa core libraries
    # http://www.astron.nl/casacore/trunk/casacore/doc/html \
    # /classcasa_1_1Stokes.html#e3cb0ef26262eb3fdfbef8273c455e0c
    # this defines which polarization type the data columns correspond to
    corr = ms.getdata(['axis_info'])['axis_info']['corr_axis']

    corr_announce = "Correlation type detected to be "

    ii = complex(0, 1)

    if mode == 'pol':
        if corr[0] == 'RR':  # RR, RL, LR, LL
            QSpart = np.array([0, 0.5, 0.5, 0])
            USpart = np.array([0, -0.5 * ii, 0.5 * ii, 0])
            corr_announce += "RR, RL, LR, LL"
        elif corr[0] == 'I':  # I, Q, U, V
            QSpart = np.array([0, 1., 0, 0])
            USpart = np.array([0, 0, 1., 0])
            corr_announce += "I, Q, U, V"
        elif corr[0] == 'XX':  # XX, XY, YX, YY
            QSpart = np.array([0.5, 0, 0, -0.5])
            USpart = np.array([0, 0.5, 0.5, 0])
            corr_announce += "XX, XY, YX, YY"
    if mode == 'tot':
        if corr[0] == 'RR':  # RR, RL, LR, LL
            Spart = np.array([0.5, 0, 0, 0.5])
            corr_announce += "RR, RL, LR, LL"
        elif corr[0] == 'I':  # I, Q, U, V
            Spart = np.array([1., 0, 0, 0])
            corr_announce += "I, Q, U, V"
        elif corr[0] == 'XX':  # XX, XY, YX, YY
            Spart = np.array([0.5, 0, 0, 0.5])
            corr_announce += "XX, XY, YX, YY"

    m.message(corr_announce)

    for spw in nspw:
        nchan.append(meta.nchan(spw))
        ms.selectinit(datadescid=spw)
        temp = ms.getdata([viscol, "axis_info"], ifraxis=False)
        data_temp = temp[viscol]
        info_temp = temp["axis_info"]
        s_temp = ms.getdata(["sigma"], ifraxis=False)['sigma']
        flags = 1. - ms.getdata(["flag"])['flag']
        if not (np.sum(flags[0]) == np.sum(flags[1]) == np.sum(flags[2]) == np.sum(flags[3])):
            m.warn('Warning: Different flags for different correlations/channels. '
                   'Hard flag is applied: If any correlation is flagged, this gets '
                   'extended to all.')
            maximum = np.ones(np.shape(flags[0]))
            for i in range(4):
                if flags[i].sum() < maximum.sum():
                    maximum = flags[i]
            flag = maximum
        else:
            flag = flags[0]   # same flags for all correlations (as in read_data_from_ms)
        if (np.sum(flag) == 0):
            m.warn('Spw ' + str(spw) + ' is completely flagged!\n')
            continue

        # Start reading data.
        if mode == 'tot':
            vis_temp = flag * (Spart[0] * data_temp[0] + Spart[1] * data_temp[1] +
                               Spart[2] * data_temp[2] + Spart[3] * data_temp[3])
            sigma_temp = flag * (Spart[0] * s_temp[0] + Spart[1] * s_temp[1] +
                                 Spart[2] * s_temp[2] + Spart[3] * s_temp[3])
            vislist = np.array(vis_temp)
            sigmalist = np.array(sigma_temp)
        if mode == 'pol':
            Qvis_temp = flag * (QSpart[0] * data_temp[0] + QSpart[1] * data_temp[1] +
                                QSpart[2] * data_temp[2] + QSpart[3] * data_temp[3])
            Qsigma_temp = flag * (QSpart[0] * s_temp[0] + QSpart[1] * s_temp[1] +
                                  QSpart[2] * s_temp[2] + QSpart[3] * s_temp[3])
            Uvis_temp = flag * (USpart[0] * data_temp[0] + USpart[1] * data_temp[1] +
                                USpart[2] * data_temp[2] + USpart[3] * data_temp[3])
            Usigma_temp = flag * (USpart[0] * s_temp[0] + USpart[1] * s_temp[1] +
                                  USpart[2] * s_temp[2] + USpart[3] * s_temp[3])
            Qvislist = np.array(Qvis_temp)
            Qsigmalist = np.array(Qsigma_temp)
            Uvislist = np.array(Uvis_temp)
            Usigmalist = np.array(Usigma_temp)

        # uflat/vflat give all uv coordinates for the chosen spw in m
        uflat = ms.getdata(['u'])['u']
        vflat = ms.getdata(['v'])['v']
        freqtemp = (info_temp["freq_axis"]["chan_freq"]).reshape(nchan[spw])
        freq.append(freqtemp)
        lamb = C / freqtemp
        if mode == 'pol':
            lambs.append(lamb)
        # calculate uv coordinates per channel in lambda
        utemp = np.array([uflat / k for k in lamb])
        vtemp = np.array([vflat / k for k in lamb])
        # Read the uv coordinates into lists. Delete functions take care of flags.
        u.append(utemp)
        v.append(vtemp)

        if mode == 'tot':
            nrecs = vislist.shape[1]   # number of visibility records per channel (assumed)
            vis.init_subgroup(spw, freqtemp, nrecs)
            noise.init_subgroup(spw, freqtemp, nrecs)
            for k in range(nchan[spw]):
                vis.store_records(vislist[k], spw, k)
                noise.store_records(sigmalist[k], spw, k)
                vis.coords.put_coords(utemp[k], vtemp[k], spw, k)   # per-channel uv coordinates (assumed)
        if mode == 'pol':
            nrecs = Qvislist.shape[1]   # number of visibility records per channel (assumed)
            vis.init_subgroup(spw, lamb**2 / PI, nrecs)
            noise.init_subgroup(spw, lamb**2 / PI, nrecs)
            noise_array = np.real(np.sqrt(Qsigmalist * Qsigmalist.conjugate() +
                                          Usigmalist * Usigmalist.conjugate()))
            for h in range(nchan[spw]):
                vis.store_records([Qvislist[h], Uvislist[h]], spw, h)
                noise.store_records(noise_array[h], spw, h)
                vis.coords.put_coords(utemp[h], vtemp[h], spw, h)   # per-channel uv coordinates (assumed)

    ms.close()
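

# --- Illustrative sketch (not part of the original module) ---
# The warning above describes the intended "hard flag": if any correlation is flagged
# for a sample, treat that sample as flagged for all correlations. With the 1 = good,
# 0 = flagged convention used above (flags = 1. - FLAG), that intent corresponds to an
# elementwise product (logical AND) over the correlation axis. A standalone numpy
# sketch of that combination for a hypothetical flags array:
def _example_hard_flag(flags):
    """flags: array of shape (4, nchan, nrows) with 1 = good, 0 = flagged."""
    import numpy as np
    return np.prod(flags, axis=0)   # 1 only where all four correlations are unflagged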