def polconvert(ms='temp/temp.lofar_xyz.ms'):
    print "*** Converting from linear to circular polarisation ***"
    import pyrap.tables as pt
    t = pt.taql("update %s/ set DATA = mscal.stokes(DATA,'circ')" % ms)
    t = pt.taql("update %s/POLARIZATION set CORR_TYPE=[5,6,7,8]" % ms)
    t.close()
    print 'converted linear X-Y to circular R-L polarization'
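A hedged usage sketch of the function above: it edits the MS in place, and CORR_TYPE values 5-8 correspond to RR, RL, LR, LL.
# Illustrative call; the path is simply the default from the signature above.
polconvert(ms='temp/temp.lofar_xyz.ms')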
Example #2
    def getMessages(self, start_time, end_time, start_freq, end_freq, iteration="last"):
        messagesDict={}       # create an empty dictionary
        # return all iterations (default behaviour)
        if iteration == "all":
           messagesDict["result"]="all"

           # Loop over all iterations
           for iter in range(1, self.getMaxIter()+1):
                 taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND ITER=" + str(iter)
                 result=pt.taql(taqlcmd)           # execute TaQL command
                 messagesDict[iter]=result.getcol("MESSAGE")

        # return the last iteration only
        elif iteration == "Last" or iteration == "last":
           #print "readCells(): last"        # DEBUG
           messagesDict["result"]="last"

           taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND LASTITER=TRUE"
           result=pt.taql(taqlcmd)           # execute TaQL command
           
           print "result.nrows() = ", result.nrows()
           
           messagesDict["last"]=result.getcol("MESSAGE")

        # return only a particular iteration
        elif type(iteration).__name__ == "int":
            #print "iteration: ", iteration    # DEBUG
            messagesDict["result"]="iteration"
            taqlcmd="SELECT * FROM " + self.tablename + + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ=" + str(start_freq) + " AND ENDFREQ=" + str(end_freq) + " AND ITER=" + str(iteration) + " ORDERBY STARTFREQ"
            result=pt.taql(taqlcmd)        # execute TaQL command      
            
            messagesDict[iteration]=result.getcol("MESSAGE")

        return messagesDict
Example #3
def addcol(ms, incol, outcol):
    if outcol not in ms.colnames():
        logging.info('Adding column: ' + outcol)
        coldmi = ms.getdminfo(incol)
        coldmi['NAME'] = outcol
        ms.addcols(pt.makecoldesc(outcol, ms.getcoldesc(incol)), coldmi)
    if outcol != incol:
        # copy columns val
        logging.info('Set ' + outcol + '=' + incol)
        pt.taql("update $ms set " + outcol + "=" + incol)
Example #4
def addcol(ms, incol, outcol):
    if outcol not in ms.colnames():
        logging.info('Adding column: '+outcol)
        coldmi = ms.getdminfo(incol)
        coldmi['NAME'] = outcol
        ms.addcols(pt.makecoldesc(outcol, ms.getcoldesc(incol)), coldmi)
    if outcol != incol:
        # copy columns val
        logging.info('Set '+outcol+'='+incol)
        pt.taql("update $ms set "+outcol+"="+incol)
Example #5
    def run(self, infile, baseline_filename):
        """
        baseline_filename points to a file containing a pickled array of
        antenna pairs.
        """
        with log_time(self.logger):
            if os.path.exists(infile):
                self.logger.info("Processing %s" % (infile))
            else:
                self.logger.error("Dataset %s does not exist" % (infile))
                return 1

            if not os.path.exists(baseline_filename):
                self.logger.error("baseline file %s not found" %
                                  (baseline_filename))
                return 1

            with open(baseline_filename) as file:
                baselines = load(file)

            antenna1, antenna2 = [], []
            for baseline in baselines:
                ant1, ant2 = baseline.split("&")
                antenna1.append(int(ant1))
                antenna2.append(int(ant2))

            if antenna1 and antenna2:
                cmd = "UPDATE %s SET FLAG=True WHERE any(ANTENNA1=%s and ANTENNA2=%s)" % \
                    (infile, str(antenna1), str(antenna2))
                self.logger.info("Running TaQL: " + cmd)

                try:
                    taql(cmd)
                except Exception as e:
                    self.logger.warn(str(e))
                    return 1
            else:
                self.logger.warn("No baselines specified to flag")

            # QUICK HACK: Also flag last timestep
            t = table(infile)
            maxtime = t.getcol('TIME').max()
            t.close()
            cmd = "UPDATE %s SET FLAG=True WHERE TIME=%f" % (infile, maxtime)
            self.logger.info("Running TaQL: " + cmd)
            try:
                taql(cmd)
            except Exception as e:
                self.logger.warn(str(e))
                return 1

        return 0
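For reference, a hedged sketch of how a matching baseline file could be produced (Python 2 pickle in text mode, since the reader above opens the file without 'rb'); the helper name, filename and pairs are illustrative:
import pickle

def write_baseline_file(filename, pairs):
    # pairs: iterable of (ant1, ant2) integer tuples, e.g. [(0, 1), (0, 2)]
    baselines = ["%d&%d" % (a1, a2) for a1, a2 in pairs]
    with open(filename, "w") as f:
        pickle.dump(baselines, f)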
Example #6
    def readTimeColumn(self, parameter, iteration="all"):
        print "readTimeColumn(self, parameter, iteration=", iteration ,"):"   # DEBUG
        
        # Get first all unique time slots
        if self.timeSlots.nrows()==0:
            self.timeSlots=self.getTimeSlots()

        # Get MAXITER first
        maxIter=self.getMaxIter()
        print "maxIter: ", maxIter

        parmsDict={}

        # return all iterations (default behaviour)
        if iteration == "all":
           parmsDict["result"]="all"
   
           # Loop over all iterations
           for iter in range(1, maxIter+1):
                 taqlcmd="SELECT DISTINCT STARTTIME, ENDTIME, ITER, " + parameter + " FROM " + self.tablename + " WHERE ITER=" + str(iter)
                 selection=pt.taql(taqlcmd)              # execute TaQL command
                 parmIter=selection.getcol(parameter)    # select column with wanted parameter
                 print "readTimeColumn-type(parmIter): ", type(parmIter)
                 parmsDict[iter]=parmIter
           return parmsDict

        # return the last iteration only
        elif iteration == "Last" or iteration == "last":
           parmsDict["result"]="last"

           taqlcmd="SELECT DISTINCT STARTTIME, ENDTIME, ITER, " + parameter + " FROM " + self.tablename + " WHERE LASTITER=TRUE"
           selection=pt.taql(taqlcmd)           # execute TaQL command
           parmDict["last"]=selection.getcol(parameter)    # select column with wanted parameter

           return parmsDict

        # return only a particular iteration
        elif type(iteration).__name__ == "int":
            parmsDict["result"]="iteration"

            taqlcmd="SELECT DISTINCT STARTTIME, ENDTIME, ITER FROM " + self.tablename + " WHERE ITER=" + str(iteration) + " ORDERBY STARTTIME"
            selection=pt.taql(taqlcmd)           # execute TaQL command      
            parmsDict[iteration]=selection.getcol(parameter)    # select column with wanted parameter

            return parmsDict

        else:
            parmsDict["result"]=False
            return parmsDict
Example #7
    def getCorrMatrix(self, start_time, end_time, start_freq, end_freq, getStartTimes=True, getRank=False):
        start_time, end_time=self.fuzzyTime(start_time, end_time)
        start_freq, end_freq=self.fuzzyFreq(start_freq, end_freq)

        corrMatrix=[]    # list to contain returned corrMatrices

        # LASTITER=TRUE (Correlation matrix is only recorded by the solver for the last iteration)
        taqlcmd="SELECT STARTTIME, CORRMATRIX FROM " + self.tablename + " WHERE STARTTIME >= "+ str(start_time) + " AND ENDTIME <= " + str(end_time) + " AND STARTFREQ >= " + str(start_freq) + " AND ENDFREQ <= " + str(end_freq) + " AND LASTITER=TRUE"

        result=pt.taql(taqlcmd)
        rank=self.getRank(start_time, end_time, start_freq, end_freq)          # get the RANK from this cell
        #rankDef=self.getRankDef(start_time, end_time, start_freq, end_freq)    # rank deficiency

        #print "solverQuery::getCorrMatrix() result.nrows() = ", result.nrows()   # DEBUG

        # This is needed if we get back more than one result (but is buggy now)
        #if self.type=="PERITERATION_CORRMATRIX":
        #    result=result[1]['CORRMATRIX']
        #else:
        #    result=result[0]['CORRMATRIX']
  
        # The Corrmatrix will only be for (N+1)th iteration
        if result.nrows()==1:
          corrMatrix=result[0]['CORRMATRIX']  # select CORRMATRIX and write to numpy 1D-array
        else:
          corrMatrix=result[1]['CORRMATRIX']  # select CORRMATRIX and write to numpy 1D-array

        if getStartTimes==True and getRank==True:
            starttimes=result.getcol('STARTTIME')  # also select starttimes
            return corrMatrix, starttimes, rank
        elif getStartTimes==False and getRank==True:
            return corrMatrix, rank
        else:
            return corrMatrix
Example #8
 def field_size_ateam(table):
     logging.debug('Computing field size for A-team')
     fieldtable = table.getkeyword('FIELD').split()[1]
     taqloutput = pt.taql(
         "calc from %s calc max(angdist (DELAY_DIR[0,], [%s]))" %
         (fieldtable, ", ".join(",".join(src) for src in ATEAM)))
     return taqloutput[0]
def losotolofarbeam(parmdb,
                    soltabname,
                    ms,
                    inverse=False,
                    useElementResponse=True,
                    useArrayFactor=True,
                    useChanFreq=True):
    """ Do the beam correction via this imported losoto operation

    Args:
        parmdb (str): name of the h5parm to work on.
        soltabname (str): name of the soltab to operate on.
        inverse (bool): apply the inverse beam correction.
        useElementResponse (bool): apply the element beam correction.
        useArrayFactor (bool): apply the array factor correction.
        useChanFreq (bool): operate per channel.
    """
    H5 = h5parm.h5parm(parmdb, readonly=False)
    soltab = H5.getSolset('sol000').getSoltab(soltabname)

    sr = stationresponse(ms, inverse, useElementResponse, useArrayFactor,
                         useChanFreq)

    numants = pt.taql('select gcount(*) as numants from ' + ms +
                      '::ANTENNA').getcol('numants')[0]
    times = soltab.getAxisValues('time')

    for vals, coord, selection in soltab.getValuesIter(
            returnAxes=['ant', 'time', 'pol', 'freq'], weight=False):
        vals = losoto.lib_operations.reorderAxes(
            vals, soltab.getAxesNames(), ['ant', 'time', 'freq', 'pol'])

        for stationnum in range(numants):
            logging.debug('Working on station number %i' % stationnum)
            for itime, time in enumerate(times):
                beam = sr.evaluateStation(time=time, station=stationnum)
                # Reshape from [nfreq, 2, 2] to [nfreq, 4]
                beam = beam.reshape(beam.shape[0], 4)

                if soltab.getAxisLen('pol') == 2:
                    beam = beam[:, [0, 3]]  # get only XX and YY

                if soltab.getType() == 'amplitude':
                    vals[stationnum, itime, :, :] = np.abs(beam)
                elif soltab.getType() == 'phase':
                    vals[stationnum, itime, :, :] = np.angle(beam)
                else:
                    logging.error(
                        'Beam prediction works only for amplitude/phase solution tables.'
                    )
                    return 1

        vals = losoto.lib_operations.reorderAxes(
            vals, ['ant', 'time', 'freq', 'pol'], [
                ax for ax in soltab.getAxesNames()
                if ax in ['ant', 'time', 'freq', 'pol']
            ])
        soltab.setValues(vals, selection)

    H5.close()
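A hedged usage sketch of the function above; the h5parm, soltab and MS names are placeholders, and losoto plus its stationresponse dependency are assumed to be importable:
# Illustrative call; all file/soltab names below are made up.
losotolofarbeam('cal.h5', 'amplitude000', 'example.MS',
                inverse=False, useElementResponse=True,
                useArrayFactor=True, useChanFreq=True)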
Example #10
    def run(self, infile):
        with log_time(self.logger):
            if os.path.exists(infile):
                self.logger.info("Processing %s" % (infile))
            else:
                self.logger.error("Dataset %s does not exist" % (infile))
                return 1

            try:
                self.outputs['start_time'] = taql(
                    "CALC MIN([SELECT TIME from %s])" % infile)[0]
                self.outputs['end_time'] = taql(
                    "CALC MAX([SELECT TIME from %s])" % infile)[0]
            except Exception, e:
                self.logger.error(str(e))
                return 1
Example #11
 def setFreqs(self):
     if len(self.frequencies) == 0:
         taqlcmd="SELECT UNIQUE STARTFREQ, ENDFREQ FROM " + self.tablename
         self.frequencies=pt.taql(taqlcmd)
         
         self.startFreqs=self.frequencies.getcol("STARTFREQ")
         self.endFreqs=self.frequencies.getcol("ENDFREQ")
Example #12
    def readFreqColumn(self, parameter, iteration="all"):
        #print "readFreqColumn(self, parameter, iteration=", iteration, ":"    # DEBUG

        # Get first all unique frequencies
        if len(self.frequencies)==0:
            self.frequencies=self.getFreqs()

        # Get MAXITER first
        #maxIter=pt.tablecolumn(self.solverTable, "MAXITER")[0]

        parmsDict={}       # create an empty dictionary

        # return all iterations (default behaviour)
        if iteration == "all":
           parmsDict["result"]="all"

           # Loop over all iterations
           for iter in range(1, self.getMaxIter()+1):
                 taqlcmd="SELECT DISTINCT STARTFREQ, ENDFREQ, ITER, " + parameter + " FROM " + self.tablename + " WHERE ITER=" + str(iter)
                 selection=pt.taql(taqlcmd)              # execute TaQL command
                 parmIter=selection.getcol(parameter)    # select column with wanted parameter
                 parmsDict[iter]=parmIter                    # write into dictionary__
           return parmsDict

        # return the last iteration only
        elif iteration == "Last" or iteration == "last":
            parmsDict["result"]="last"

            taqlcmd="SELECT DISTINCT STARTFREQ, ENDFREQ, " + parameter + " FROM " + self.tablename + " WHERE LASTITER=TRUE"
            selection=pt.taql(taqlcmd)           # execute TaQL command
            parmsDict["last"]=selection.getcol(parameter)    # select column with wanted parameter
            return parmsDict

        # return only a particular iteration
        elif type(iteration).__name__ == "int":
            parmsDict["result"]="iteration"

            taqlcmd="SELECT " + parameter + " FROM " + self.tablename + " WHERE ITER=" + str(iteration) + " ORDERBY STARTFREQ"
            #print "taqlcmd: ", taqlcmd           # DEBUG
            selection=pt.taql(taqlcmd)           # execute TaQL command      
            parmsDict[iteration]=selection.getcol(parameter)    # select column with wanted parameter

            return parmsDict
        else:
            parmsDict["result"]=False
            return parmsDict
Example #13
    def readCell(self, start_time, end_time, start_freq, end_freq, iteration="Last"):
        print "readCell(self, start_time, end_time, start_freq, end_freq, iteration=Last)"  # DEBUG

        start_time, end_time=self.fuzzyTime(start_time, end_time)
        start_freq, end_freq=self.fuzzyFreq(start_freq, end_freq)

        cellDict={}

        # return all iterations (default behaviour)
        if iteration == "all":
           cellDict["result"]="all"                   # give type of result

           # Loop over all iterations
           for iter in range(1, self.getMaxIter()+1):
                 taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTFREQ >=" + str(start_freq) + " AND ENDFREQ <= " + str(end_freq) + " AND ITER = " + str(iter)
                 cellDict[iter]=pt.taql(taqlcmd)           # execute TaQL command
           return cellDict

        # return the last iteration only
        elif iteration == "Last" or iteration == "last":
           cellDict["result"]="last"                   # give type of result

           # Loop over all iterations
           taqlcmd="SELECT * FROM " + self.tablename + " WHERE LASTITER=", str(iter)
           selection=pt.taql(taqlcmd)           # execute TaQL command
           cellDict["last"]=selection

           return cellDict

        # return only a particular iteration
        elif isinstance(iteration, int):
            cellDict["result"]="iteration"                   # give type of result

            taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTFREQ=" + str(start_freq) + " AND ENDFREQ=" + str(end_freq) + " AND ITER=" + str(iteration) + " ORDERBY STARTFREQ"
            selection=pt.taql(taqlcmd)           # execute TaQL command      

            cellDict[iteration]=selection

            return cellDict

        else:
            print "readCell(): unknown iteration"
            cellDict["result"]="False"


            return cellDict
Example #14
def addSourceTable (image, sourcedbName, minTime, maxTime):
    # Create the table using TaQL.
    tab = pt.taql ("create table '" + image.name() + "/LOFAR_SOURCE' " + 
                   "SOURCE_ID int, \TIME double, INTERVAL double, " +
                   "NUM_LINES int, NAME string, " +
                   "DIRECTION double shape=[2], " +
                   "PROPER_MOTION double shape=[2], " +
                   "FLUX double shape=[4], " +
                   "SPINX double, REF_FREQUENCY double, " +
                   "SHAPE double shape=[3]")
    tab.putcolkeyword ("TIME", "QuantumUnits", ["s"])
    tab.putcolkeyword ("INTERVAL", "QuantumUnits", ["s"])
    tab.putcolkeyword ("DIRECTION", "QuantumUnits", ["rad"])
    tab.putcolkeyword ("PROPER_MOTION", "QuantumUnits", ["rad/s"])
    tab.putcolkeyword ("FLUX", "QuantumUnits", ["Jy"])
    tab.putcolkeyword ("REF_FREQUENCY", "QuantumUnits", ["MHz"])
    tab.putcolkeyword ("SHAPE", "QuantumUnits", ["rad", "rad", "rad"])
    tab.putcolkeyword ("TIME", "MEASINFO", {"Ref":"UTC", "type":"epoch"})
    tab.putcolkeyword ("DIRECTION", "MEASINFO", {"Ref":"J2000", "type":"direction"})
    tab.flush()
    image.putkeyword ("ATTRGROUPS." + "LOFAR_SOURCE", tab)
    # Get all parameters from the source parmdb.
    midtime = (minTime + maxTime) / 2
    inttime = maxTime - minTime
    sourcedb = pdb.parmdb(sourcedbName)
    # Get all source names by getting the Ra parms from DEFAULTVALUES
    names = [name[3:] for name in sourcedb.getDefNames ("Ra:*")]
    values = sourcedb.getDefValues()
    sourcedb = 0   # close
    row = 0
    tab.addrows (len(names))
    # Add the info of all sources.
    # The field names below are as used in SourceDB.
    fldnames = ["Ra", "Dec", "I", "Q", "U", "V", "SpectralIndex:0",
                "ReferenceFrequency", "Orientation", "MajorAxis", "MinorAxis"]
    vals = [0. for fld in fldnames]
    for name in names:
        for i in range(len(fldnames)):
            key = fldnames[i] + ":" + name
            if values.has_key (key):
                vals[i] = values[key][0][0]
            else:
                vals[i] = 0.
        tab.putcell ("SOURCE_ID", row, row)
        tab.putcell ("TIME", row, midtime);
        tab.putcell ("INTERVAL", row, inttime);
        tab.putcell ("NUM_LINES", row, 0);
        tab.putcell ("NAME", row, name);
        tab.putcell ("DIRECTION", row, vals[:2]);
        tab.putcell ("PROPER_MOTION", row, (0.,0.));
        tab.putcell ("FLUX", row, vals[2:6]);
        tab.putcell ("SPINX", row, vals[6]);
        tab.putcell ("REF_FREQUENCY", row, vals[7]);
        tab.putcell ("SHAPE", row, vals[8:11]);
        row += 1
    # Ready.
    tab.close()
    print "Added subtable LOFAR_SOURCE containing", row, "rows"
Example #15
    def getRankDef(self, start_time=None, end_time=None, start_freq=None, end_freq=None):
        if start_time == None or end_time == None or start_freq == None or end_freq == None:
            rankdef=self.readParameterIdx("RANKDEF", 0)
        else:
            taqlcmd="SELECT RANKDEF FROM " + self.tablename + " WHERE STARTTIME >= " + str(start_time) + " AND ENDTIME <= " + str(end_time) + " AND STARTFREQ >= " + str(start_freq) + " AND ENDFREQ <= " + str(end_freq) + " AND LASTITER=TRUE"
            result=pt.taql(taqlcmd)
            rankdef=result.getcol("RANKDEF")

        return rankdef
Example #16
    def run(self, infile):
        with log_time(self.logger):
            if os.path.exists(infile):
                self.logger.info("Processing %s" % (infile))
            else:
                self.logger.error("Dataset %s does not exist" % (infile))
                return 1

            try:
                self.outputs['start_time'] = taql(
                    "CALC MIN([SELECT TIME from %s])" % infile
                )[0]
                self.outputs['end_time'] = taql(
                    "CALC MAX([SELECT TIME from %s])" % infile
                )[0]
            except Exception, e:
                self.logger.error(str(e))
                return 1
Example #17
def read_ms(logger, msname, ateam, diameter=None):
    def get_station_diameter(table):
        histable = pt.table(table.getkeyword('HISTORY'), ack=False)
        for line in histable.getcell('APP_PARAMS', 0):
            try:
                key, value = line.split("=")
            except ValueError:
                continue
            if key == "Observation.antennaSet":
                antenna_set = value
                break
        if antenna_set == "LBA_INNER":
            logger.debug("LBA_INNER mode")
            return STATION_DIAMETER["LBA_INNER"]
        elif antenna_set[:3] == "LBA":
            logger.debug("LBA_(OUTER,SPARSE,X,Y) mode")
            return STATION_DIAMETER["LBA"]
        elif antenna_set[:3] == "HBA":
            logger.debug("HBA mode")
            return STATION_DIAMETER["HBA"]
        else:
            logger.error("Failed to identify antenna set")

    def field_size_ateam(table):
        logging.debug('Computing field size for A-team')
        fieldtable = table.getkeyword('FIELD').split()[1]
        taqloutput = pt.taql("calc from %s calc max(angdist (DELAY_DIR[0,], [%s]))" % (fieldtable, ", ".join(",".join(src) for src in ATEAM))  )
        return taqloutput[0]

    def field_size_nominal(table, wavelength, diameter):
        if not diameter:
            diameter = get_station_diameter(table)
        logger.debug("Station diameter %f m" % diameter)
        return 1.22*wavelength/diameter

    t = pt.table(msname, readonly=True, ack=False)
    interval = t.getcell('INTERVAL', 0)
    swtable = t.getkeyword('SPECTRAL_WINDOW')
    tsw = pt.table(swtable, readonly=True, ack=False)
    freq = tsw.getcell('REF_FREQUENCY', 0)
    wavelength = 299792458./freq
    maxbl = pt.taql("calc sqrt(max([select sumsqr(UVW[0:1]) from %s]))" % msname)[0] / wavelength
    chwidth = tsw.getcell('CHAN_WIDTH', 0)[0]

    if ateam:
        fieldsize = field_size_ateam(t)
    else:
        fieldsize = field_size_nominal(t, wavelength, diameter)

    logger.debug('Frequency is %f MHz'%(freq/1.e6))
    logger.debug('Wavelength is %f m'%(wavelength))
    logger.debug('Maximum baseline length is %f m = %f lambdas'%(maxbl*wavelength,maxbl))
    logger.debug('Integration time is %f sec'%(interval))
    logger.debug('Channel width is %f Hz'%(chwidth))
    logger.debug('Field size is %f degrees'%(fieldsize*180./3.14159))

    return fieldsize, maxbl, freq, interval, chwidth
Example #18
    def compareColumn(self, columnname, taql=False):
        if self.verbose:
            print "Comparing "+  bcolors.OKBLUE + columnname + bcolors.ENDC + " columns." # DEBUG

        passed=False
        errorcount=0                                # counter that counts rows with differing columns

        if taql==False:                             # If taql is not to be used for comparison, use numpy difference
          if self.debug:
            print "compareColumn() using numpy" 

          reftab=pt.table(self.MS)                # Open reference MS in readonly mode
          testtab=pt.table(self.test_MS)          # Open test MS in readonly mode     
     
          tc_ref=reftab.col(columnname)           # get column in reference table as numpy array
          tc_test=testtab.col(columnname)         # get column in test table as numpy array
  
          nrows=testtab.nrows()
          passed=True
          for i in progressbar( range(0, nrows), "comparing " + columnname + " ", 60):
              difference = numpy.max(abs(tc_test[i] - tc_ref[i]))    # Use numpy's ability to subtract arrays from each other
              #sum=numpy.sum(difference)
              
              #if sum > (self.acceptancelimit/len(difference)):     # determine if this failed the test
              if difference > self.acceptancelimit:                 # any row exceeding the limit fails the test
                  passed=False
  
          reftab.close()
          testtab.close()
        else:
            if self.debug:
              print "compareColumn() using TaQL"          # DEBUG
  
            self.addRefColumnToTesttab(columnname)      # add reference table column as forward column
        
            testcolumnname = "test_" + columnname       # create name which is used in test_MS if refcolum was added
        
            # Loop over columns, compute and check difference (must be within self.acceptancelimit)            
            # use TaQL for this? How to select from two tables? TODO: check this!
            
#            taqlcmd = "SELECT * FROM '" + self.test_MS + "' WHERE !all(NEAR(Real("+columnname+"), Real("+testcolumnname+")) AND NEAR(Imag("+columnname+"), Imag("+testcolumnname+")))"
#            errorcount = result.nrows()
            taqlcmd = "SELECT * FROM '" + self.test_MS + "' WHERE !all(NEARABS(Real("+columnname+"), Real("+testcolumnname+")," + str(self.acceptancelimit) + ") AND NEARABS(Imag("+columnname+"), Imag("+testcolumnname+"),"+ str(self.acceptancelimit) +"))"
#            print "taqlcmd = ", taqlcmd     # DEBUG
            errorcount=pt.taql(taqlcmd).nrows()            
            
            if self.verbose or self.debug:
              print "errorcount = ", errorcount         # display number of errors=No. of rows

            # If test_MS COLUMN and reference COLUMN have any discrepancy...            
            if errorcount > 0:
                passed=False      # ... the test is failed
            else:
                passed=True
        return passed
Example #19
File: check.py Project: ska-sa/xova
def check_ms(args):
    """ Entrypoint, call with arguments """

    Q = pt.taql("SELECT *, SHAPE(DATA) AS DATA_SHAPE FROM {ms}"
                .format(ms=args.ms))
    D = pt.table("::".join((args.ms, "DATA_DESCRIPTION")), ack=False)
    S = pt.table("::".join((args.ms, "SPECTRAL_WINDOW")), ack=False)
    P = pt.table("::".join((args.ms, "POLARIZATION")), ack=False)

    spw_id = D.getcol("SPECTRAL_WINDOW_ID")
    pol_id = D.getcol("POLARIZATION_ID")
    num_chan = S.getcol("NUM_CHAN")
    num_corr = P.getcol("NUM_CORR")

    # Check channel and corrlation conformance for
    # visibility data
    for r in range(0, Q.nrows(), args.row_chunks):
        nrow = min(args.row_chunks, Q.nrows() - r)
        ddid = Q.getcol("DATA_DESC_ID", startrow=r, nrow=nrow)
        data_shape = Q.getcol("DATA_SHAPE", startrow=r, nrow=nrow)

        nchan = num_chan[spw_id[ddid]]
        ncorr = num_corr[pol_id[ddid]]

        shape = np.stack([nchan, ncorr], axis=1)
        if not np.array_equal(data_shape, shape):
            raise TableConformanceError(
                "DATA shapes don't match "
                "SPECTRAL_WINDOW.NUM_CHAN and "
                "POLARIZATION.NUM_CORR mapped via "
                "DATA_DESC_ID")

    for spw, nchan in enumerate(num_chan):
        chan_width = S.getcol("CHAN_WIDTH", startrow=spw, nrow=1)
        chan_freq = S.getcol("CHAN_FREQ", startrow=spw, nrow=1)
        effective_bw = S.getcol("EFFECTIVE_BW", startrow=spw, nrow=1)
        resolution = S.getcol("RESOLUTION", startrow=spw, nrow=1)

        _check_column_shape(chan_width, "CHAN_WIDTH",
                            "NUM_CHAN", (1, nchan))
        _check_column_shape(chan_freq, "CHAN_FREQ",
                            "NUM_CHAN", (1, nchan))
        _check_column_shape(effective_bw, "EFFECTIVE_BW",
                            "NUM_CHAN", (1, nchan))
        _check_column_shape(resolution, "RESOLUTION",
                            "NUM_CHAN", (1, nchan))

    for pol, ncorr in enumerate(num_corr):
        corr_type = P.getcol("CORR_TYPE", startrow=pol, nrow=1)
        corr_product = P.getcol("CORR_PRODUCT", startrow=pol, nrow=1)

        _check_column_shape(corr_type, "CORR_TYPE",
                            "NUM_CORR", (1, ncorr))
        _check_column_shape(corr_product, "CORR_PRODUCT",
                            "NUM_CORR", (1, ncorr, 2))
Example #20
    def getConvergedIteration(self, start_time, end_time, start_freq, end_freq):
        # Create fuzzy times and frequencies
        start_time, end_time=self.fuzzyTime(start_time, end_time)
        start_freq, end_freq=self.fuzzyFreq(start_freq, end_freq)

        taqlcmd="SELECT STARTTIME, ENDTIME, ITER FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND LASTITER=TRUE"
        
        result=pt.taql(taqlcmd)              # execute TaQL command
        iteration=result.getcol("ITER")      # get ITER parameter

        return iteration
Example #21
    def _convert_polarization(self, time_slice_filtered_path_list):
        """
        # convert to circular polarization 
        # method based on input from Javier Moldon <*****@*****.**>
        """
        for time_slice in time_slice_filtered_path_list:
            #apply the polarization to each ms
            try:
                opened_ms=pt.taql(
                    "update {0}/ set DATA = mscal.stokes(DATA,'circ')".format(time_slice))
                opened_ms.close()

                opened_ms=pt.taql(
                    "update {0}/POLARIZATION set CORR_TYPE=[5,6,7,8]".format(time_slice))
                opened_ms.close()
                self.logger.info("Converted to circular polarization using taql")
            except Exception, exception:
                self.logger.error("Problem applying polarization to ms: {0}".format(
                    time_slice))
                raise exception
Example #22
def uvflux(ms,column,baseline):
  taqlquery="select gstddev(SUMMED) as STDVALS, gmean(SUMMED) as MEANVALS, gcount(SUMMED) as NVALS from (select gmean(mean(0.5*(abs(%s[,0])+abs(%s[,3])))) as SUMMED from %s where (mscal.baseline('%s') and any(FLAG)!=True) GROUP BY TIME)"%(column,column,ms,baseline)
  stats = pt.taql(taqlquery)
  if stats.nrows() > 0:
    meanvals = stats.getcol('MEANVALS')[0]
    nvals = stats.getcol('NVALS')[0]
    stdvals = stats.getcol('STDVALS')[0]/sqrt(nvals)
    print ms,': from',nvals,'time samples, flux is',meanvals,'+/-',stdvals,'(%.2f%% fractional uncertainty)'%((stdvals/meanvals)*100.)
    return meanvals,stdvals
  else:
    print 'Subband %s is totally flagged, no fluxes here'%ms
    return 0., 0. 
Example #23
def taql_factory(query, style='Python', tables=[]):
    """ Calls pt.taql, converting TableProxy's in tables to pyrap tables """
    tabs = [t._table for t in tables]

    for t in tables:
        t._acquire(READLOCK)

    try:
        return pt.taql(query, style=style, tables=tabs)
    finally:
        for t in tables:
            t._release(READLOCK)
Example #24
def getAzEl(pointing,time,position,ha_limit=-1000):

    if HAS_PYRAP:
        if ha_limit==-1000:
            azel=radec2azel(pointing[0],pointing[1],time=str(time)+'s',pos=position);
            az=azel['m0']['value']
            el=azel['m1']['value']
        else:
            me=measures()
            p=me.position("ITRF",str(position[0])+'m',str(position[1])+'m',str(position[2])+'m')
            t=me.epoch("UTC",qu.quantity(str(time)+'s'))
            phasedir=me.direction('J2000',str(pointing[0])+'rad',str(pointing[1])+'rad')
            me.doframe(p)
            me.doframe(t)
            hadec=me.measure(phasedir,"HADEC")
            if abs(hadec['m0']['value'])>ha_limit:
                print "below horizon",tab.taql('calc ctod($time s)')[0],degrees(hadec['m0']['value']),degrees(hadec['m1']['value'])
                return 999,999
            else:
                azel=me.measure(phasedir,"AZEL")
  
                az=azel['m0']['value'];
                el=azel['m1']['value'];
    elif HAS_EPHEM:
        if ha_limit!=-1000:
            print "limiting on HA/DEC not implemented for PyEphem yet, ignoring"
        location_lat, location_lon, location_height = ITRFToWGS84(position[0], position[1], position[2])
        location = ephem.Observer()
        # convert geodetic latitude to geocentric
        # flattening, f, defined above for WGS84 stuff
        geodet_lat = math.radians(location_lat)
        tan_geocentric_latitude =  math.tan(geodet_lat) * (1 - f) **2
        geocentric_latitude = GeodeticToGeocentricLat(geodet_lat, location_height)
        location.lat = geocentric_latitude
        location.lon = math.radians(location_lon)
        location.elevation = location_height
        location.pressure = 0.0
        #  convert to Dublin Julian Date for PyEphem
        location.date =  time/86400.0 - 15019.5
        lst = location.sidereal_time()
        equatorial = ephem.Equatorial(str(12.0 * math.degrees(pointing[0])/180),str(math.degrees(pointing[1])))
        body = ephem.FixedBody()
        body._ra = equatorial.ra
        body._dec = equatorial.dec
        body._epoch = equatorial.epoch
        body.compute(location)
        az = math.degrees(body.az)   * math.pi / 180.0
        el = math.degrees(body.alt) * math.pi / 180.0
    else: 
      print ('failure to get azimuth and elevation! Exiting!')
      return -1,-1
    return az,el
Example #25
    def getConvergedParameter(self, parameter, start_time, end_time, start_freq, end_freq):
        # Create fuzzy times and frequencies
        start_time, end_time=self.fuzzyTime(start_time, end_time)
        start_freq, end_freq=self.fuzzyFreq(start_freq, end_freq)

        taqlcmd="SELECT STARTTIME, ENDTIME, LASTITER, " + parameter + " FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND LASTITER=TRUE"

        print "taqlcmd = ", taqlcmd          # DEBUG
        
        result=pt.taql(taqlcmd)              # execute TaQL command
        selection=result.getcol(parameter)   # get parameter column

        return selection 
Example #26
    def run(self, infile, baseline_filename):
        """
        baseline_filename points to a file containing a pickled array of
        antenna pairs.
        """
        with log_time(self.logger):
            if os.path.exists(infile):
                self.logger.info("Processing %s" % (infile))
            else:
                self.logger.error("Dataset %s does not exist" % (infile))
                return 1

            if not os.path.exists(baseline_filename):
                self.logger.error(
                    "baseline file %s not found" % (baseline_filename)
                )
                return 1

            with open(baseline_filename) as file:
                baselines = load(file)

            antenna1, antenna2 = [], []
            for baseline in baselines:
                ant1, ant2 = baseline.split("&")
                antenna1.append(int(ant1))
                antenna2.append(int(ant2))


            if antenna1 and antenna2:
                cmd = "UPDATE %s SET FLAG=True WHERE any(ANTENNA1=%s and ANTENNA2=%s)" % \
                    (infile, str(antenna1), str(antenna2))
                self.logger.info("Running TaQL: " + cmd)

                try:
                    taql(cmd)
                except Exception, e:
                    self.logger.warn(str(e))
                    return 1
            else:
                self.logger.warn("No baselines specified to flag")
Example #27
    def readFreqColumnTimeSlot(self, parameter, start_time, end_time, iteration="last"):
        # Get first all unique time slots
        if self.timeSlots.nrows()==0:
            self.timeSlots=self.getTimeSlots()

        # Create fuzzy times
        start_time, end_time = self.fuzzyTime(start_time, end_time)

        parmsDict={}       # create an empty dictionary

        if iteration == "last":
           parmsDict["result"]="last"

           taqlcmd="SELECT STARTFREQ, ENDFREQ, " + parameter + " FROM " + self.tablename + " WHERE STARTTIME=" + start_time + " AND ENDTIME=" + end_time + " WHERE ITER=MAXITER ORDER BY ITER"
           selection=pt.taql(taqlcmd)           # execute TaQL command
           parmsDict["last"]=selection.getcol(parameter)    # select column with wanted parameter

           return parmsDict

        elif iteration == "all":
            parmsDict["result"]="all"

            for iter in range(1, self.getMaxIter()+1):
                taqlcmd="SELECT DISTINCT STARTFREQ, ENDFREQ, " + parameter + ", ITER FROM " + self.tablename + " WHERE STARTTIME=" + start_time + " AND ENDTIME=" + end_time
                selection=pt.taql(taqlcmd)           # execute TaQL command
                print selection                      # DEBUG
                parmsDict[str(iter)]=selection.getcol(parameter)    # select column with wanted parameter
            return parmsDict

        elif type(iteration).__name__ == "int":
            parmsDict["result"]="iteration"

            taqlcmd="SELECT STARTFREQ, ENDFREQ, " + parameter + " FROM " + self.tablename + " WHERE STARTTIME=" + start_time + " AND ENDTIME=" + end_time + " WHERE ITER=", str(iteration)
            selection=pt.taql(taqlcmd)
            parmsDict[str(iteration)]=selection.getcol(parameter)
            
            return parmsDict

        else:
            parmsDict["result"]="False"
            return parmsDict
Example #28
    def readCells(self, start_time, end_time, start_freq, end_freq, iteration="last"):
        print "readCells(self, start_time, end_time, start_freq, end_freq)"     # DEBUG

        cellsDict={}       # create an empty dictionary

        # return all iterations (default behaviour)
        if iteration == "all":
           cellsDict["result"]="all"

           # Loop over all iterations
           for iter in range(1, self.getMaxIter()+1):
                 taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_time) + " AND ENDTIME<=" + str(end_time) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND ITER=" + str(iter)
                 cellsDict[iter]=pt.taql(taqlcmd)           # execute TaQL command
           return cellsDict

        # return the last iteration only
        elif iteration == "Last" or iteration == "last":
           print "readCells(): last"        # DEBUG

           cellsDict["result"]="last"

           taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTTIME>=" + str(start_freq) + " AND ENDTIME<=" + str(end_freq) + " AND STARTFREQ>=" + str(start_freq) + " AND ENDFREQ<=" + str(end_freq) + " AND LASTITER=TRUE"
           cellsDict["last"]=pt.taql(taqlcmd)           # execute TaQL command
   
           return cellsDict

        # return only a particular iteration
        elif type(iteration).__name__ == "int":
            print "iteration: ", iteration    # DEBUG

            cellsDict["result"]="iteration"

            taqlcmd="SELECT * FROM " + self.tablename + " WHERE STARTFREQ=" + str(start_freq) + " AND ENDFREQ=" + str(end_freq) + " AND ITER=" + str(iteration) + " ORDERBY STARTFREQ"
            selection=pt.taql(taqlcmd)        # execute TaQL command      

            cellsDict[iteration]=selection

            return cellsDict
Example #29
def updateObsTable(image, msName, minbl, maxbl, aswvl, usedCounts, visCounts,
                   minTime, maxTime, totTime):
    obstab = pt.table(image.name() + "/LOFAR_OBSERVATION",
                      readonly=False,
                      ack=False)
    oritab = pt.table(image.name() + "/LOFAR_ORIGIN", ack=False)
    minfreq = pt.taql("calc min([select FREQUENCY_MIN from '" + oritab.name() +
                      "'])")
    maxfreq = pt.taql("calc max([select FREQUENCY_MAX from '" + oritab.name() +
                      "'])")
    obstab.putcell("OBSERVATION_FREQUENCY_MIN", 0, minfreq[0])
    obstab.putcell("OBSERVATION_FREQUENCY_MAX", 0, maxfreq[0])
    obstab.putcell("OBSERVATION_FREQUENCY_CENTER", 0,
                   (minfreq[0] + maxfreq[0]) / 2)
    obstab.putcell("OBSERVATION_INTEGRATION_TIME", 0, totTime)
    obstab.putcell("OBSERVATION_START", 0, minTime)
    obstab.putcell("OBSERVATION_END", 0, maxTime)
    obstab.putcell("TIME_RANGE", 0, (minTime, maxTime))
    obstab.putcell("FILENAME", 0, os.path.basename(image.name()))
    obstab.putcell("FILETYPE", 0, "sky")
    pt.taql("update '" + obstab.name() + "' set FILEDATE = mjd(date()), " +
            "RELEASE_DATE = mjd(date()+365)")
    # Determine minimum and maximum baseline length
    # If needed, convert from wavelengths to meters.
    mstab = pt.table(msName, ack=False)
    if aswvl:
        minbl *= 2.99792e8 / maxfreq[0]
        maxbl *= 2.99792e8 / minfreq[0]
    if minbl <= 0:
        mbl = pt.taql("calc sqrt(min([select sumsqr(UVW[:2]) from " + msName +
                      "]))")
        minbl = max(mbl[0], abs(minbl))
    if maxbl <= 0:
        mbl = pt.taql("calc sqrt(max([select sumsqr(UVW[:2]) from " + msName +
                      "]))")
        if maxbl == 0:
            maxbl = mbl[0]
        else:
            maxbl = min(mbl[0], abs(maxbl))
    mstab.close()
    # Add and fill a few extra columns.
    col1 = pt.makescacoldesc("MIN_BASELINE_LENGTH", 0, valuetype='double')
    col2 = pt.makescacoldesc("MAX_BASELINE_LENGTH", 0, valuetype='double')
    col3 = pt.makearrcoldesc("NVIS_USED", 0, valuetype='int')
    col4 = pt.makearrcoldesc("NVIS_TOTAL", 0, valuetype='int')
    obstab.addcols(pt.maketabdesc([col1, col2, col3, col4]))
    obstab.putcolkeyword("MIN_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcolkeyword("MAX_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcell("MIN_BASELINE_LENGTH", 0, minbl)
    obstab.putcell("MAX_BASELINE_LENGTH", 0, maxbl)
    # Get sum for all MSs.
    tusedCounts = usedCounts.sum(axis=0)
    tvisCounts = visCounts.sum(axis=0)
    obstab.putcell("NVIS_USED", 0, tusedCounts)
    obstab.putcell("NVIS_TOTAL", 0, tvisCounts)
    obstab.close()
    oritab.close()
    print "Updated subtable LOFAR_OBSERVATION"
Example #30
def uvflux(ms, column, baseline):
    taqlquery = "select gstddev(SUMMED) as STDVALS, gmean(SUMMED) as MEANVALS, gcount(SUMMED) as NVALS from (select gmean(mean(0.5*(abs(%s[,0])+abs(%s[,3])))) as SUMMED from %s where (mscal.baseline('%s') and any(FLAG)!=True) GROUP BY TIME)" % (
        column, column, ms, baseline)
    stats = pt.taql(taqlquery)
    if stats.nrows() > 0:
        meanvals = stats.getcol('MEANVALS')[0]
        nvals = stats.getcol('NVALS')[0]
        stdvals = stats.getcol('STDVALS')[0] / sqrt(nvals)
        print ms, ': from', nvals, 'time samples, flux is', meanvals, '+/-', stdvals, '(%.2f%% fractional uncertainty)' % (
            (stdvals / meanvals) * 100.)
        return meanvals, stdvals
    else:
        print 'Subband %s is totally flagged, no fluxes here' % ms
        return 0., 0.
Example #31
def get_time_for_file_name(ms_file):
    """get_time_for_file_name(ms_file) -> str

    Given an MS file, query it to get the time and then process the string to generate a time for a filename
    """
    time = pt.taql('calc ctod(mjdtodate([select TIME from %s LIMIT 1]))' %
                   ms_file)
    # above returns something that looks like:
    #               {'array': ['2013/02/15/06:53:06.003'], 'shape': [1, 1]}
    # code below returns something that looks like (HH_MM_SS), e.g.:
    #               06_53_06
    file_name_time = time['array'][0].split('/')[-1].split(".")[0].replace(
        ':', '_')
    return file_name_time
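For clarity, the string handling on its own, applied to the example timestamp from the comments above:
ts = '2013/02/15/06:53:06.003'
file_name_time = ts.split('/')[-1].split('.')[0].replace(':', '_')
# file_name_time == '06_53_06'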
Example #32
def ms(tmp_path_factory):
    msdir = tmp_path_factory.mktemp("msdir", numbered=True)
    fn = os.path.join(str(msdir), "test.ms")

    create_table_query = f"""
    CREATE TABLE {fn}
    [FIELD_ID I4,
    ANTENNA1 I4,
    ANTENNA2 I4,
    DATA_DESC_ID I4,
    SCAN_NUMBER I4,
    STATE_ID I4,
    UVW R8 [NDIM=1, SHAPE=[3]],
    TIME R8,
    DATA C8 [NDIM=2, SHAPE=[16, 4]]]
    LIMIT 10
    """

    # Common grouping columns
    field = [0, 0, 0, 1, 1, 1, 1, 2, 2, 2]
    ddid = [0, 0, 0, 0, 0, 0, 0, 1, 1, 1]
    scan = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]

    # Common indexing columns
    time = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    ant1 = [0, 0, 1, 1, 1, 2, 1, 0, 0, 1]
    ant2 = [1, 2, 2, 3, 2, 1, 0, 1, 1, 2]

    # Column we'll write to
    state = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

    rs = np.random.RandomState(42)
    data_shape = (len(state), 16, 4)
    data = rs.random_sample(data_shape) + rs.random_sample(data_shape) * 1j
    uvw = rs.random_sample((len(state), 3)).astype(np.float64)

    # Create the table
    with pt.taql(create_table_query) as ms:
        ms.putcol("FIELD_ID", field)
        ms.putcol("DATA_DESC_ID", ddid)
        ms.putcol("ANTENNA1", ant1)
        ms.putcol("ANTENNA2", ant2)
        ms.putcol("SCAN_NUMBER", scan)
        ms.putcol("STATE_ID", state)
        ms.putcol("UVW", uvw)
        ms.putcol("TIME", time)
        ms.putcol("DATA", data)

    yield fn
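A minimal sketch of a test consuming the fixture above (assuming pytest collects both from the same module/conftest and pt is imported there); the query and assertion follow from the fixture's FIELD_ID data:
def test_field_zero_rows(ms):
    # The fixture writes FIELD_ID = [0,0,0,1,1,1,1,2,2,2], so three rows match.
    with pt.taql("SELECT * FROM {ms} WHERE FIELD_ID == 0".format(ms=ms)) as q:
        assert q.nrows() == 3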
Example #33
    def _convert_polarization(self, time_slice_filtered_path_list):
        """
        # convert to circular polarization 
        # method based on input from Javier Moldon <*****@*****.**>
        """
        for time_slice in time_slice_filtered_path_list:
            #apply the polarization to each ms
            try:
                opened_ms = pt.taql(
                    "update {0}/ set DATA = mscal.stokes(DATA,'circ')".format(
                        time_slice))
                opened_ms.close()

                opened_ms = pt.taql(
                    "update {0}/POLARIZATION set CORR_TYPE=[5,6,7,8]".format(
                        time_slice))
                opened_ms.close()
                self.logger.info(
                    "Converted to circular polarization using taql")
            except Exception, exception:
                self.logger.error(
                    "Problem applying polarization to ms: {0}".format(
                        time_slice))
                raise exception
Example #34
    def get_image_sizes(self,
                        cellsize_highres_deg=None,
                        cellsize_lowres_deg=None,
                        fieldsize_highres=2.5,
                        fieldsize_lowres=6.5):
        """
        Sets sizes for initsubtract images

        The image sizes are scaled from the mean primary-beam FWHM. For
        the high-res image, we use 2.5 * FWHM; for low-res, we use 6.5 * FWHM.

        Parameters
        ----------
        cellsize_highres_deg : float, optional
            cellsize for the high-res images in deg
        cellsize_lowres_deg : float, optional
            cellsize for the low-res images in deg
        fieldsize_highres : float, optional
            Size of the high-res images, as a multiple of the FWHM.
        fieldsize_lowres : float, optional
            Size of the low-res images, as a multiple of the FWHM.
        """
        if cellsize_highres_deg:
            self.cellsize_highres_deg = cellsize_highres_deg
        if cellsize_lowres_deg:
            self.cellsize_lowres_deg = cellsize_lowres_deg
        if not hasattr(self, 'mean_el_rad'):
            for MS_id in xrange(self.numMS):
                # calculate mean elevation
                tab = pt.table(self.files[MS_id], ack=False)
                el_values = pt.taql("SELECT mscal.azel1()[1] AS el from " +
                                    self.files[MS_id] +
                                    " limit ::10000").getcol("el")
                if MS_id == 0:
                    global_el_values = el_values
                else:
                    global_el_values = np.hstack((global_el_values, el_values))
            self.mean_el_rad = np.mean(global_el_values)

        # Calculate mean FOV
        sec_el = 1.0 / np.sin(self.mean_el_rad)
        self.fwhm_deg = 1.1 * (
            (3.0e8 / self.freq) / self.diam) * 180. / np.pi * sec_el
        self.imsize_high_res = self.get_optimum_size(
            self.fwhm_deg / self.cellsize_highres_deg * fieldsize_highres)
        self.imsize_low_res = self.get_optimum_size(
            self.fwhm_deg / self.cellsize_lowres_deg * fieldsize_lowres)
        return (self.imsize_high_res, self.imsize_low_res)
Example #35
def mypointsgenerator(refms,stationname):
  ''' Generate a list of all az-el pixels that are used in the given list of measurement sets '''
  h = pyfits.open('wcs_azimuth_201.fits')
  w = pywcs.WCS(h[0].header)
  
  allpix=set() 
  print 'refms in mypointsgenerator:',refms
  for msname in refms:
    print "Extracting az/el from", msname, "for station", stationname
    t=pt.taql('select mscal.azel1() deg as AZEL from %s where [select NAME from ::ANTENNA][ANTENNA1]=="%s"'%(msname,stationname))
    if len(t)>0:
      pix=set(tuple(azel) for azel in (np.array(w.wcs_sky2pix(t.getcol('AZEL'),0))+0.5).astype(int))
      allpix = allpix.union(pix)
  print 'allpix for', refms, 'and station', stationname, ':', allpix
  for i, j in allpix:
    yield i,j
Example #36
def countVisTime(msNames, taqlStr, baselineStr, minbl, maxbl):
    print "Counting visibility flags ..."
    t = pt.table(msNames[0] + '/ANTENNA', ack=False)
    nant = t.nrows()
    t.close()
    visCounts = np.zeros((len(msNames), nant, nant), 'int')
    usedCounts = np.zeros((len(msNames), nant, nant), 'int')
    minTime = +1e30
    maxTime = -1e30
    totTime = 0.
    for j in range(len(msNames)):
        # If baseline selection is done, use msselect to apply it.
        msname = msNames[j]
        if len(baselineStr) > 0:
            msname = msNames[j] + '.sel_addimginfo'
            os.system("msselect 'in=" + msNames[j] + "' 'out=" + msname +
                      "' 'baseline=" + baselineStr + "'")
        # Skip auto-correlations and apply possible TaQL selection
        whereStr = ' where ANTENNA1!=ANTENNA2'
        if len(taqlStr) > 0:
            whereStr += ' && (' + taqlStr + ')'
        if minbl > 0:
            whereStr += ' && sumsqr(UVW[:2]) >= sqr(' + str(minbl) + ')'
        if maxbl > 0:
            whereStr += ' && sumsqr(UVW[:2]) <= sqr(' + str(maxbl) + ')'
        t = pt.taql(
            'select TIME-0.5*INTERVAL as STIME, TIME+0.5*INTERVAL as ETIME, ANTENNA1,ANTENNA2,nfalse(FLAG) as NUSED,count(FLAG) as NVIS from '
            + msname + whereStr + ' giving as memory')
        ant1 = t.getcol('ANTENNA1')
        ant2 = t.getcol('ANTENNA2')
        nused = t.getcol('NUSED')
        nvis = t.getcol('NVIS')
        # Count number of used visibilities per antenna per MS
        for i in range(len(ant1)):
            usedCounts[j, ant1[i], ant2[i]] += nused[i]
            usedCounts[j, ant2[i], ant1[i]] += nused[i]
            visCounts[j, ant1[i], ant2[i]] += nvis[i]
            visCounts[j, ant2[i], ant1[i]] += nvis[i]
        sTime = t.getcol('STIME').min()
        eTime = t.getcol('ETIME').max()
        minTime = min(minTime, t.getcol('STIME').min())
        maxTime = max(maxTime, t.getcol('ETIME').max())
        totTime += (eTime - sTime)
        t.close()
        if msname != msNames[j]:
            os.system('rm -rf ' + msname)
    return (usedCounts, visCounts, minTime, maxTime, totTime)
Example #37
def updateObsTable (image, msName, minbl, maxbl, aswvl,
                    usedCounts, visCounts, minTime, maxTime, totTime):
    obstab = pt.table (image.name() + "/LOFAR_OBSERVATION", readonly=False,
                       ack=False)
    oritab = pt.table (image.name() + "/LOFAR_ORIGIN", ack=False)
    minfreq = pt.taql ("calc min([select FREQUENCY_MIN from '" +
                       oritab.name() + "'])")
    maxfreq = pt.taql ("calc max([select FREQUENCY_MAX from '" +
                       oritab.name() + "'])") 
    obstab.putcell ("OBSERVATION_FREQUENCY_MIN", 0, minfreq[0]);
    obstab.putcell ("OBSERVATION_FREQUENCY_MAX", 0, maxfreq[0]);
    obstab.putcell ("OBSERVATION_FREQUENCY_CENTER", 0, (minfreq[0]+maxfreq[0])/2);
    obstab.putcell ("OBSERVATION_INTEGRATION_TIME", 0, totTime);
    obstab.putcell ("OBSERVATION_START", 0, minTime);
    obstab.putcell ("OBSERVATION_END", 0, maxTime);
    obstab.putcell ("TIME_RANGE", 0, (minTime, maxTime));
    obstab.putcell ("FILENAME", 0, os.path.basename(image.name()))
    obstab.putcell ("FILETYPE", 0, "sky")
    pt.taql ("update '" + obstab.name() + "' set FILEDATE = mjd(date()), " +
             "RELEASE_DATE = mjd(date()+365)")
    # Determine minimum and maximum baseline length
    # If needed, convert from wavelengths to meters.
    mstab = pt.table(msName, ack=False)
    if aswvl:
        minbl *= 2.99792e8 / maxfreq[0]
        maxbl *= 2.99792e8 / minfreq[0]
    if minbl <= 0:
        mbl = pt.taql ("calc sqrt(min([select sumsqr(UVW[:2]) from " + msName + "]))")
        minbl = max(mbl[0], abs(minbl))
    if maxbl <= 0:
        mbl = pt.taql ("calc sqrt(max([select sumsqr(UVW[:2]) from " + msName + "]))")
        if maxbl == 0:
            maxbl = mbl[0]
        else:
            maxbl = min(mbl[0], abs(maxbl))
    mstab.close()
    # Add and fill a few extra columns.
    col1 = pt.makescacoldesc ("MIN_BASELINE_LENGTH", 0, valuetype='double')
    col2 = pt.makescacoldesc ("MAX_BASELINE_LENGTH", 0, valuetype='double')
    col3 = pt.makearrcoldesc ("NVIS_USED", 0, valuetype='int')
    col4 = pt.makearrcoldesc ("NVIS_TOTAL", 0, valuetype='int')
    obstab.addcols (pt.maketabdesc ([col1, col2, col3, col4]))
    obstab.putcolkeyword ("MIN_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcolkeyword ("MAX_BASELINE_LENGTH", "QuantumUnits", ["m"])
    obstab.putcell ("MIN_BASELINE_LENGTH", 0, minbl)
    obstab.putcell ("MAX_BASELINE_LENGTH", 0, maxbl)
    # Get sum for all MSs.
    tusedCounts = usedCounts.sum (axis=0)
    tvisCounts  =  visCounts.sum (axis=0)
    obstab.putcell ("NVIS_USED", 0, tusedCounts)
    obstab.putcell ("NVIS_TOTAL", 0, tvisCounts)
    obstab.close()
    oritab.close()
    print "Updated subtable LOFAR_OBSERVATION"
Example #38
def ms(tmp_path_factory):
    msdir = tmp_path_factory.mktemp("msdir", numbered=False)
    fn = os.path.join(str(msdir), "test.ms")

    create_table_query = """
    CREATE TABLE %s
    [FIELD_ID I4,
    ANTENNA1 I4,
    ANTENNA2 I4,
    DATA_DESC_ID I4,
    SCAN_NUMBER I4,
    STATE_ID I4,
    TIME R8,
    DATA C8 [NDIM=2, SHAPE=[16, 4]]]
    LIMIT 10
    """ % fn

    # Common grouping columns
    field = [0, 0, 0, 1, 1, 1, 1, 2, 2, 2]
    ddid = [0, 0, 0, 0, 0, 0, 0, 1, 1, 1]
    scan = [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]

    # Common indexing columns
    time = [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1]
    ant1 = [0, 0, 1, 1, 1, 2, 1, 0, 0, 1]
    ant2 = [1, 2, 2, 3, 2, 1, 0, 1, 1, 2]

    # Column we'll write to
    state = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]

    data_shape = (len(state), 16, 4)

    data = np.random.random(data_shape) + np.random.random(data_shape) * 1j

    # Create the table
    with pt.taql(create_table_query) as ms:
        ms.putcol("FIELD_ID", field)
        ms.putcol("DATA_DESC_ID", ddid)
        ms.putcol("ANTENNA1", ant1)
        ms.putcol("ANTENNA2", ant2)
        ms.putcol("SCAN_NUMBER", scan)
        ms.putcol("STATE_ID", state)
        ms.putcol("TIME", time)
        ms.putcol("DATA", data)

    yield fn
Example #39
def countVisTime (msNames, taqlStr, baselineStr, minbl, maxbl):
    print "Counting visibility flags ..."
    t = pt.table(msNames[0] + '/ANTENNA', ack=False)
    nant = t.nrows();
    t.close()
    visCounts  = np.zeros ((len(msNames), nant, nant), 'int');
    usedCounts = np.zeros ((len(msNames), nant, nant), 'int');
    minTime    = +1e30
    maxTime    = -1e30
    totTime    = 0.
    for j in range(len(msNames)):
        # If baseline selection is done, use msselect to apply it.
        msname = msNames[j];
        if len(baselineStr) > 0:
            msname = msNames[j] + '.sel_addimginfo'
            os.system ("msselect 'in=" + msNames[j] + "' 'out=" + msname +
                       "' 'baseline=" + baselineStr + "'")
        # Skip auto-correlations and apply possible TaQL selection
        whereStr = ' where ANTENNA1!=ANTENNA2'
        if len(taqlStr) > 0:
            whereStr += ' && (' + taqlStr + ')'
        if minbl > 0:
            whereStr += ' && sumsqr(UVW[:2]) >= sqr(' + str(minbl) + ')'
        if maxbl > 0:
            whereStr += ' && sumsqr(UVW[:2]) <= sqr(' + str(maxbl) + ')'
        t = pt.taql ('select TIME-0.5*INTERVAL as STIME, TIME+0.5*INTERVAL as ETIME, ANTENNA1,ANTENNA2,nfalse(FLAG) as NUSED,count(FLAG) as NVIS from ' +
                     msname + whereStr + ' giving as memory')
        ant1  = t.getcol ('ANTENNA1')
        ant2  = t.getcol ('ANTENNA2')
        nused = t.getcol ('NUSED')
        nvis  = t.getcol ('NVIS')
        # Count number of used visibilities per antenna per MS
        for i in range(len(ant1)):
            usedCounts[j, ant1[i], ant2[i]] += nused[i]
            usedCounts[j, ant2[i], ant1[i]] += nused[i]
            visCounts [j, ant1[i], ant2[i]] += nvis[i]
            visCounts [j, ant2[i], ant1[i]] += nvis[i]
        sTime = t.getcol('STIME').min()
        eTime = t.getcol('ETIME').max()
        minTime = min(minTime, sTime)
        maxTime = max(maxTime, eTime)
        totTime += (eTime - sTime)
        t.close()
        if msname != msNames[j]:
            os.system ('rm -rf ' + msname)
    return (usedCounts, visCounts, minTime, maxTime, totTime)
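A possible follow-up (a sketch; the call arguments are illustrative): collapse the returned counts into a flagged percentage per antenna.

usedCounts, visCounts, minTime, maxTime, totTime = countVisTime(msNames, '', '', 0., 0.)
nused = usedCounts.sum(axis=0).sum(axis=1)   # per-antenna used visibilities over all MSs
ntot  = visCounts.sum(axis=0).sum(axis=1)
for ant in range(len(ntot)):
    if ntot[ant] > 0:
        print "antenna", ant, ":", 100. * (1. - float(nused[ant]) / ntot[ant]), "% flagged"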
Example #40
def flagrms(rootname, threshold=9):

    t = pt.table(rootname + '/ANTENNA', readonly=True, ack=False)
    #antennaname=pt.tablecolumn(t,'NAME')
    antennaname = list(t.col('NAME'))
    t.close()
    an = len(antennaname)
    print 'There are', an, 'antennas'

    t = pt.table(rootname, readonly=True, ack=False)
    xxm = np.zeros(an)
    yym = np.zeros(an)
    xxrms = np.zeros(an)
    yyrms = np.zeros(an)
    for i, ant in enumerate(antennaname):

        newt = pt.taql(
            'select CORRECTED_DATA,FLAG from $t where ANTENNA1=$i or ANTENNA2=$i'
        )
        channels, corrs = np.shape(newt[0]['CORRECTED_DATA'])

        xx = newt.getcol('CORRECTED_DATA')[:, :, 0]
        yy = newt.getcol('CORRECTED_DATA')[:, :, 3]
        fxx = np.logical_not(newt.getcol('FLAG'))[:, :, 0]
        fyy = np.logical_not(newt.getcol('FLAG'))[:, :, 3]
        axx = np.abs(xx)
        ayy = np.abs(yy)
        xxm[i] = abs(np.mean(axx[fxx]))
        yym[i] = abs(np.mean(ayy[fyy]))
        xxrms[i] = np.std(axx[fxx])
        yyrms[i] = np.std(ayy[fyy])
        print i, ant, xxm[i], yym[i], xxrms[i], yyrms[i]

    fsum = (xxm + yym) / 2.0
    rmsum = (xxrms + yyrms) / 2.0
    fmed = np.median(fsum)
    rmmed = np.median(rmsum)
    badness = fsum * rmsum / rmmed / fmed
    flaglist = []
    for i, ant in enumerate(antennaname):
        print i, antennaname[i], fsum[i] / fmed, rmsum[i] / rmmed, badness[i], 'Flag' if (badness[i] > threshold) else ''
        if badness[i] > threshold:
            flaglist.append(antennaname[i])

    return flaglist
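One way to act on the returned list (a sketch, not part of the routine above; 'myms.ms' is a placeholder): flag every baseline that involves a bad antenna.

badants = flagrms('myms.ms', threshold=9)
if len(badants) > 0:
    anttab = pt.table('myms.ms/ANTENNA', ack=False)
    badidx = [i for i, name in enumerate(anttab.getcol('NAME')) if name in badants]
    anttab.close()
    # set the whole FLAG array to True for rows on the affected baselines
    pt.taql("update myms.ms set FLAG=True where ANTENNA1 in %s or ANTENNA2 in %s" % (badidx, badidx))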
Example #41
def big_ms(tmp_path_factory, request):
    msdir = tmp_path_factory.mktemp("big_ms_dir", numbered=False)
    fn = os.path.join(str(msdir), "big.ms")
    row = request.param
    chan = 4096
    corr = 4
    ant = 7

    create_table_query = f"""
    CREATE TABLE {fn}
    [FIELD_ID I4,
    TIME R8,
    ANTENNA1 I4,
    ANTENNA2 I4,
    DATA_DESC_ID I4,
    SCAN_NUMBER I4,
    STATE_ID I4,
    DATA C8 [NDIM=2, SHAPE=[{chan}, {corr}]]]
    LIMIT {row}
    """

    rs = np.random.RandomState(42)
    data_shape = (row, chan, corr)
    data = rs.random_sample(data_shape) + rs.random_sample(data_shape) * 1j

    # Create the table
    with pt.taql(create_table_query) as ms:
        ant1, ant2 = (a.astype(np.int32) for a in np.triu_indices(ant, 1))
        bl = ant1.shape[0]
        ant1 = np.repeat(ant1, (row + bl - 1) // bl)
        ant2 = np.repeat(ant2, (row + bl - 1) // bl)

        zeros = np.zeros(row, np.int32)

        ms.putcol("ANTENNA1", ant1[:row])
        ms.putcol("ANTENNA2", ant2[:row])

        ms.putcol("FIELD_ID", zeros)
        ms.putcol("DATA_DESC_ID", zeros)
        ms.putcol("SCAN_NUMBER", zeros)
        ms.putcol("STATE_ID", zeros)
        ms.putcol("TIME", np.linspace(0, 1.0, row, dtype=np.float64))
        ms.putcol("DATA", data)

    yield fn
Example #42
def ant_table(tmp_path_factory, wsrt_antenna_positions):
    ant_dir = tmp_path_factory.mktemp("ant_dir", numbered=True)
    fn = os.path.join(str(ant_dir), "ANTENNA")

    create_table_query = """
    CREATE TABLE %s
    [POSITION R8 [NDIM=1, SHAPE=[3]],
     NAME S]
    LIMIT %d
    """ % (fn, wsrt_antenna_positions.shape[0])

    names = ["ANTENNA-%d" % i for i in range(wsrt_antenna_positions.shape[0])]

    with pt.taql(create_table_query) as ant:
        ant.putcol("POSITION", wsrt_antenna_positions)
        ant.putcol("NAME", names)

    yield fn
Example #43
import numpy as np
import scipy.io
from math import sqrt
from pyrap.tables import table, taql

def ms2matlab(datacolname='DATA',msname='test1.MS',visfilename='bbsvis.mat',timeslot=0,modelcolname='MODEL_DATA',applyweights=True,channel=0):
  """Convert one timeslot and channel of a measurement set to a MATLAB .mat file."""
  t0=table(msname,ack=False)

  t=taql('SELECT *, MAX(WEIGHT_SPECTRUM) as MW FROM $t0 WHERE TIME IN (SELECT DISTINCT TIME FROM $t0 OFFSET $timeslot LIMIT 1)')

  data=t.col(datacolname)
  flag=t.col('FLAG')
  model=t.col(modelcolname)
  weight=t.col('WEIGHT_SPECTRUM')

  antenna1=t.col('ANTENNA1')
  antenna2=t.col('ANTENNA2')

  nants=len(set(antenna1))

  V  = np.zeros((nants*2,nants*2),dtype=complex)
  Vm = np.zeros((nants*2,nants*2),dtype=complex)
  W  = np.zeros((nants*2,nants*2),dtype=float)

  for i in range(data.nrows()):
    ant1=antenna1[i]
    ant2=antenna2[i]
    if ant1==ant2:
      continue
    for cor in range(4):
      if not flag[i][0][cor]:
        # note: flags are read from channel 0; weights use channel 0 when applied,
        # the selected channel otherwise; data/model use the selected channel
        if applyweights:
          V[ 2*ant1+cor//2][2*ant2+cor%2] = data[i][channel][cor]*sqrt(weight[i][0][cor])
          Vm[2*ant1+cor//2][2*ant2+cor%2] = model[i][channel][cor]*sqrt(weight[i][0][cor])
        else:
          V[ 2*ant1+cor//2][2*ant2+cor%2] = data[i][channel][cor]
          Vm[2*ant1+cor//2][2*ant2+cor%2] = model[i][channel][cor]
          W[ 2*ant1+cor//2][2*ant2+cor%2] = weight[i][channel][cor]
  
  if applyweights:
    scipy.io.savemat(visfilename, dict(V=V, Vm=Vm), oned_as="row")
  else:
    scipy.io.savemat(visfilename, dict(V=V, Vm=Vm, Wgt=W), oned_as="row")

  print "Stored timeslot", timeslot, " channel", channel, "of column", datacolname, "of file", msname, "as", visfilename
  print "Stored timeslot", timeslot, " channel", channel, "of column", modelcolname, "of file", msname, "in", visfilename
  if not applyweights:
    print "Stored timeslot", timeslot, "weights of file", msname, "in", visfilename
Example #44
def test_ms_read(ms, group_cols, index_cols, select_cols):
    xds = xds_from_ms(ms,
                      columns=select_cols,
                      group_cols=group_cols,
                      index_cols=index_cols,
                      chunks={"row": 2})

    order = orderby_clause(index_cols)

    with pt.table(ms, lockoptions='auto', ack=False) as T:  # noqa
        for ds in xds:
            group_col_values = [getattr(ds, c) for c in group_cols]
            where = where_clause(group_cols, group_col_values)
            query = "SELECT * FROM $T %s %s" % (where, order)

            with pt.taql(query) as Q:
                for c in select_cols:
                    np_data = Q.getcol(c)
                    dask_data = getattr(ds, c).data.compute()
                    assert np.all(np_data == dask_data)
Example #45
def addSubTable (image, msName, subName, removeColumns=[]):
    # Make a selection of all rows/columns of the MS subtable
    sel = pt.taql ("select * from '" + msName + "/" + subName + "'")
    # Remove the required columns.
    if len(removeColumns) > 0:
        sel.removecols (removeColumns)
    # Strip LOFAR_ from column names
    for col in sel.colnames():
        if len(col) > 6  and  col[:6] == "LOFAR_":
            sel.renamecol (col, col[6:])
    # Copy the subtable to the image and add it as a subtable.
    # Always prefix subtable name with LOFAR_.
    subNameOut = subName
    if len(subNameOut) < 6  or  subNameOut[:6] != "LOFAR_":
        subNameOut = "LOFAR_" + subNameOut
    subtab = sel.copy (image.name() + "/" + subNameOut, deep=True)
    image.putkeyword ("ATTRGROUPS." + subNameOut, subtab)
    print "Added subtable", subNameOut, "containing", subtab.nrows(), "rows"
    subtab.close()
    sel.close()
Example #47
    def findUnsolvedSolutions(self, start_time, end_time, start_freq, end_freq):
        print "solverQuery::findUnsolvedSolutions()"
        
        solutionsDict={}

        if self.TIMING == True:
            t1=time.time()
        
        # Criteria to determine unsolved solutions:
        # final value == initial value (where are these stored?)
        # final value 0
        # chiSqr 0?
        taqlcmd="SELECT STARTTIME, ENDTIME, ITER, SOLUTION FROM " + self.tablename +  " WHERE STARTTIME >= "+ str(start_time) 
        + " AND ENDTIME <= " + str(end_time) + " AND STARTFREQ >= " + str(start_freq) + " AND ENDFREQ <= " + str(end_freq) 
        + " AND LASTITER=TRUE AND CHISQR=0"
        
        selection=pt.taql(taqlcmd)
        solutionsDict["last"]=selection.getcol("SOLUTION")        
        
        return solutionsDict
Example #48
from itertools import zip_longest

def taql_factory(query, style='Python', tables=(), readonly=True):
    """ Calls pt.taql, converting TableProxy's in tables to pyrap tables """
    tables = [t._table_future.result() for t in tables]

    if isinstance(readonly, (tuple, list)):
        it = zip_longest(tables, readonly[:len(tables)],
                         fillvalue=readonly[-1])
    elif isinstance(readonly, bool):
        it = zip(tables, (readonly,)*len(tables))
    else:
        raise TypeError("readonly must be a bool or list of bools")

    for t, ro in it:
        t.lock(write=ro is False)

    try:
        return pt.taql(query, style=style, tables=tables)
    finally:
        for t in tables:
            t.unlock()
Example #49
def spw_table(tmp_path_factory, spw_chan_freqs):
    """ Simulate a SPECTRAL_WINDOW table with two spectral windows """
    spw_dir = tmp_path_factory.mktemp("spw_dir", numbered=True)
    fn = os.path.join(str(spw_dir), "SPECTRAL_WINDOW")

    create_table_query = """
    CREATE TABLE %s
    [NUM_CHAN I4,
     CHAN_FREQ R8 [NDIM=1]]
    LIMIT %d
    """ % (fn, len(spw_chan_freqs))

    with pt.taql(create_table_query) as spw:
        spw.putvarcol("NUM_CHAN", {"r%d" % i: s.shape[0]
                                   for i, s
                                   in enumerate(spw_chan_freqs)})
        spw.putvarcol("CHAN_FREQ", {"r%d" % i: s[None, :]
                                    for i, s
                                    in enumerate(spw_chan_freqs)})

    yield fn
Example #50
def mypointsgenerator(refms, stationname):
    ''' Generate a list of all az-el pixels that are used in the given list of measurement sets '''
    h = pyfits.open('wcs_azimuth_201.fits')
    w = pywcs.WCS(h[0].header)

    allpix = set()
    print 'refms in mypointsgenerator:', refms
    for msname in refms:
        print "Extracting az/el from", msname, "for station", stationname
        t = pt.taql(
            'select mscal.azel1() deg as AZEL from %s where [select NAME from ::ANTENNA][ANTENNA1]=="%s"'
            % (msname, stationname))
        if len(t) > 0:
            pix = set(
                tuple(azel)
                for azel in (np.array(w.wcs_sky2pix(t.getcol('AZEL'), 0)) +
                             0.5).astype(int))
            allpix = allpix.union(pix)
    print 'allpix for', refms, 'and station', stationname, ':', allpix
    for i, j in allpix:
        yield i, j
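Typical use of the generator (a sketch; the MS and station names are placeholders):

for i, j in mypointsgenerator(['L12345_SB000.MS'], 'CS002HBA0'):
    print 'az/el pixel', i, j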
Example #51
def addSubTable (image, msName, subName, removeColumns=[], newColumns=''):
    # Make a selection of all rows/columns of the MS subtable
    # Add possible new columns which have to be given in TaQL form like:
    #  ", 0. as OBSERVATION_INTEGRATION_TIME, '' as SOME_STRING_COLUMN"
    sel = pt.taql ("select *" + newColumns + " from '" + msName + "/" + subName + "'")
    # Remove the required columns.
    if len(removeColumns) > 0:
        sel.removecols (removeColumns)
    # Strip LOFAR_ from column names
    for col in sel.colnames():
        if len(col) > 6  and  col[:6] == "LOFAR_":
            sel.renamecol (col, col[6:])
    # Copy the subtable to the image and add it as a subtable.
    # Always prefix subtable name with LOFAR_.
    subNameOut = subName
    if len(subNameOut) < 6  or  subNameOut[:6] != "LOFAR_":
        subNameOut = "LOFAR_" + subNameOut
    subtab = sel.copy (image.name() + "/" + subNameOut, deep=True)
    image.putkeyword ("ATTRGROUPS." + subNameOut, subtab)
    print "Added subtable", subNameOut, "containing", subtab.nrows(), "rows"
    subtab.close()
    sel.close()
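An illustrative call (a sketch; img is an open pyrap image and the MS name is a placeholder), using the TaQL form of newColumns described in the comment above:

addSubTable(img, 'L12345.MS', 'OBSERVATION',
            removeColumns=['LOG', 'SCHEDULE'],
            newColumns=", 0. as OBSERVATION_INTEGRATION_TIME")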
Example #52
def addSubTable(image, msName, subName, removeColumns=[], newColumns=''):
    # Make a selection of all rows/columns of the MS subtable
    # Add possible new columns which have to be given in TaQL form like:
    #  ", 0. as OBSERVATION_INTEGRATION_TIME, '' as SOME_STRING_COLUMN"
    sel = pt.taql("select *" + newColumns + " from '" + msName + "/" +
                  subName + "'")
    # Remove the required columns.
    if len(removeColumns) > 0:
        sel.removecols(removeColumns)
    # Strip LOFAR_ from column names
    for col in sel.colnames():
        if len(col) > 6 and col[:6] == "LOFAR_":
            sel.renamecol(col, col[6:])
    # Copy the subtable to the image and add it as a subtable.
    # Always prefix subtable name with LOFAR_.
    subNameOut = subName
    if len(subNameOut) < 6 or subNameOut[:6] != "LOFAR_":
        subNameOut = "LOFAR_" + subNameOut
    subtab = sel.copy(image.name() + "/" + subNameOut, deep=True)
    image.putkeyword("ATTRGROUPS." + subNameOut, subtab)
    print "Added subtable", subNameOut, "containing", subtab.nrows(), "rows"
    subtab.close()
    sel.close()
Example #53
def getValuesGrid(parmdb, solType):
    '''Mimics the behaviour of getValuesGrid() of lofar.parmdb.
    The data are assumed to be stored as a regular grid in frequency and time.'''
    names = pt.table(parmdb + "/NAMES").getcol("NAME")
    myt = pt.table(parmdb)
    indices = [i for i, j in enumerate(names)
               if all(name in j for name in solType)]
    data = {}
    parmkeys = ['freqs', 'timewidths', 'freqwidths', 'values', 'times']
    for idx in indices:
        newt = pt.taql("SELECT * from $parmdb where NAMEID == $idx")
        data[names[idx]] = {}
        freqsc = [newt.getcol("STARTX")[0], newt.getcol("ENDX")[0]]
        timesc = [newt.getcol("STARTY"), newt.getcol("ENDY")]
        values = newt.getcol("VALUES")
        Ntimes = values.shape[0] * values.shape[1]
        Nfreq = values.shape[2]
        dfreq = (freqsc[1] - freqsc[0]) / Nfreq
        freqs = np.linspace(freqsc[0] + 0.5 * dfreq,
                            freqsc[1] + 0.5 * dfreq,
                            Nfreq,
                            endpoint=False)
        freqwidths = dfreq * np.ones_like(freqs)
        dtimes = (timesc[-1][-1] - timesc[0][0]) / Ntimes
        times = np.linspace(timesc[0][0] + 0.5 * dtimes,
                            timesc[-1][-1] + 0.5 * dtimes,
                            Ntimes,
                            endpoint=False)
        timewidths = dtimes * np.ones_like(times)
        data[names[idx]]['freqs'] = freqs
        data[names[idx]]['freqwidths'] = freqwidths
        data[names[idx]]['times'] = times
        data[names[idx]]['timewidths'] = timewidths
        data[names[idx]]['values'] = values
    return data
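A sketch of a call (the parmdb path and name fragments are illustrative; every element of solType must be a substring of the parameter name):

sols = getValuesGrid('L12345_instrument', ['Gain:0:0', 'Phase', 'CS002'])
for name in sols:
    grid = sols[name]
    print name, grid['values'].shape, len(grid['freqs']), len(grid['times'])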
Example #54
def addQualityTable (image, usedCounts, visCounts):
    # Create the table using TaQL.
    tab = pt.taql ("create table '" + image.name() + "/LOFAR_QUALITY' " + 
                   "QUALITY_MEASURE string, VALUE string, FLAG_ROW bool")
    # Get the rms noise of I,Q,U,V as list of tuples.
    noises = grn.get_rms_noise (image.name())
    for noise in noises:
        row = tab.nrows()
        tab.addrows (2)
        tab.putcell ("QUALITY_MEASURE", row, "RMS_NOISE_"+noise[0])
        tab.putcell ("VALUE", row, str(noise[1]))
        tab.putcell ("FLAG_ROW", row, False)
        perc = 100.
        nvis = 1.0 * visCounts.sum()
        if nvis > 0:
            # Get flagged percentage to 2 decimals.
            perc = int(10000. * (1 - usedCounts.sum() / nvis) + 0.5) / 100.
        tab.putcell ("QUALITY_MEASURE", row+1, "PERC_FLAGGED_VIS")
        tab.putcell ("VALUE", row+1, str(perc)[:5])
        tab.putcell ("FLAG_ROW", row+1, False)
    tab.flush()
    image.putkeyword ("ATTRGROUPS." + "LOFAR_QUALITY", tab)
    print "Added subtable LOFAR_QUALITY containing", tab.nrows(), "rows"
    tab.close()
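Reading the quality entries back (a sketch; image is the pyrap image handled above, still open):

qtab = pt.table(image.name() + '/LOFAR_QUALITY', ack=False)
for measure, value in zip(qtab.getcol('QUALITY_MEASURE'), qtab.getcol('VALUE')):
    print measure, '=', value
qtab.close()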
Example #55
def addQualityTable(image, usedCounts, visCounts):
    # Create the table using TaQL.
    tab = pt.taql("create table '" + image.name() + "/LOFAR_QUALITY' " +
                  "QUALITY_MEASURE string, VALUE string, FLAG_ROW bool")
    # Get the rms noise of I,Q,U,V as list of tuples.
    noises = grn.get_rms_noise(image.name())
    for noise in noises:
        row = tab.nrows()
        tab.addrows(2)
        tab.putcell("QUALITY_MEASURE", row, "RMS_NOISE_" + noise[0])
        tab.putcell("VALUE", row, str(noise[1]))
        tab.putcell("FLAG_ROW", row, False)
        perc = 100.
        nvis = 1.0 * visCounts.sum()
        if nvis > 0:
            # Get flagged percentage to 2 decimals.
            perc = int(10000. * (1 - usedCounts.sum() / nvis) + 0.5) / 100.
        tab.putcell("QUALITY_MEASURE", row + 1, "PERC_FLAGGED_VIS")
        tab.putcell("VALUE", row + 1, str(perc)[:5])
        tab.putcell("FLAG_ROW", row + 1, False)
    tab.flush()
    image.putkeyword("ATTRGROUPS." + "LOFAR_QUALITY", tab)
    print "Added subtable LOFAR_QUALITY containing", tab.nrows(), "rows"
    tab.close()
Example #56
# Get the first pointing of the first antenna
field_table = pt.table(msname + '/FIELD')
field_no = 0
direction = field_table.getcol('PHASE_DIR')
ra = direction[ant_no, field_no, 0]
if ra < 0: ra += 2 * numpy.pi
dec = direction[ant_no, field_no, 1]
targets.insert(0, {'name': 'Pointing', 'ra': ra, 'dec': dec})
print "Target ra/dec (deg):", targets[0]['ra'] * 180 / numpy.pi, targets[0][
    'dec'] * 180 / numpy.pi
print targets
field_table.close()

# Get an ordered list of unique time stamps from the measurement set
time_table = pt.taql('select TIME from $1 orderby distinct TIME', tables=[ms])
time = time_table.getcol('TIME')
time1 = time / 3600.0
time1 = time1 - floor(time1[0] / 24) * 24

clf()

ra_qa = qa.quantity(targets[0]['ra'], 'rad')
dec_qa = qa.quantity(targets[0]['dec'], 'rad')
pointing = me.direction('j2000', ra_qa, dec_qa)

for target in targets:

    t = qa.quantity(time[0], 's')
    t1 = me.epoch('utc', t)
    me.doframe(t1)
me.doframe( position )
ant_table.close()


# Get the first pointing of the first antenna
field_table = pt.table(msname + '/FIELD', ack=False)
field_no = 0
direction = field_table.getcol('PHASE_DIR')
ra = direction[ ant_no, field_no, 0 ]
dec = direction[ ant_no, field_no, 1 ]
targets.insert(0, {'name' : 'Pointing', 'ra' : ra, 'dec' : dec})
field_table.close()


# Get an ordered list of unique time stamps from the measurement set
time_table = pt.taql('select DISTINCT TIME from $1', tables = [ms])
time = time_table.getcol('TIME')

time1 = time/3600.0
time1 = (time1 - floor(time1[0]/24)*24)*3600

time2 = map(datetime.datetime.fromtimestamp, time1)
time2 = dates.date2num(time2)

clf()

ra_qa  = qa.quantity( targets[0]['ra'], 'rad' )
dec_qa = qa.quantity( targets[0]['dec'], 'rad' )
pointing =  me.direction('j2000', ra_qa, dec_qa)

for target in targets:
import datetime
import numpy as np
import matplotlib.pyplot as plt
import RMextract.getRM as gt
import pyrap.tables as tab
from pylab import *
a=tab.taql('calc MJD("2013/11/01/00:00:00")')[0]*3600*24
b=tab.taql('calc MJD("2013/11/09/00:00:00")')[0]*3600*24

statpos=gt.PosTools.posCS002
pointing=array([ 2.15374123,  0.8415521 ]) #3C196
bigdict=gt.getRM(ionexPath='./IONEXdata/',earth_rot=0,ha_limit=45*np.pi/180,radec=pointing,timestep=1800, timerange = [a, b],stat_positions=[statpos,])
flags=np.logical_not(bigdict['flags']['st1'])
times=bigdict['times'][np.logical_not(flags)]
timess=[tm/(3600*24.) for tm in times]
dates=tab.taql('calc ctod($timess)')
if 'array' in dates.keys():
    dates=dates['array']
else:
    dates=dates[dates.keys()[0]] #backward compatibility with older casacore versions
format="%Y/%m/%d/%H:%M:%S.%f"
mydatetimes=[datetime.datetime.strptime(mydate,format) for mydate in dates]

bpar=bigdict['Bpar']['st1'][np.logical_not(flags)]
bfield=bigdict['BField']['st1'][np.logical_not(flags)]
abs_field=np.sqrt(np.sum(np.square(bfield),axis=1))
bperp=abs_field-np.abs(bpar)
plot_date(mydatetimes,bperp,'-')
plot_date(mydatetimes,bpar,'-')
plot_date(mydatetimes,bperp)
plot_date(mydatetimes,bpar)
plt.gcf().autofmt_xdate()
ylabel("Bpar (nGaus)")
show()