Code example #1
    def from_casa_image(filename, dropdeg=True, skipdata=False,
                        skipvalid=False, skipcs=False):
        """
        Load a cube into memory from a CASA image. By default it will transpose
        the cube into a 'python' order and drop degenerate axes. These options can
        be suppressed. The object holds the coordsys object from the image in
        memory.
        """

        # use the ia tool to get the file contents
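        # (the CASA image-analysis tool `ia` is assumed to be available here,
        #  e.g. as the casapy global or via `from taskinit import ia`)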
        ia.open(filename)

        # read in the data
        if not skipdata:
            data = ia.getchunk(dropdeg=dropdeg)

        # CASA stores validity of data as a mask
        if not skipvalid:
            valid = ia.getchunk(getmask=True, dropdeg=dropdeg)

        # transpose is dealt with within the cube object
            
        # read in coordinate system object
        casa_cs = ia.coordsys()

        wcs = wcs_casa2astropy(casa_cs)

        # don't need this yet
        # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

        #    if stokes == None:
        #        order = np.arange(self.data.ndim)
        #    else:
        #        order = []
        #        for ax in np.arange(self.data.ndim+1):
        #            if ax == stokes:
        #                continue
        #            order.append(ax)

        #    self.casa_cs = ia.coordsys(order)
            
            # This should work, but coordsys.reorder() has a bug
            # on the error checking. JIRA filed. Until then the
            # axes will be reversed from the original.

            #if transpose == True:
            #    new_order = np.arange(self.data.ndim)
            #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
            #    print new_order
            #    self.casa_cs.reorder(new_order)
        
        # close the ia tool
        ia.close()

        metadata = {'filename':filename}

        mask = SpectralCubeMask(wcs, np.logical_not(valid))
        cube = SpectralCube(data, wcs, mask, metadata=metadata)

        return cube
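
A side note on the dropdeg keyword used above: dropping degenerate axes removes length-1 axes from the returned chunk, much like numpy's squeeze. A minimal numpy-only analogy (not a CASA call; the shape is made up):

    import numpy as np

    chunk = np.zeros((1, 16, 32, 32))   # hypothetical 4-D chunk with one degenerate (length-1) axis
    print(np.squeeze(chunk).shape)      # (16, 32, 32): degenerate axis removed, like dropdeg=True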
Code example #2
def test_casa_mask(data_adv, tmp_path):

    cube = SpectralCube.read(data_adv)

    mask_array = np.array([[True, False], [False, False], [True, True]])
    bool_mask = BooleanArrayMask(mask=mask_array,
                                 wcs=cube._wcs,
                                 shape=cube.shape)
    cube = cube.with_mask(bool_mask)

    make_casa_mask(cube,
                   str(tmp_path / 'casa.mask'),
                   add_stokes=False,
                   append_to_image=False,
                   overwrite=True)

    ia = casatools.image()

    ia.open(str(tmp_path / 'casa.mask'))

    casa_mask = ia.getchunk()

    coords = ia.coordsys()

    ia.unlock()
    ia.close()
    ia.done()

    # Test masks
    # Mask array is broadcasted to the cube shape. Mimic this, switch to ints,
    # and transpose to match CASA image.
    compare_mask = np.tile(mask_array, (4, 1, 1)).astype('int16').T
    assert np.all(compare_mask == casa_mask)

    # Test WCS info

    # Convert back to an astropy wcs object so transforms are dealt with.
    casa_wcs = wcs_casa2astropy(ia, coords)
    header = casa_wcs.to_header()  # Invokes transform

    # Compare some basic properties EXCLUDING the spectral axis
    assert np.allclose(cube.wcs.wcs.crval[:2], casa_wcs.wcs.crval[:2])
    assert np.all(cube.wcs.wcs.cdelt[:2] == casa_wcs.wcs.cdelt[:2])
    assert np.all(list(cube.wcs.wcs.cunit)[:2] == list(casa_wcs.wcs.cunit)[:2])
    assert np.all(list(cube.wcs.wcs.ctype)[:2] == list(casa_wcs.wcs.ctype)[:2])

    # Reference pixels in CASA are 1 pixel off.
    assert_allclose(cube.wcs.wcs.crpix, casa_wcs.wcs.crpix, atol=1.0)
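
The compare_mask line above is pure shape bookkeeping; here is a numpy-only sketch of it (the (4, 3, 2) cube shape is inferred from the np.tile call in the test, and the final transpose reverses the axis order to match what ia.getchunk() returns):

    import numpy as np

    mask_array = np.array([[True, False], [False, False], [True, True]])   # (3, 2)
    compare_mask = np.tile(mask_array, (4, 1, 1)).astype('int16')           # (4, 3, 2): broadcast over 4 channels
    print(compare_mask.shape)     # (4, 3, 2)
    print(compare_mask.T.shape)   # (2, 3, 4): reversed axis order, as stored by CASA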
Code example #3
def test_casa_mask():

    cube = SpectralCube.read(path('adv.fits'))

    mask_array = np.array([[True, False], [False, False], [True, True]])
    bool_mask = BooleanArrayMask(mask=mask_array, wcs=cube._wcs,
                                 shape=cube.shape)
    cube = cube.with_mask(bool_mask)

    if os.path.exists('casa.mask'):
        os.system('rm -rf casa.mask')

    make_casa_mask(cube, 'casa.mask', add_stokes=False,
                   append_to_image=False, overwrite=True)

    ia.open('casa.mask')

    casa_mask = ia.getchunk()

    coords = ia.coordsys()

    ia.close()

    # Test masks
    # Mask array is broadcasted to the cube shape. Mimic this, switch to ints,
    # and transpose to match CASA image.
    compare_mask = np.tile(mask_array, (4, 1, 1)).astype('int16').T
    assert np.all(compare_mask == casa_mask)

    # Test WCS info

    # Convert back to an astropy wcs object so transforms are dealt with.
    casa_wcs = wcs_casa2astropy(coords)
    header = casa_wcs.to_header()  # Invokes transform

    # Compare some basic properties EXCLUDING the spectral axis
    assert np.allclose(cube.wcs.wcs.crval[:2], casa_wcs.wcs.crval[:2])
    assert np.all(cube.wcs.wcs.cdelt[:2] == casa_wcs.wcs.cdelt[:2])
    assert np.all(list(cube.wcs.wcs.cunit)[:2] == list(casa_wcs.wcs.cunit)[:2])
    assert np.all(list(cube.wcs.wcs.ctype)[:2] == list(casa_wcs.wcs.ctype)[:2])

    # Reference pixels in CASA are 1 pixel off.
    assert_allclose(cube.wcs.wcs.crpix, casa_wcs.wcs.crpix,
                    atol=1.0)
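
A plausible reading of the atol=1.0 tolerance used in both tests above (an assumption about conventions, not something the tests themselves assert): CASA coordinate systems count reference pixels from 0, while FITS/astropy CRPIX is 1-based, so a round trip through a CASA image can leave crpix shifted by exactly one pixel. Illustrated with made-up values:

    import numpy as np
    from numpy.testing import assert_allclose

    crpix_zero_based = np.array([63.0, 63.0, 0.0])    # hypothetical CASA-style reference pixel
    crpix_one_based = crpix_zero_based + 1.0          # same pixel in the 1-based FITS convention
    assert_allclose(crpix_zero_based, crpix_one_based, atol=1.0)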
Code example #4
File: CubeStats_AT.py  Project: teuben/admit
    def run(self):
        """Runs the task.

           Parameters
           ----------
           None

           Returns
           -------
           None
        """

        self._summary = {}
        dt = utils.Dtime("CubeStats")

        #maxvrms = 2.0      # maximum variation in rms allowed (hardcoded for now)
        #maxvrms = -1.0     # turn maximum variation in rms allowed off
        maxvrms = self.getkey("maxvrms")

        psample = -1
        psample = self.getkey("psample")        

        # BDP's used :
        #   b1 = input BDP
        #   b2 = output BDP

        b1 = self._bdp_in[0]
        fin = b1.getimagefile(bt.CASA)

        bdp_name = self.mkext(fin,'cst')
        b2 = CubeStats_BDP(bdp_name)
        self.addoutput(b2)

        # PeakPointPlot 
        use_ppp = self.getkey("ppp")

        # peakstats: not enabled for mortal users yet
        # peakstats = (psample=1, numsigma=4, minchan=3, maxgap=2, peakfit=False)
        pnumsigma = 4
        minchan   = 3
        maxgap    = 2
        peakfit   = False             # True will enable a true gaussian fit
        
        # numsigma:  adding all signal > numsigma ; not user enabled;   for peaksum.
        numsigma = -1.0
        numsigma = 3.0

        # grab the new robust statistics. If this is used, 'rms' will be the RMS;
        # otherwise we will use RMS = 1.4826*MAD (MAD does a decent job on outliers as well),
        # which was the only method available before CASA 4.4, when robust was implemented
        robust = self.getkey("robust")
        rargs = casautil.parse_robust(robust)
        nrargs = len(rargs)

        if nrargs == 0:
            sumrargs = "medabsdevmed"      # for the summary, indicate the default robust
        else:
            sumrargs = str(rargs)

        self._summary["rmsmethd"] = SummaryEntry([sumrargs,fin],"CubeStats_AT",self.id(True))
        #@todo think about using this instead of putting 'fin' in all the SummaryEntry
        #self._summary["casaimage"] = SummaryEntry(fin,"CubeStats_AT",self.id(True))

        # extra CASA call to get the freq's in GHz, as these are not in imstat1{}
        # @todo what if the coordinates are not in FREQ ?
        # Note: CAS-7648 bug on 3D cubes
        if False:
            # csys method
            ia.open(self.dir(fin))
            csys = ia.coordsys() 
            spec_axis = csys.findaxisbyname("spectral") 
            # ieck, we need a valid position, or else it will come back with "Exception: All selected pixels are masked"
            #freqs = ia.getprofile(spec_axis, region=rg.box([0,0],[0,0]))['coords']/1e9
            #freqs = ia.getprofile(spec_axis)['coords']/1e9
            freqs = ia.getprofile(spec_axis,unit="GHz")['coords']
            dt.tag("getprofile")
        else:
            # old imval method 
            #imval0 = casa.imval(self.dir(fin),box='0,0,0,0')     # this fails on 3D
            imval0 = casa.imval(self.dir(fin))
            freqs = imval0['coords'].transpose()[2]/1e9
            dt.tag("imval")
        nchan = len(freqs)
        chans = np.arange(nchan)

        # call CASA to get what we want
        # imstat0 is the whole cube, imstat1 the plane based statistics
        # warning: certain robust stats (**rargs) on the whole cube are going to be very slow
        dt.tag("start")
        imstat0 = casa.imstat(self.dir(fin),           logfile=self.dir('imstat0.logfile'),append=False,**rargs)
        dt.tag("imstat0")
        imstat1 = casa.imstat(self.dir(fin),axes=[0,1],logfile=self.dir('imstat1.logfile'),append=False,**rargs)
        dt.tag("imstat1")
        # imm = casa.immoments(self.dir(fin),axis='spec', moments=8, outfile=self.dir('ppp.im'))
        if nrargs > 0:
            # need to get the peaks without the robust arguments
            imstat10 = casa.imstat(self.dir(fin),           logfile=self.dir('imstat0.logfile'),append=True)
            dt.tag("imstat10")
            imstat11 = casa.imstat(self.dir(fin),axes=[0,1],logfile=self.dir('imstat1.logfile'),append=True)
            dt.tag("imstat11")

        # grab the relevant plane-based things from imstat1
        if nrargs == 0:
            mean    = imstat1["mean"]
            sigma   = imstat1["medabsdevmed"]*1.4826     # see also: astropy.stats.median_absolute_deviation()
            peakval = imstat1["max"]
            minval  = imstat1["min"]
        else:
            mean    = imstat1["mean"]
            sigma   = imstat1["rms"]
            peakval = imstat11["max"]
            minval  = imstat11["min"]

        if True:
            # work around a bug in imstat(axes=[0,1]) for last channel [CAS-7697]
            for i in range(len(sigma)):
                if sigma[i] == 0.0:
                    minval[i] = peakval[i] = 0.0

        # too many variations in the RMS ?
        sigma_pos = sigma[np.where(sigma>0)]
        smin = sigma_pos.min()
        smax = sigma_pos.max()
        logging.info("sigma varies from %f to %f; %d/%d channels ok" % (smin,smax,len(sigma_pos),len(sigma)))
        if maxvrms > 0:
            if smax/smin > maxvrms:
                cliprms = smin * maxvrms
                logging.warning("sigma varies too much, going to clip to %g (%g > %g)" % (cliprms, smax/smin, maxvrms))
                sigma = np.where(sigma < cliprms, sigma, cliprms)

        # @todo   (and check again) for foobar.fits all sigma's became 0 when robust was selected
        #         was this with mask=True/False?

        # PeakPointPlot (can be expensive, hence the option)
        if use_ppp:
            logging.info("Computing MaxPos for PeakPointPlot")
            xpos    = np.zeros(nchan)
            ypos    = np.zeros(nchan)
            peaksum = np.zeros(nchan)

            ia.open(self.dir(fin))
            for i in range(nchan):
                if sigma[i] > 0.0:
                    plane = ia.getchunk(blc=[0,0,i,-1],trc=[-1,-1,i,-1],dropdeg=True)
                    v = ma.masked_invalid(plane)
                    v_abs = np.absolute(v)
                    maxpos = np.unravel_index(v_abs.argmax(), v_abs.shape)
                    xpos[i] = maxpos[0]
                    ypos[i] = maxpos[1]
                    if numsigma > 0.0:
                        peaksum[i] = ma.masked_less(v,numsigma * sigma[i]).sum()
            peaksum = np.nan_to_num(peaksum)    # put 0's where nan's are found
            ia.close()
            dt.tag("ppp")

        nzeros = len(np.where(sigma<=0.0)[0])
        if nzeros > 0:
            zeroch = np.where(sigma<=0.0)
            logging.warning("There are %d fully masked channels (%s)" % (nzeros,str(zeroch)))

        # construct the admit Table for CubeStats_BDP
        # note data needs to be a tuple, later to be column_stack'd
        if use_ppp:
            labels = ["channel" ,"frequency" ,"mean"    ,"sigma"   ,"max"     ,"maxposx" ,"maxposy" ,"min",     "peaksum"]
            units  = ["number"  ,"GHz"       ,"Jy/beam" ,"Jy/beam" ,"Jy/beam" ,"number"  ,"number"  ,"Jy/beam", "Jy"]
            data   = (chans     ,freqs       ,mean      ,sigma     ,peakval   ,xpos      ,ypos      ,minval,    peaksum)

        else:
            labels = ["channel" ,"frequency" ,"mean"    ,"sigma"   ,"max"     ,"min"]
            units  = ["number"  ,"GHz"       ,"Jy/beam" ,"Jy/beam" ,"Jy/beam" ,"Jy/beam"]
            data   = (chans     ,freqs       ,mean      ,sigma     ,peakval   ,minval)

        table = Table(columns=labels,units=units,data=np.column_stack(data))
        b2.setkey("table",table)

        # get the full cube statistics, it depends if robust was pre-selected
        if nrargs == 0:
            mean0  = imstat0["mean"][0]
            sigma0 = imstat0["medabsdevmed"][0]*1.4826
            peak0  = imstat0["max"][0]
            b2.setkey("mean" , float(mean0))
            b2.setkey("sigma", float(sigma0))
            b2.setkey("minval",float(imstat0["min"][0]))
            b2.setkey("maxval",float(imstat0["max"][0]))
            b2.setkey("minpos",imstat0["minpos"][:3].tolist())     #? [] or array(..dtype=int32) ??
            b2.setkey("maxpos",imstat0["maxpos"][:3].tolist())     #? [] or array(..dtype=int32) ??
            logging.info("CubeMax: %f @ %s" % (imstat0["max"][0],str(imstat0["maxpos"])))
            logging.info("CubeMin: %f @ %s" % (imstat0["min"][0],str(imstat0["minpos"])))
            logging.info("CubeRMS: %f" % sigma0)
        else:
            mean0  = imstat0["mean"][0]
            sigma0 = imstat0["rms"][0]
            peak0  = imstat10["max"][0]
            b2.setkey("mean" , float(mean0))
            b2.setkey("sigma", float(sigma0))
            b2.setkey("minval",float(imstat10["min"][0]))
            b2.setkey("maxval",float(imstat10["max"][0]))
            b2.setkey("minpos",imstat10["minpos"][:3].tolist())     #? [] or array(..dtype=int32) ??
            b2.setkey("maxpos",imstat10["maxpos"][:3].tolist())     #? [] or array(..dtype=int32) ??
            logging.info("CubeMax: %f @ %s" % (imstat10["max"][0],str(imstat10["maxpos"])))
            logging.info("CubeMin: %f @ %s" % (imstat10["min"][0],str(imstat10["minpos"])))
            logging.info("CubeRMS: %f" % sigma0)
        b2.setkey("robust",robust)
        rms_ratio = imstat0["rms"][0]/sigma0
        logging.info("RMS Sanity check %f" % rms_ratio)
        if rms_ratio > 1.5:
            logging.warning("RMS sanity check = %f.  Either bad sidelobes, lotsa signal, or both" % rms_ratio)
        logging.regression("CST: %f %f" % (sigma0, rms_ratio))

        # plots: no plots need to be made when nchan=1 for continuum
        # however we could make a histogram, overlaying the "best" gauss so 
        # signal deviations are clear?

        logging.info('mean,rms,S/N=%f %f %f' % (mean0,sigma0,peak0/sigma0))

        if nchan == 1:
            # for a continuum/1-channel we only need to stuff some numbers into the _summary
            self._summary["chanrms"] = SummaryEntry([float(sigma0), fin], "CubeStats_AT", self.id(True))
            self._summary["dynrange"] = SummaryEntry([float(peak0)/float(sigma0), fin], "CubeStats_AT", self.id(True))
            self._summary["datamean"] = SummaryEntry([float(mean0), fin], "CubeStats_AT", self.id(True))
        else:
            y1 = np.log10(ma.masked_invalid(peakval))
            y2 = np.log10(ma.masked_invalid(sigma))
            y3 = y1-y2
            y4 = np.log10(ma.masked_invalid(-minval))
            y5 = y1-y4
            y = [y1,y2,y3,y4]
            title = 'CubeStats: ' + bdp_name+'_0'
            xlab  = 'Channel'
            ylab  = 'log(Peak,Noise,Peak/Noise)'
            labels = ['log(peak)','log(rms noise)','log(peak/noise)','log(|minval|)']
            myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir())
            segp = [[chans[0],chans[nchan-1],math.log10(sigma0),math.log10(sigma0)]]
            myplot.plotter(chans,y,title,bdp_name+"_0",xlab=xlab,ylab=ylab,segments=segp,labels=labels,thumbnail=True)
            imfile = myplot.getFigure(figno=myplot.figno,relative=True)
            thumbfile = myplot.getThumbnail(figno=myplot.figno,relative=True)

            image0 = Image(images={bt.PNG:imfile},thumbnail=thumbfile,thumbnailtype=bt.PNG,description="CubeStats_0")
            b2.addimage(image0,"im0")

            if use_ppp:
                # new trial for Lee
                title = 'PeakSum: (numsigma=%.1f)' % (numsigma)
                ylab = 'Jy*N_ppb'
                myplot.plotter(chans,[peaksum],title,bdp_name+"_00",xlab=xlab,ylab=ylab,thumbnail=False)

            if True:
                # hack ascii table
                y30 = np.where(sigma > 0, np.log10(peakval/sigma), 0.0)
                table2 = Table(columns=["freq","log(P/N)"],data=np.column_stack((freqs,y30)))
                table2.exportTable(self.dir("testCubeStats.tab"))
                del table2

            # the "box" for the "spectrum" is all pixels.  Don't know how to 
            # get this except via shape.
            ia.open(self.dir(fin))
            s = ia.summary()
            ia.close()
            if 'shape' in s:
                specbox = (0,0,s['shape'][0],s['shape'][1])
            else:
                specbox = ()

            caption = "Emission characteristics as a function of channel, as derived by CubeStats_AT "
            caption += "(cyan: global rms,"
            caption += " green: noise per channel,"
            caption += " blue: peak value per channel,"
            caption += " red: peak/noise per channel)."
            self._summary["spectra"] = SummaryEntry([0, 0, str(specbox), 'Channel', imfile, thumbfile , caption, fin], "CubeStats_AT", self.id(True))
            self._summary["chanrms"] = SummaryEntry([float(sigma0), fin], "CubeStats_AT", self.id(True))

            # @todo Will imstat["max"][0] always be equal to s['datamax']?  If not, why not?
            if 'datamax' in s:
                self._summary["dynrange"] = SummaryEntry([float(s['datamax']/sigma0), fin], "CubeStats_AT", self.id(True))
            else:
                self._summary["dynrange"] = SummaryEntry([float(imstat0["max"][0]/sigma0), fin], "CubeStats_AT", self.id(True))
            self._summary["datamean"] = SummaryEntry([imstat0["mean"][0], fin], "CubeStats_AT", self.id(True))

            title = bdp_name + "_1"
            xlab =  'log(Peak,Noise,P/N)'
            myplot.histogram([y1,y2,y3],title,bdp_name+"_1",xlab=xlab,thumbnail=True)

            imfile = myplot.getFigure(figno=myplot.figno,relative=True)
            thumbfile = myplot.getThumbnail(figno=myplot.figno,relative=True)
            image1 = Image(images={bt.PNG:imfile},thumbnail=thumbfile,thumbnailtype=bt.PNG,description="CubeStats_1")
            b2.addimage(image1,"im1")

            # note that the 'y2' can have been clipped, which can throw off stats.robust()
            # @todo  should set a mask for those.

            title = bdp_name + "_2"
            xlab = 'log(Noise)'
            n = len(y2)
            ry2 = stats.robust(y2)
            y2_mean = ry2.mean()
            y2_std  = ry2.std()
            if n>9: logging.debug("NORMALTEST2: %s" % str(scipy.stats.normaltest(ry2)))
            myplot.hisplot(y2,title,bdp_name+"_2",xlab=xlab,gauss=[y2_mean,y2_std],thumbnail=True)

            title = bdp_name + "_3"
            xlab = 'log(diff[Noise])'
            n = len(y2)
            # dy2 = y2[0:-2] - y2[1:-1]
            dy2 = ma.masked_equal(y2[0:-2] - y2[1:-1],0.0).compressed()
            rdy2 = stats.robust(dy2)
            dy2_mean = rdy2.mean()
            dy2_std  = rdy2.std()
            if n>9: logging.debug("NORMALTEST3: %s" % str(scipy.stats.normaltest(rdy2)))
            myplot.hisplot(dy2,title,bdp_name+"_3",xlab=xlab,gauss=[dy2_mean,dy2_std],thumbnail=True)


            title = bdp_name + "_4"
            xlab = 'log(Signal/Noise)'
            n = len(y3)
            ry3 = stats.robust(y3)
            y3_mean = ry3.mean()
            y3_std  = ry3.std()
            if n>9: logging.debug("NORMALTEST4: %s" % str(scipy.stats.normaltest(ry3)))
            myplot.hisplot(y3,title,bdp_name+"_4",xlab=xlab,gauss=[y3_mean,y3_std],thumbnail=True)

            title = bdp_name + "_5"
            xlab = 'log(diff[Signal/Noise])'
            n = len(y3)
            dy3 = y3[0:-2] - y3[1:-1]
            rdy3 = stats.robust(dy3)
            dy3_mean = rdy3.mean()
            dy3_std  = rdy3.std()
            if n>9: logging.debug("NORMALTEST5: %s" % str(scipy.stats.normaltest(rdy3)))
            myplot.hisplot(dy3,title,bdp_name+"_5",xlab=xlab,gauss=[dy3_mean,dy3_std],thumbnail=True)


            title = bdp_name + "_6"
            xlab = 'log(Peak+Min)'
            n = len(y1)
            ry5 = stats.robust(y5)
            y5_mean = ry5.mean()
            y5_std  = ry5.std()
            if n>9: logging.debug("NORMALTEST6: %s" % str(scipy.stats.normaltest(ry5)))
            myplot.hisplot(y5,title,bdp_name+"_6",xlab=xlab,gauss=[y5_mean,y5_std],thumbnail=True)

            logging.debug("LogPeak: m,s= %f %f min/max %f %f" % (y1.mean(),y1.std(),y1.min(),y1.max()))
            logging.debug("LogNoise: m,s= %f %f %f %f min/max %f %f" % (y2.mean(),y2.std(),y2_mean,y2_std,y2.min(),y2.max()))
            logging.debug("LogDeltaNoise: RMS/sqrt(2)= %f %f " % (dy2.std()/math.sqrt(2),dy2_std/math.sqrt(2)))
            logging.debug("LogDeltaP/N:   RMS/sqrt(2)= %f %f" % (dy3.std()/math.sqrt(2),dy3_std/math.sqrt(2)))
            logging.debug("LogPeak+Min: robust m,s= %f %f" % (y5_mean,y5_std))

            # compute two ratios that should both be near 1.0 if noise is 'normal'
            ratio  = y2.std()/(dy2.std()/math.sqrt(2))
            ratio2 = y2_std/(dy2_std/math.sqrt(2))
            logging.info("RMS BAD VARIATION RATIO: %f %f" % (ratio,ratio2))

        # making PPP plot
        if nchan > 1 and use_ppp:
            smax = 10
            gamma = 0.75

            z0 = peakval/peakval.max()
            # point sizes
            s = np.pi * ( smax * (z0**gamma) )**2
            cmds = ["grid", "axis equal"]
            title = "Peak Points per channel"
            pppimage = bdp_name + '_ppp'
            myplot.scatter(xpos,ypos,title=title,figname=pppimage,size=s,color=chans,cmds=cmds,thumbnail=True)
            pppimage     = myplot.getFigure(figno=myplot.figno,relative=True)
            pppthumbnail = myplot.getThumbnail(figno=myplot.figno,relative=True)
            caption = "Peak point plot: Locations of per-channel peaks in the image cube " + fin
            self._summary["peakpnt"] = SummaryEntry([pppimage, pppthumbnail, caption, fin], "CubeStats_AT", self.id(True))
        dt.tag("plotting")

        # making PeakStats plot
        if nchan > 1 and psample > 0:
            logging.info("Computing peakstats")
            # grab peak,mean and width values for all peaks
            (pval,mval,wval) = peakstats(self.dir(fin),freqs,sigma0,pnumsigma,minchan,maxgap,psample,peakfit)
            title = "PeakStats: cutoff = %g" % (sigma0*pnumsigma)
            xlab = 'Peak value'
            ylab = 'FWHM (channels)'
            pppimage = bdp_name + '_peakstats'
            cval = mval
            myplot.scatter(pval,wval,title=title,xlab=xlab,ylab=ylab,color=cval,figname=pppimage,thumbnail=False)
            dt.tag("peakstats")
            

        # myplot.final()    # pjt debug 
        # all done!
        dt.tag("done")

        taskargs = "robust=" + sumrargs 
        if use_ppp: 
            taskargs = taskargs + " ppp=True"
        else: 
            taskargs = taskargs + " ppp=False"
        for v in self._summary:
            self._summary[v].setTaskArgs(taskargs)

        dt.tag("summary")
        dt.end()
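
Two statistical conventions used in run() above can be sanity-checked with plain numpy, independent of CASA: the 1.4826 factor that turns a median absolute deviation ("medabsdevmed") into a Gaussian-equivalent sigma, and the sqrt(2) in the "RMS BAD VARIATION RATIO" check (differences of adjacent, independent noise values have sqrt(2) times the per-channel scatter). A minimal sketch with synthetic Gaussian noise:

    import numpy as np

    rng = np.random.default_rng(0)
    x = rng.normal(0.0, 2.0, size=100000)    # synthetic noise with true sigma = 2.0

    # robust sigma, as in: sigma = imstat1["medabsdevmed"] * 1.4826
    mad = np.median(np.abs(x - np.median(x)))
    print(1.4826 * mad)                       # ~2.0

    # adjacent-channel differencing: std(diff) ~ sqrt(2)*sigma, so this ratio ~ 1
    print(x.std() / (np.diff(x).std() / np.sqrt(2)))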
Code example #5
File: casa_image.py  Project: jpinedaf/spectral-cube
def load_casa_image(filename, skipdata=False, skipvalid=False, skipcs=False, **kwargs):
    """
    Load a cube into memory from a CASA image. By default it will transpose
    the cube into a 'python' order and drop degenerate axes. These options can
    be suppressed. The object holds the coordsys object from the image in
    memory.
    """

    try:
        from taskinit import ia
    except ImportError:
        raise ImportError("Could not import CASA (casac) and therefore cannot read CASA .image files")

    # use the ia tool to get the file contents
    ia.open(filename)

    # read in the data
    if not skipdata:
        data = ia.getchunk()

    # CASA stores validity of data as a mask
    if not skipvalid:
        valid = ia.getchunk(getmask=True)

    # transpose is dealt with within the cube object

    # read in coordinate system object
    casa_cs = ia.coordsys()

    wcs = wcs_casa2astropy(casa_cs)

    unit = ia.brightnessunit()

    # don't need this yet
    # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

    #    if stokes == None:
    #        order = np.arange(self.data.ndim)
    #    else:
    #        order = []
    #        for ax in np.arange(self.data.ndim+1):
    #            if ax == stokes:
    #                continue
    #            order.append(ax)

    #    self.casa_cs = ia.coordsys(order)

    # This should work, but coordsys.reorder() has a bug
    # on the error checking. JIRA filed. Until then the
    # axes will be reversed from the original.

    # if transpose == True:
    #    new_order = np.arange(self.data.ndim)
    #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
    #    print new_order
    #    self.casa_cs.reorder(new_order)

    # close the ia tool
    ia.close()

    meta = {"filename": filename, "BUNIT": unit}

    if wcs.naxis == 3:
        mask = BooleanArrayMask(np.logical_not(valid), wcs)
        cube = SpectralCube(data, wcs, mask, meta=meta)

    elif wcs.naxis == 4:
        data, wcs = cube_utils._split_stokes(data.T, wcs)
        mask = {}
        for component in data:
            data[component], wcs_slice = cube_utils._orient(data[component], wcs)
            mask[component] = LazyMask(np.isfinite, data=data[component], wcs=wcs_slice)

        cube = StokesSpectralCube(data, wcs_slice, mask, meta=meta)

    return cube
Code example #6
def load_casa_image(filename, skipdata=False,
                    skipvalid=False, skipcs=False, **kwargs):
    """
    Load a cube into memory from a CASA image. By default it will transpose
    the cube into a 'python' order and drop degenerate axes. These options can
    be suppressed. The object holds the coordsys object from the image in
    memory.
    """

    try:
        from taskinit import ia
    except ImportError:
        raise ImportError("Could not import CASA (casac) and therefore cannot read CASA .image files")

    # use the ia tool to get the file contents
    ia.open(filename)

    # read in the data
    if not skipdata:
        data = ia.getchunk()

    # CASA stores validity of data as a mask
    if not skipvalid:
        valid = ia.getchunk(getmask=True)

    # transpose is dealt with within the cube object

    # read in coordinate system object
    casa_cs = ia.coordsys()

    wcs = wcs_casa2astropy(casa_cs)

    unit = ia.brightnessunit()

    # don't need this yet
    # stokes = get_casa_axis(temp_cs, wanttype="Stokes", skipdeg=False,)

    #    if stokes == None:
    #        order = np.arange(self.data.ndim)
    #    else:
    #        order = []
    #        for ax in np.arange(self.data.ndim+1):
    #            if ax == stokes:
    #                continue
    #            order.append(ax)

    #    self.casa_cs = ia.coordsys(order)

        # This should work, but coordsys.reorder() has a bug
        # on the error checking. JIRA filed. Until then the
        # axes will be reversed from the original.

        # if transpose == True:
        #    new_order = np.arange(self.data.ndim)
        #    new_order = new_order[-1*np.arange(self.data.ndim)-1]
        #    print new_order
        #    self.casa_cs.reorder(new_order)

    # close the ia tool
    ia.close()

    meta = {'filename': filename,
            'BUNIT': unit}


    if wcs.naxis == 3:
        mask = BooleanArrayMask(np.logical_not(valid), wcs)
        cube = SpectralCube(data, wcs, mask, meta=meta)

    elif wcs.naxis == 4:
        data, wcs = cube_utils._split_stokes(data.T, wcs)
        mask = {}
        for component in data:
            data[component], wcs_slice = cube_utils._orient(data[component],
                                                            wcs)
            mask[component] = LazyMask(np.isfinite, data=data[component],
                                       wcs=wcs_slice)

        cube = StokesSpectralCube(data, wcs_slice, mask, meta=meta)

    return cube
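
A minimal usage sketch for the loader above, assuming it is run inside a CASA Python session (so taskinit is importable) with spectral-cube installed; the image path is a placeholder:

    cube = load_casa_image('my_cube.image')   # hypothetical CASA image directory
    print(cube.shape)
    print(cube.meta['BUNIT'])                 # brightness unit taken from ia.brightnessunit()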