def checkAll(self):
    """ Method to check the dtd structure to see if all expected nodes were found.

        Parameters
        ----------
        None

        Returns
        -------
        Boolean, whether or not all nodes were found
    """
    #pp.pprint(self.entities)
    for i in self.entities:
        if not self.entities[i]["found"]:
            print self.xmlFile
            logging.info(str(i) + " not found")
            return False
        for a in self.entities[i]["attrib"]:
            if not self.entities[i]["attrib"][a]["found"]:
                print "2", self.xmlFile
                logging.info(str(i) + " " + str(a) + " not found")
                return False
    return True
def addBDPtoAT(self, bdp):
    """ Method to add a BDP to an AT. The AT is not specified, but the _taskid
        attribute of the BDP is used to identify the necessary AT.

        Parameters
        ----------
        bdp : BDP
            Any valid BDP, to be added to an existing AT.

        Returns
        -------
        None
    """
    found = False
    cp = copy.deepcopy(bdp)
    # find the AT we need
    for at in self.tasks:
        # see if the ID's match
        if at._taskid == bdp._taskid:
            found = True
            # set the base directory of the BDP
            cp.baseDir(at.baseDir())
            # add it to the correct slot
            at._bdp_out[at._bdp_out_map.index(cp._uid)] = cp
            break
    if not found:
        logging.info("##### Found orphaned BDP with type %s in file %s" %
                     (bdp._type, bdp.xmlFile))
def get_mem(self):
    """ Read memory usage info from /proc/pid/status.
        Return Virtual and Resident memory size in MBytes.
    """
    global ostype
    if ostype == None:
        ostype = os.uname()[0].lower()
        logging.info("OSTYPE: %s" % ostype)
    scale = {'MB': 1024.0}
    lines = []
    try:
        if ostype == 'linux':
            proc_status = '/proc/%d/status' % os.getpid()       # linux only
            # open pseudo file /proc/<pid>/status
            t = open(proc_status)
            # get value from line e.g. 'VmRSS: 9999 kB\n'
            for it in t.readlines():
                if 'VmSize' in it or 'VmRSS' in it:
                    lines.append(it)
            t.close()
        else:
            proc = subprocess.Popen(['ps', '-o', 'rss', '-o', 'vsz', '-o', 'pid',
                                     '-p', str(os.getpid())],
                                    stdout=subprocess.PIPE)
            proc_output = proc.communicate()[0].split('\n')
            proc_output_memory = proc_output[1]
            proc_output_memory = proc_output_memory.split()
            phys_mem = int(proc_output_memory[0]) / 1024      # to MB
            virtual_mem = int(proc_output_memory[1]) / 1024
    except (IOError, OSError):
        if self.report:
            logging.timing(self.label + " Error: cannot read memory usage information.")
        return np.array([])
    # parse the two lines
    mem = {}
    if (ostype != 'darwin'):
        for line in lines:
            words = line.strip().split()
            #print words[0], '===', words[1], '===', words[2]
            # get rid of the trailing ':'
            key = words[0][:-1]
            # convert from KB to MB
            scaled = float(words[1]) / scale['MB']
            mem[key] = scaled
    else:
        mem['VmSize'] = virtual_mem
        mem['VmRSS'] = phys_mem
    return np.array([mem['VmSize'], mem['VmRSS']])
def characters(self, ch):
    """ Method called whenever characters are detected in an xml node.
        This method does some dtd validation. This method is only called
        by the SAX parser itself.

        Parameters
        ----------
        ch : unicode characters

        Returns
        -------
        None
    """
    target = None
    char = str(ch).strip()
    if char.isspace() or not char:
        return
    # determine which class the data are getting written to
    if self.inUtil:
        target = self.Util
    elif self.inBDP:
        target = self.BDP
    elif self.inAT:
        target = self.curAT
    elif self.inSummaryEntry:
        target = self.summaryEntry
    elif self.inSummary:
        target = self.summaryData
    else:
        target = self.admit
    # a list or dictionary has to be decoded
    if isinstance(self.type, list) or isinstance(self.type, dict) \
       or isinstance(self.type, tuple) or isinstance(self.type, set) \
       or isinstance(self.type, np.ndarray) or isinstance(self.type, str):
        if self.inflow:
            self.flowdata += char
        else:
            self.tempdata += char
    else:
        # check the version
        if self.name == "_version":
            ver = self.getattr(target, self.name)
            vercheck = utils.compareversions(ver, str(char))
            if vercheck < 0:
                # newer read in
                logging.warning("Version mismatch for %s, data are a newer version than current software, attempting to continue." % target.getkey("_type"))
            elif vercheck > 0:
                # older read in
                logging.warning("Version mismatch for %s, data are an older version than current software, attempting to continue." % target.getkey("_type"))
        else:
            try:
                self.setattr(target, self.name, self.getData(char))
            except AttributeError:
                logging.info("Data member %s is not a member of %s. This may be due to a version mismatch between the data and your software, attempting to continue." % (self.name, str(type(target))))
            except:
                raise
    del ch
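# Illustrative sketch (not part of the parser above, and the handler/element names are
# hypothetical): SAX may deliver the text of a single element through several
# characters() callbacks, which is why the handler above accumulates chunks into
# tempdata/flowdata and only decodes them once the element ends.
import xml.sax

class _ChunkDemoHandler(xml.sax.ContentHandler):
    def __init__(self):
        xml.sax.ContentHandler.__init__(self)
        self.buffer = ""

    def characters(self, content):
        # may be called more than once per element; just accumulate
        self.buffer += content

    def endElement(self, name):
        print("element %s -> %r" % (name, self.buffer.strip()))
        self.buffer = ""

xml.sax.parseString(b"<a>hello world</a>", _ChunkDemoHandler())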
def map_to_slit(fname, clip=0.0, gamma=1.0):
    """take all values from a map over clip, compute best slit for PV Slice
    """
    ia = taskinit.iatool()
    ia.open(fname)
    imshape = ia.shape()
    pix = ia.getchunk().squeeze()     # this should now be a numpy pix[ix][iy] map
    pixmax = pix.max()
    pixrms = pix.std()
    if False:
        pix1 = pix.flatten()
        rpix = stats.robust(pix1)
        logging.debug("stats: mean: %g %g" % (pix1.mean(), rpix.mean()))
        logging.debug("stats: rms:  %g %g" % (pix1.std(), rpix.std()))
        logging.debug("stats: max:  %g %g" % (pix1.max(), rpix.max()))
        logging.debug('shape: %s %s %s' % (str(pix.shape), str(pix1.shape), str(imshape)))
    ia.close()
    nx = pix.shape[0]
    ny = pix.shape[1]
    x = np.arange(pix.shape[0]).reshape((nx, 1))
    y = np.arange(pix.shape[1]).reshape((1, ny))
    if clip > 0.0:
        nmax = nx * ny
        clip = clip * pixrms
        logging.debug("Using initial clip=%g for rms=%g" % (clip, pixrms))
        m = ma.masked_less(pix, clip)
        while m.count() == 0:
            clip = 0.5 * clip
            logging.debug("no masking...trying lower clip=%g" % clip)
            m = ma.masked_less(pix, clip)
        else:
            logging.debug("Clip=%g now found %d/%d points" % (clip, m.count(), nmax))
    else:
        #@ todo   sigma-clipping with iterations?   see also astropy.stats.sigma_clip()
        rpix = stats.robust(pix.flatten())
        r_mean = rpix.mean()
        r_std = rpix.std()
        logging.info("ROBUST MAP mean/std: %f %f" % (r_mean, r_std))
        m = ma.masked_less(pix, -clip * r_std)
        logging.debug("Found > clip=%g : %g" % (clip, m.count()))
    if m.count() == 0:
        logging.warning("Returning a dummy slit, no points above clip %g" % clip)
        edge = 3.0
        #slit = [edge,0.5*ny,nx-1.0-edge,0.5*ny]
        # @todo file a bug, this failed
        # RuntimeError: (/var/rpmbuild/BUILD/casa-test/casa-test-4.5.7/code/imageanalysis/ImageAnalysis/PVGenerator.cc : 334)
        #   Failed AlwaysAssert abs( (endPixRot[0] - startPixRot[0]) - sqrt(xdiff*xdiff + ydiff*ydiff) ) < 1e-6
        slit = [edge, 0.5 * ny - 0.1, nx - 1.0 - edge, 0.5 * ny + 0.1]
    else:
        slit = convert_to_slit(m, x, y, nx, ny, gamma)
    return (slit, clip)
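# A minimal, self-contained sketch of what a convert_to_slit()-style helper could do
# (the real ADMIT implementation is not shown in this excerpt, so the details below are
# an assumption): use the intensity-weighted moments of the unmasked pixels to find the
# emission centroid and the position angle of the major axis, then return a slit through
# the centroid along that axis.
import numpy as np
import numpy.ma as ma

def slit_from_moments(m):
    """m : 2D masked array of intensities; returns [x0, y0, x1, y1] along the major axis."""
    w = np.clip(ma.filled(m, 0.0), 0.0, None)    # positive emission as weights
    xx, yy = np.indices(m.shape)                 # xx varies along axis 0, yy along axis 1
    wsum = w.sum()
    xc = (w * xx).sum() / wsum                   # intensity-weighted centroid
    yc = (w * yy).sum() / wsum
    # second moments -> principal axis (moment of inertia)
    sxx = (w * (xx - xc) ** 2).sum() / wsum
    syy = (w * (yy - yc) ** 2).sum() / wsum
    sxy = (w * (xx - xc) * (yy - yc)).sum() / wsum
    theta = 0.5 * np.arctan2(2.0 * sxy, sxx - syy)
    half = 0.4 * min(m.shape)                    # arbitrary half-length for this sketch
    return [xc - half * np.cos(theta), yc - half * np.sin(theta),
            xc + half * np.cos(theta), yc + half * np.sin(theta)]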
def fitgauss1D(x, y, par=None, width=-1.0):
    """ Method for fitting a 1D gaussian to a spectral line

        Parameters
        ----------
        x : array like
            The x co-ordinates of the spectrum; note the center of the
            spectral line should be near 0.0 if possible.

        y : array like
            The y co-ordinates (intensity) of the spectrum.

        par : array like
            The initial guesses for the fit parameters; the fitter works best
            if the center parameter is near 0.0.
            3 parameters: PeakY, CenterX, FWHM.

        width : float
            If positive, this is the assumed width (or step) in the x array,
            which is needed if only 1 point is given. Otherwise ignored.

        Returns
        -------
        A tuple containing the best fit parameters (as a list) and the
        covariance of the parameters (also as a list)
    """
    if len(x) == 3:
        logging.info("Gaussian fit attempted with only three points, look at the covariance for goodness of fit.")
    # if there are too few points to fit then just conserve the area of the
    # channels to calculate the parameters
    if len(x) < 3:
        logging.info("Gaussian fit attempted with fewer than three points (%d). Using conservation of area method to determine parameters." % len(x))
        params = fitgauss1Dm(x, y, dx=width)
        covar = [1000.] * len(params)
    else:
        try:
            params, covar = curve_fit(gaussian1D, x, y, p0=par)
        # if the covariance cannot be determined, just return the initial values
        except RuntimeError, e:
            if "Optimal" in str(e):
                params = par
                covar = [0] * len(par)
            # otherwise re-raise the exception
            else:
                raise
    return params, covar
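# The model function gaussian1D() is not shown in this excerpt; a common parameterization
# consistent with the (PeakY, CenterX, FWHM) convention in the docstring above would be
# the sketch below (an assumption, not necessarily ADMIT's exact definition), together
# with a minimal curve_fit usage example on synthetic data.
import numpy as np
from scipy.optimize import curve_fit

def gaussian1D_demo(x, peak, center, fwhm):
    """Gaussian parameterized by peak value, center and FWHM."""
    sigma = fwhm / (2.0 * np.sqrt(2.0 * np.log(2.0)))
    return peak * np.exp(-0.5 * ((x - center) / sigma) ** 2)

x = np.linspace(-10.0, 10.0, 101)
y = gaussian1D_demo(x, 2.0, 0.5, 3.0) + np.random.normal(0.0, 0.05, x.size)
params, covar = curve_fit(gaussian1D_demo, x, y, p0=[1.0, 0.0, 2.0])
print("fit: peak=%.2f center=%.2f fwhm=%.2f" % tuple(params))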
def test_info(self):
    msg = "unit_test_info_message"
    Alogging.info(msg)
    found = False
    r = open(self.logfile, 'r')
    for line in r.readlines():
        if msg in line:
            if (self.verbose):
                print "\nFound message > ", line
            found = True
            r.close()
            break
    self.assertTrue(found)
def test_effectiveLevel(self):
    msg = "unit_test_levels_message"
    # check that the logging level is what is expected
    level = Alogging.getEffectiveLevel()
    self.assertTrue(level == self.level)
    # set the level to a new value and check again
    Alogging.setLevel(50)
    level = Alogging.getEffectiveLevel()
    self.assertTrue(level == 50)
    # log an info message which is below the logging level, this message should not appear
    # in the logs
    Alogging.info(msg)
    found = False
    r = open(self.logfile, 'r')
    for line in r.readlines():
        if msg in line:
            if (self.verbose):
                print "\nFound message >", line
            found = True
            break
    r.close()
    self.assertFalse(found)
    Alogging.setLevel(self.level)     # reset the logging level
    msg += "2"
    # log an info message, which is now above the logging level, this message should appear
    # in the logs
    Alogging.info(msg)
    found = False
    r = open(self.logfile, 'r')
    for line in r.readlines():
        if msg in line:
            if (self.verbose):
                print "\nFound message >", line
            found = True
            r.close()
            break
    self.assertTrue(found)
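# For reference, the stdlib logging module shows the level-filtering behavior this test
# relies on (assuming ADMIT's Alogging follows the standard semantics, where 50
# corresponds to logging.CRITICAL): messages below the effective level are dropped.
import logging as _stdlog

_log = _stdlog.getLogger("level_demo")
_log.addHandler(_stdlog.StreamHandler())
_log.setLevel(_stdlog.INFO)
_log.info("visible: level is INFO")        # emitted
_log.setLevel(50)                          # i.e. logging.CRITICAL
_log.info("hidden: level is CRITICAL")     # filtered out, never reaches the handler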
def checkAttribute(self, name, attrib, value=None):
    """ Method to check an attribute for validity. Validity includes
        correct name and data type.

        Parameters
        ----------
        name : str
            The name of the node being checked

        attrib : str
            The attribute of the node being checked, if any.

        value : str
            The type of the attribute being checked (e.g. bt.INT)
            Default: None
    """
    # check an attribute for validity
    try:
        if not value in self.entities[name]["attrib"][attrib]["values"] \
           and not "ANY" in self.entities[name]["attrib"][attrib]["values"]:
            raise Exception("DTDParser.checkAttributes: Value %s for attribute %s is not a valid entry (file %s)" % (value, name, self.xmlFile))
        self.entities[name]["attrib"][attrib]["found"] = True
    except KeyError:
        logging.info("Attribute %s not listed in DTD, malformed xml detected (%s)" % (attrib, self.xmlFile))
        logging.info("Inconsistency between dtd and xml detected, continuing")
    except:
        logging.info("Unknown error encountered while parsing attribute %s (%s)" % (attrib, self.xmlFile))
        raise
def run(self):
    """ running the File_AT task
    """
    # grab and check essential keywords
    filename = self.getkey('file')
    logging.info("file=%s" % filename)
    if len(filename) == 0:
        raise Exception,'File_AT: no file= given'
    exist = self.getkey('exist')
    if exist:
        # logging.warning("no checking now")
        # self._bdp_in[0].checkfiles()
        pass
    # create the BDP
    bdp1 = File_BDP(filename)
    bdp1.filename = filename
    self.addoutput(bdp1)
    # touch the file if desired
    if self.getkey('touch'):
        bdp1.touch()
def check(self, name, attrib=None, value=None):
    """ Method to check a node for validity. Validity includes correct
        name and data type.

        Parameters
        ----------
        name : str
            The name of the node being checked

        attrib : str
            The attribute of the node being checked, if any.
            Default: None

        value : str
            The type of the attribute being checked (e.g. bt.INT)
            Default: None
    """
    # check a node for validity
    try:
        # note that the node has been found
        self.entities[name]["found"] = True
        # if there is an attribute specified then check it too
        # if the attribute was not expected just print a note to the screen
        if attrib is not None:
            try:
                if not value in self.entities[name]["attrib"][attrib]["values"] \
                   and not "ANY" in self.entities[name]["attrib"][attrib]["values"]:
                    raise Exception("DTDParser.check: Value %s for attribute %s is not a valid entry (attribute = %s) (file %s)" % (value, name, attrib, self.xmlFile))
                self.entities[name]["attrib"][attrib]["found"] = True
            except KeyError:
                logging.info("Attribute %s for %s not listed in DTD, malformed xml detected (%s)" % (attrib, name, self.xmlFile))
                logging.info("Inconsistency between dtd and xml detected, continuing")
            except:
                logging.info("Unknown error encountered while parsing attribute %s for %s (%s)" % (attrib, name, self.xmlFile))
                raise
    except KeyError:
        logging.info("Data member %s is not a member of the dtd, xml inconsistent with definition (%s)" % (name, self.xmlFile))
    except:
        raise
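# A minimal sketch of the bookkeeping structure the checkers above appear to rely on
# (the exact layout is an assumption inferred from the lookups): each node name maps to
# a "found" flag plus a dictionary of attributes, each with its allowed values and its
# own "found" flag.  checkAll() then simply verifies every flag was set while parsing.
entities_example = {
    "admit": {
        "found": False,
        "attrib": {
            "type": {"values": ["bt.INT", "bt.STRING", "ANY"], "found": False},
        },
    },
}

# during parsing one would mark what was seen, e.g.:
entities_example["admit"]["found"] = True
entities_example["admit"]["attrib"]["type"]["found"] = True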
def run(self):
    """ The run method creates the BDP

        Parameters
        ----------
        None

        Returns
        -------
        None
    """
    dt = utils.Dtime("Export")                # tagging time
    basename = self.getkey("basename")

    nbdp = len(self._bdp_in)
    logging.info("Found %d input BDPs" % nbdp)
    if nbdp > 1:
        logging.info("Only dealing with 1 BDP now")

    b1 = self._bdp_in[0]                      # image/cube
    infile = b1.getimagefile(bt.CASA)         # ADMIT filename of the image (cube)
    if len(basename) == 0:
        fitsname = self.mkext(infile, 'fits') # morph to the new output name with replaced extension 'fits'
        image_out = self.dir(fitsname)        # absolute filename
    else:
        if basename[0:2] == './' or basename[0] == '/':
            image_out = basename + ".fits"
        else:
            image_out = self.dir(basename + ".fits")

    dt.tag("start")
    logging.info("Writing FITS %s" % image_out)
    # @todo check self.dir(image_out)
    casa.exportfits(self.dir(infile), image_out, overwrite=True)
    dt.tag("done")
    dt.end()
def run(self):
    """ The run method creates the BDP.

        Parameters
        ----------
        None

        Returns
        -------
        None
    """
    dt = utils.Dtime("ContinuumSub")          # tagging time
    self._summary = {}                        # an ADMIT summary will be created here

    contsub = self.getkey("contsub")
    pad = self.getkey("pad")
    fitorder = self.getkey("fitorder")

    # x.im -> x.cim + x.lim
    # b1  = input spw BDP
    # b1a = optional input {Segment,Line}List
    # b1b = optional input Cont Map (now deprecated)
    # b2  = output line cube
    # b3  = output cont map

    b1 = self._bdp_in[0]
    f1 = b1.getimagefile(bt.CASA)

    b1a = self._bdp_in[1]
    # b1b = self._bdp_in[2]
    b1b = None                                # do not allow continuum maps to be input

    f2 = self.mkext(f1, 'lim')
    f3 = self.mkext(f1, 'cim')
    f3a = self.mkext(f1, 'cim3d')             # temporary cube name, map is needed
    b2 = SpwCube_BDP(f2)
    b3 = Image_BDP(f3)
    self.addoutput(b2)
    self.addoutput(b3)

    taskinit.ia.open(self.dir(f1))
    s = taskinit.ia.summary()
    nchan = s['shape'][2]                     # ingest has guaranteed this to be the spectral axis

    if b1a != None:                           # if a LineList was given, use that
        if len(b1a.table) > 0:
            # this section of code actually works for len(ch0)==0 as well
            #
            ch0 = b1a.table.getFullColumnByName("startchan")
            ch1 = b1a.table.getFullColumnByName("endchan")
            if pad != 0:                      # can widen or narrow the segments
                if pad > 0:
                    logging.info("pad=%d to widen the segments" % pad)
                else:
                    logging.info("pad=%d to narrow the segments" % pad)
                ch0 = np.where(ch0 - pad < 0, 0, ch0 - pad)
                ch1 = np.where(ch1 + pad >= nchan, nchan - 1, ch1 + pad)
            s = Segments(ch0, ch1, nchan=nchan)
            ch = s.getchannels(True)          # take the complement of lines as the continuum
        else:
            ch = range(nchan)                 # no lines? take everything as continuum (probably bad)
            logging.warning("All channels taken as continuum. Are you sure?")
    elif len(contsub) > 0:                    # else if contsub[] was supplied manually
        s = Segments(contsub, nchan=nchan)
        ch = s.getchannels()
    else:
        raise Exception, "No contsub= or input LineList given"

    if len(ch) > 0:
        taskinit.ia.open(self.dir(f1))
        taskinit.ia.continuumsub(outline=self.dir(f2), outcont=self.dir(f3a),
                                 channels=ch, fitorder=fitorder)
        taskinit.ia.close()
        dt.tag("continuumsub")
        casa.immoments(self.dir(f3a), -1, outfile=self.dir(f3))   # mean of the continuum cube (f3a)
        utils.remove(self.dir(f3a))                               # is the continuum map (f3)
        dt.tag("immoments")
        if b1b != None:
            # this option is now deprecated (see above, by setting b1b = None), no user option allowed
            # there is likely a mis-match in the beam, given how they are produced. So it's safer to
            # remove this here, and force the flow to smooth manually
            print "Adding back in a continuum map"
            f1b = b1b.getimagefile(bt.CASA)
            f1c = self.mkext(f1, 'sum')
            # @todo   notice we are not checking for conforming mapsize and WCS
            #         and let CASA fail out if we've been bad.
            casa.immath([self.dir(f3), self.dir(f1b)], 'evalexpr', self.dir(f1c), 'IM0+IM1')
            utils.rename(self.dir(f1c), self.dir(f3))
            dt.tag("immath")
    else:
        raise Exception, "No channels left to determine continuum. pad=%d too large?" % pad

    # regression
    rdata = casautil.getdata(self.dir(f3)).data
    logging.regression("CSUB: %f %f" % (rdata.min(), rdata.max()))

    # Create two output images for html and their thumbnails, too
    implot = ImPlot(ptype=self._plot_type, pmode=self._plot_mode, abspath=self.dir())
    implot.plotter(rasterfile=f3, figname=f3, colorwedge=True)
    figname = implot.getFigure(figno=implot.figno, relative=True)
    thumbname = implot.getThumbnail(figno=implot.figno, relative=True)

    b2.setkey("image", Image(images={bt.CASA: f2}))
    b3.setkey("image", Image(images={bt.CASA: f3, bt.PNG: figname}))
    dt.tag("implot")

    if len(ch) > 0:
        taskargs = "pad=%d fitorder=%d contsub=%s" % (pad, fitorder, str(contsub))
        imcaption = "Continuum map"
        self._summary["continuumsub"] = SummaryEntry([figname, thumbname, imcaption],
                                                     "ContinuumSub_AT", self.id(True), taskargs)

    dt.tag("done")
    dt.end()
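# A small, self-contained sketch of the channel bookkeeping used above (the real work is
# done by admit.util.segments.Segments, so the helper below only illustrates the idea):
# given line segments as (startchan, endchan) pairs, the continuum channels are the
# complement of the union of those segments.
import numpy as np

def continuum_channels(segments, nchan):
    """segments : list of (c0, c1) inclusive channel ranges flagged as line emission."""
    mask = np.zeros(nchan, dtype=bool)
    for c0, c1 in segments:
        mask[max(c0, 0):min(c1, nchan - 1) + 1] = True
    return np.where(~mask)[0].tolist()

print(continuum_channels([(16, 32), (40, 45)], 64))   # channels outside the line segments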
def run(self):
    """ Main program for OverlapIntegral
    """
    dt = utils.Dtime("OverlapIntegral")
    self._summary = {}
    chans = self.getkey("chans")
    cmap = self.getkey("cmap")
    normalize = self.getkey("normalize")
    doCross = True
    doCross = False
    myplot = APlot(pmode=self._plot_mode, ptype=self._plot_type, abspath=self.dir())

    dt.tag("start")
    n = len(self._bdp_in)
    if n == 0:
        raise Exception, "Need at least 1 input Image_BDP "
    logging.debug("Processing %d input maps" % n)
    data = range(n)     # array in which each element is placeholder for the data
    mdata = range(n)    # array to hold the max in each array
    summarytable = admit.util.Table()
    summarytable.columns = ["File name", "Spectral Line ID"]
    summarytable.description = "Images used in Overlap Integral"
    for i in range(n):
        bdpfile = self._bdp_in[i].getimagefile(bt.CASA)
        if hasattr(self._bdp_in[i], "line"):
            line = getattr(self._bdp_in[i], "line")
            logging.info("Map %d: %s" % (i, line.uid))
            lineid = line.uid
        else:
            lineid = "no line"
        data[i] = casautil.getdata(self.dir(bdpfile), chans)
        mdata[i] = data[i].max()
        logging.info("shape[%d] = %s with %d good data" % (i, data[i].shape, data[i].count()))
        if i == 0:
            shape = data[i].shape
            outfile = self.mkext("testOI", "oi")
        else:
            if shape != data[i].shape:
                raise Exception, "Shapes not the same, cannot overlap them"
        # collect the file names and line identifications for the summary
        summarytable.addRow([bdpfile, lineid])
    logging.regression("OI: %s" % str(mdata))
    if len(shape) > 2 and shape[2] > 1:
        raise Exception, "Cannot handle 3D cubes yet"
    if doCross:
        # debug: produce all cross-corr's of the N input maps (expensive!)
        crossn(data, myplot)
        dt.tag("crossn")
    b1 = Image_BDP(outfile)
    self.addoutput(b1)
    b1.setkey("image", Image(images={bt.CASA: outfile}))
    dt.tag("open")
    useClone = True
    # to create an output dataset, clone the first input, but using the chans=ch0~ch1
    # e.g. using imsubimage(infile,outfile=,chans=
    if len(chans) > 0:
        # ia.regrid() doesn't have the chans=
        taskinit.ia.open(self.dir(self._bdp_in[0].getimagefile(bt.CASA)))
        taskinit.ia.regrid(outfile=self.dir(outfile))
        taskinit.ia.close()
    else:
        # 2D for now
        if not useClone:
            logging.info("OVERLAP out=%s" % outfile)
            taskinit.ia.fromimage(infile=self.dir(self._bdp_in[0].getimagefile(bt.CASA)),
                                  outfile=self.dir(outfile), overwrite=True)
            taskinit.ia.close()
            dt.tag("fromimage")

    if n == 3:
        # RGB
        logging.info("RGB mode")
        out = rgb1(data[0], data[1], data[2], normalize)
    else:
        # simple sum
        out = data[0]
        for i in range(1, n):
            out = out + data[i]

    if useClone:
        casautil.putdata_raw(self.dir(outfile), out,
                             clone=self.dir(self._bdp_in[0].getimagefile(bt.CASA)))
    else:
        taskinit.ia.open(self.dir(outfile))
        s1 = taskinit.ia.shape()
        s0 = [0, 0, 0, 0]
        r1 = taskinit.rg.box(blc=s0, trc=s1)
        pixeldata = out.data
        pixelmask = ~out.mask
        taskinit.ia.putregion(pixels=pixeldata, pixelmask=pixelmask, region=r1)
        taskinit.ia.close()

    title = "OverlapIntegral"
    pdata = np.rot90(out.squeeze())
    logging.info("PDATA: %s" % str(pdata.shape))

    myplot.map1(pdata, title, "testOI", thumbnail=True, cmap=cmap)

    #-----------------------------
    # Populate summary information
    #-----------------------------
    taskargs = "chans=%s cmap=%s" % (chans, cmap)
    imname = ""
    thumbnailname = ""
    # uncomment when ready.
    imname = myplot.getFigure(figno=myplot.figno, relative=True)
    thumbnailname = myplot.getThumbnail(figno=myplot.figno, relative=True)
    #@todo fill in caption with more info - line names, etc.
    caption = "Need descriptive caption here"
    summaryinfo = [summarytable.serialize(), imname, thumbnailname, caption]
    self._summary["overlap"] = SummaryEntry(summaryinfo,
                                            "OverlapIntegral_AT",
                                            self.id(True), taskargs)
    #-----------------------------
    dt.tag("done")
    dt.end()
def run(self): """Runs the task. Parameters ---------- None Returns ------- None """ self._summary = {} dt = utils.Dtime("CubeStats") #maxvrms = 2.0 # maximum variation in rms allowed (hardcoded for now) #maxvrms = -1.0 # turn maximum variation in rms allowed off maxvrms = self.getkey("maxvrms") psample = -1 psample = self.getkey("psample") # BDP's used : # b1 = input BDP # b2 = output BDP b1 = self._bdp_in[0] fin = b1.getimagefile(bt.CASA) bdp_name = self.mkext(fin,'cst') b2 = CubeStats_BDP(bdp_name) self.addoutput(b2) # PeakPointPlot use_ppp = self.getkey("ppp") # peakstats: not enabled for mortal users yet # peakstats = (psample=1, numsigma=4, minchan=3, maxgap=2, peakfit=False) pnumsigma = 4 minchan = 3 maxgap = 2 peakfit = False # True will enable a true gaussian fit # numsigma: adding all signal > numsigma ; not user enabled; for peaksum. numsigma = -1.0 numsigma = 3.0 # grab the new robust statistics. If this is used, 'rms' will be the RMS, # else we will use RMS = 1.4826*MAD (MAD does a decent job on outliers as well) # and was the only method available before CASA 4.4 when robust was implemented robust = self.getkey("robust") rargs = casautil.parse_robust(robust) nrargs = len(rargs) if nrargs == 0: sumrargs = "medabsdevmed" # for the summary, indicate the default robust else: sumrargs = str(rargs) self._summary["rmsmethd"] = SummaryEntry([sumrargs,fin],"CubeStats_AT",self.id(True)) #@todo think about using this instead of putting 'fin' in all the SummaryEntry #self._summary["casaimage"] = SummaryEntry(fin,"CubeStats_AT",self.id(True)) # extra CASA call to get the freq's in GHz, as these are not in imstat1{} # @todo what if the coordinates are not in FREQ ? # Note: CAS-7648 bug on 3D cubes if False: # csys method ia.open(self.dir(fin)) csys = ia.coordsys() spec_axis = csys.findaxisbyname("spectral") # ieck, we need a valid position, or else it will come back and "Exception: All selected pixels are masked" #freqs = ia.getprofile(spec_axis, region=rg.box([0,0],[0,0]))['coords']/1e9 #freqs = ia.getprofile(spec_axis)['coords']/1e9 freqs = ia.getprofile(spec_axis,unit="GHz")['coords'] dt.tag("getprofile") else: # old imval method #imval0 = casa.imval(self.dir(fin),box='0,0,0,0') # this fails on 3D imval0 = casa.imval(self.dir(fin)) freqs = imval0['coords'].transpose()[2]/1e9 dt.tag("imval") nchan = len(freqs) chans = np.arange(nchan) # call CASA to get what we want # imstat0 is the whole cube, imstat1 the plane based statistics # warning: certain robust stats (**rargs) on the whole cube are going to be very slow dt.tag("start") imstat0 = casa.imstat(self.dir(fin), logfile=self.dir('imstat0.logfile'),append=False,**rargs) dt.tag("imstat0") imstat1 = casa.imstat(self.dir(fin),axes=[0,1],logfile=self.dir('imstat1.logfile'),append=False,**rargs) dt.tag("imstat1") # imm = casa.immoments(self.dir(fin),axis='spec', moments=8, outfile=self.dir('ppp.im')) if nrargs > 0: # need to get the peaks without rubust imstat10 = casa.imstat(self.dir(fin), logfile=self.dir('imstat0.logfile'),append=True) dt.tag("imstat10") imstat11 = casa.imstat(self.dir(fin),axes=[0,1],logfile=self.dir('imstat1.logfile'),append=True) dt.tag("imstat11") # grab the relevant plane-based things from imstat1 if nrargs == 0: mean = imstat1["mean"] sigma = imstat1["medabsdevmed"]*1.4826 # see also: astropy.stats.median_absolute_deviation() peakval = imstat1["max"] minval = imstat1["min"] else: mean = imstat1["mean"] sigma = imstat1["rms"] peakval = imstat11["max"] minval = imstat11["min"] if True: # work around a bug in 
imstat(axes=[0,1]) for last channel [CAS-7697] for i in range(len(sigma)): if sigma[i] == 0.0: minval[i] = peakval[i] = 0.0 # too many variations in the RMS ? sigma_pos = sigma[np.where(sigma>0)] smin = sigma_pos.min() smax = sigma_pos.max() logging.info("sigma varies from %f to %f; %d/%d channels ok" % (smin,smax,len(sigma_pos),len(sigma))) if maxvrms > 0: if smax/smin > maxvrms: cliprms = smin * maxvrms logging.warning("sigma varies too much, going to clip to %g (%g > %g)" % (cliprms, smax/smin, maxvrms)) sigma = np.where(sigma < cliprms, sigma, cliprms) # @todo (and check again) for foobar.fits all sigma's became 0 when robust was selected # was this with mask=True/False? # PeakPointPlot (can be expensive, hence the option) if use_ppp: logging.info("Computing MaxPos for PeakPointPlot") xpos = np.zeros(nchan) ypos = np.zeros(nchan) peaksum = np.zeros(nchan) ia.open(self.dir(fin)) for i in range(nchan): if sigma[i] > 0.0: plane = ia.getchunk(blc=[0,0,i,-1],trc=[-1,-1,i,-1],dropdeg=True) v = ma.masked_invalid(plane) v_abs = np.absolute(v) max = np.unravel_index(v_abs.argmax(), v_abs.shape) xpos[i] = max[0] ypos[i] = max[1] if numsigma > 0.0: peaksum[i] = ma.masked_less(v,numsigma * sigma[i]).sum() peaksum = np.nan_to_num(peaksum) # put 0's where nan's are found ia.close() dt.tag("ppp") nzeros = len(np.where(sigma<=0.0)) if nzeros > 0: zeroch = np.where(sigma<=0.0) logging.warning("There are %d fully masked channels (%s)" % (nzeros,str(zeroch))) # construct the admit Table for CubeStats_BDP # note data needs to be a tuple, later to be column_stack'd if use_ppp: labels = ["channel" ,"frequency" ,"mean" ,"sigma" ,"max" ,"maxposx" ,"maxposy" ,"min", "peaksum"] units = ["number" ,"GHz" ,"Jy/beam" ,"Jy/beam" ,"Jy/beam" ,"number" ,"number" ,"Jy/beam", "Jy"] data = (chans ,freqs ,mean ,sigma ,peakval ,xpos ,ypos ,minval, peaksum) else: labels = ["channel" ,"frequency" ,"mean" ,"sigma" ,"max" ,"min"] units = ["number" ,"GHz" ,"Jy/beam" ,"Jy/beam" ,"Jy/beam" ,"Jy/beam"] data = (chans ,freqs ,mean ,sigma ,peakval ,minval) table = Table(columns=labels,units=units,data=np.column_stack(data)) b2.setkey("table",table) # get the full cube statistics, it depends if robust was pre-selected if nrargs == 0: mean0 = imstat0["mean"][0] sigma0 = imstat0["medabsdevmed"][0]*1.4826 peak0 = imstat0["max"][0] b2.setkey("mean" , float(mean0)) b2.setkey("sigma", float(sigma0)) b2.setkey("minval",float(imstat0["min"][0])) b2.setkey("maxval",float(imstat0["max"][0])) b2.setkey("minpos",imstat0["minpos"][:3].tolist()) #? [] or array(..dtype=int32) ?? b2.setkey("maxpos",imstat0["maxpos"][:3].tolist()) #? [] or array(..dtype=int32) ?? logging.info("CubeMax: %f @ %s" % (imstat0["max"][0],str(imstat0["maxpos"]))) logging.info("CubeMin: %f @ %s" % (imstat0["min"][0],str(imstat0["minpos"]))) logging.info("CubeRMS: %f" % sigma0) else: mean0 = imstat0["mean"][0] sigma0 = imstat0["rms"][0] peak0 = imstat10["max"][0] b2.setkey("mean" , float(mean0)) b2.setkey("sigma", float(sigma0)) b2.setkey("minval",float(imstat10["min"][0])) b2.setkey("maxval",float(imstat10["max"][0])) b2.setkey("minpos",imstat10["minpos"][:3].tolist()) #? [] or array(..dtype=int32) ?? b2.setkey("maxpos",imstat10["maxpos"][:3].tolist()) #? [] or array(..dtype=int32) ?? 
logging.info("CubeMax: %f @ %s" % (imstat10["max"][0],str(imstat10["maxpos"]))) logging.info("CubeMin: %f @ %s" % (imstat10["min"][0],str(imstat10["minpos"]))) logging.info("CubeRMS: %f" % sigma0) b2.setkey("robust",robust) rms_ratio = imstat0["rms"][0]/sigma0 logging.info("RMS Sanity check %f" % rms_ratio) if rms_ratio > 1.5: logging.warning("RMS sanity check = %f. Either bad sidelobes, lotsa signal, or both" % rms_ratio) logging.regression("CST: %f %f" % (sigma0, rms_ratio)) # plots: no plots need to be made when nchan=1 for continuum # however we could make a histogram, overlaying the "best" gauss so # signal deviations are clear? logging.info('mean,rms,S/N=%f %f %f' % (mean0,sigma0,peak0/sigma0)) if nchan == 1: # for a continuum/1-channel we only need to stuff some numbers into the _summary self._summary["chanrms"] = SummaryEntry([float(sigma0), fin], "CubeStats_AT", self.id(True)) self._summary["dynrange"] = SummaryEntry([float(peak0)/float(sigma0), fin], "CubeStats_AT", self.id(True)) self._summary["datamean"] = SummaryEntry([float(mean0), fin], "CubeStats_AT", self.id(True)) else: y1 = np.log10(ma.masked_invalid(peakval)) y2 = np.log10(ma.masked_invalid(sigma)) y3 = y1-y2 y4 = np.log10(ma.masked_invalid(-minval)) y5 = y1-y4 y = [y1,y2,y3,y4] title = 'CubeStats: ' + bdp_name+'_0' xlab = 'Channel' ylab = 'log(Peak,Noise,Peak/Noise)' labels = ['log(peak)','log(rms noise)','log(peak/noise)','log(|minval|)'] myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir()) segp = [[chans[0],chans[nchan-1],math.log10(sigma0),math.log10(sigma0)]] myplot.plotter(chans,y,title,bdp_name+"_0",xlab=xlab,ylab=ylab,segments=segp,labels=labels,thumbnail=True) imfile = myplot.getFigure(figno=myplot.figno,relative=True) thumbfile = myplot.getThumbnail(figno=myplot.figno,relative=True) image0 = Image(images={bt.PNG:imfile},thumbnail=thumbfile,thumbnailtype=bt.PNG,description="CubeStats_0") b2.addimage(image0,"im0") if use_ppp: # new trial for Lee title = 'PeakSum: (numsigma=%.1f)' % (numsigma) ylab = 'Jy*N_ppb' myplot.plotter(chans,[peaksum],title,bdp_name+"_00",xlab=xlab,ylab=ylab,thumbnail=False) if True: # hack ascii table y30 = np.where(sigma > 0, np.log10(peakval/sigma), 0.0) table2 = Table(columns=["freq","log(P/N)"],data=np.column_stack((freqs,y30))) table2.exportTable(self.dir("testCubeStats.tab")) del table2 # the "box" for the "spectrum" is all pixels. Don't know how to # get this except via shape. ia.open(self.dir(fin)) s = ia.summary() ia.close() if 'shape' in s: specbox = (0,0,s['shape'][0],s['shape'][1]) else: specbox = () caption = "Emission characteristics as a function of channel, as derived by CubeStats_AT " caption += "(cyan: global rms," caption += " green: noise per channel," caption += " blue: peak value per channel," caption += " red: peak/noise per channel)." self._summary["spectra"] = SummaryEntry([0, 0, str(specbox), 'Channel', imfile, thumbfile , caption, fin], "CubeStats_AT", self.id(True)) self._summary["chanrms"] = SummaryEntry([float(sigma0), fin], "CubeStats_AT", self.id(True)) # @todo Will imstat["max"][0] always be equal to s['datamax']? If not, why not? 
if 'datamax' in s: self._summary["dynrange"] = SummaryEntry([float(s['datamax']/sigma0), fin], "CubeStats_AT", self.id(True)) else: self._summary["dynrange"] = SummaryEntry([float(imstat0["max"][0]/sigma0), fin], "CubeStats_AT", self.id(True)) self._summary["datamean"] = SummaryEntry([imstat0["mean"][0], fin], "CubeStats_AT", self.id(True)) title = bdp_name + "_1" xlab = 'log(Peak,Noise,P/N)' myplot.histogram([y1,y2,y3],title,bdp_name+"_1",xlab=xlab,thumbnail=True) imfile = myplot.getFigure(figno=myplot.figno,relative=True) thumbfile = myplot.getThumbnail(figno=myplot.figno,relative=True) image1 = Image(images={bt.PNG:imfile},thumbnail=thumbfile,thumbnailtype=bt.PNG,description="CubeStats_1") b2.addimage(image1,"im1") # note that the 'y2' can have been clipped, which can throw off stats.robust() # @todo should set a mask for those. title = bdp_name + "_2" xlab = 'log(Noise))' n = len(y2) ry2 = stats.robust(y2) y2_mean = ry2.mean() y2_std = ry2.std() if n>9: logging.debug("NORMALTEST2: %s" % str(scipy.stats.normaltest(ry2))) myplot.hisplot(y2,title,bdp_name+"_2",xlab=xlab,gauss=[y2_mean,y2_std],thumbnail=True) title = bdp_name + "_3" xlab = 'log(diff[Noise])' n = len(y2) # dy2 = y2[0:-2] - y2[1:-1] dy2 = ma.masked_equal(y2[0:-2] - y2[1:-1],0.0).compressed() rdy2 = stats.robust(dy2) dy2_mean = rdy2.mean() dy2_std = rdy2.std() if n>9: logging.debug("NORMALTEST3: %s" % str(scipy.stats.normaltest(rdy2))) myplot.hisplot(dy2,title,bdp_name+"_3",xlab=xlab,gauss=[dy2_mean,dy2_std],thumbnail=True) title = bdp_name + "_4" xlab = 'log(Signal/Noise))' n = len(y3) ry3 = stats.robust(y3) y3_mean = ry3.mean() y3_std = ry3.std() if n>9: logging.debug("NORMALTEST4: %s" % str(scipy.stats.normaltest(ry3))) myplot.hisplot(y3,title,bdp_name+"_4",xlab=xlab,gauss=[y3_mean,y3_std],thumbnail=True) title = bdp_name + "_5" xlab = 'log(diff[Signal/Noise)])' n = len(y3) dy3 = y3[0:-2] - y3[1:-1] rdy3 = stats.robust(dy3) dy3_mean = rdy3.mean() dy3_std = rdy3.std() if n>9: logging.debug("NORMALTEST5: %s" % str(scipy.stats.normaltest(rdy3))) myplot.hisplot(dy3,title,bdp_name+"_5",xlab=xlab,gauss=[dy3_mean,dy3_std],thumbnail=True) title = bdp_name + "_6" xlab = 'log(Peak+Min)' n = len(y1) ry5 = stats.robust(y5) y5_mean = ry5.mean() y5_std = ry5.std() if n>9: logging.debug("NORMALTEST6: %s" % str(scipy.stats.normaltest(ry5))) myplot.hisplot(y5,title,bdp_name+"_6",xlab=xlab,gauss=[y5_mean,y5_std],thumbnail=True) logging.debug("LogPeak: m,s= %f %f min/max %f %f" % (y1.mean(),y1.std(),y1.min(),y1.max())) logging.debug("LogNoise: m,s= %f %f %f %f min/max %f %f" % (y2.mean(),y2.std(),y2_mean,y2_std,y2.min(),y2.max())) logging.debug("LogDeltaNoise: RMS/sqrt(2)= %f %f " % (dy2.std()/math.sqrt(2),dy2_std/math.sqrt(2))) logging.debug("LogDeltaP/N: RMS/sqrt(2)= %f %f" % (dy3.std()/math.sqrt(2),dy3_std/math.sqrt(2))) logging.debug("LogPeak+Min: robust m,s= %f %f" % (y5_mean,y5_std)) # compute two ratios that should both be near 1.0 if noise is 'normal' ratio = y2.std()/(dy2.std()/math.sqrt(2)) ratio2 = y2_std/(dy2_std/math.sqrt(2)) logging.info("RMS BAD VARIATION RATIO: %f %f" % (ratio,ratio2)) # making PPP plot if nchan > 1 and use_ppp: smax = 10 gamma = 0.75 z0 = peakval/peakval.max() # point sizes s = np.pi * ( smax * (z0**gamma) )**2 cmds = ["grid", "axis equal"] title = "Peak Points per channel" pppimage = bdp_name + '_ppp' myplot.scatter(xpos,ypos,title=title,figname=pppimage,size=s,color=chans,cmds=cmds,thumbnail=True) pppimage = myplot.getFigure(figno=myplot.figno,relative=True) pppthumbnail = 
myplot.getThumbnail(figno=myplot.figno,relative=True) caption = "Peak point plot: Locations of per-channel peaks in the image cube " + fin self._summary["peakpnt"] = SummaryEntry([pppimage, pppthumbnail, caption, fin], "CubeStats_AT", self.id(True)) dt.tag("plotting") # making PeakStats plot if nchan > 1 and psample > 0: logging.info("Computing peakstats") # grab peak,mean and width values for all peaks (pval,mval,wval) = peakstats(self.dir(fin),freqs,sigma0,pnumsigma,minchan,maxgap,psample,peakfit) title = "PeakStats: cutoff = %g" % (sigma0*pnumsigma) xlab = 'Peak value' ylab = 'FWHM (channels)' pppimage = bdp_name + '_peakstats' cval = mval myplot.scatter(pval,wval,title=title,xlab=xlab,ylab=ylab,color=cval,figname=pppimage,thumbnail=False) dt.tag("peakstats") # myplot.final() # pjt debug # all done! dt.tag("done") taskargs = "robust=" + sumrargs if use_ppp: taskargs = taskargs + " ppp=True" else: taskargs = taskargs + " ppp=False" for v in self._summary: self._summary[v].setTaskArgs(taskargs) dt.tag("summary") dt.end()
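# CubeStats_AT above converts CASA's per-plane 'medabsdevmed' to an RMS-equivalent via
# the 1.4826 factor.  A minimal numpy illustration of the same estimator (independent of
# CASA) is sketched below; for Gaussian noise MAD*1.4826 approaches the true standard
# deviation while being far less sensitive to bright outliers (i.e. real signal) than a
# plain std().
import numpy as np

def mad_sigma(plane):
    """Robust noise estimate: 1.4826 * median(|x - median(x)|)."""
    med = np.median(plane)
    return 1.4826 * np.median(np.abs(plane - med))

noise = np.random.normal(0.0, 2.0, 10000)
noise[:100] += 50.0                       # contaminate with 'signal'
print("std=%.2f  mad_sigma=%.2f" % (noise.std(), mad_sigma(noise)))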
def run(self, **keyval):
    """ Method to calculate the continuum from the given masked spectrum.
        If search=True is given as an argument then the algorithm will
        iterate through the different polynomial orders to find the best
        fit, based on noise level.

        Parameters
        ----------
        keyval : dictionary
            Dictionary containing the keyword value pair arguments

        Returns
        -------
        numpy array containing the best fit continuum

        Notes
        -----
        Arguments for the run method:

        - search : bool, whether or not to search for the best fit. Default: False
        - deg : int, the degree of polynomial to use. Default: 1
    """
    # set up the data elements
    args = {"x": self.x,
            "y": ma.fix_invalid(self.y, fill_value=0.0),
            # reverse the weights since a masked array uses True for good values
            # and UnivariateSpline needs a number. The reversal translates the
            # True values to False, which are then interpreted as 0.0
            "w": -self.y.mask}
    # get the given arguments
    search = False
    noise = None
    maxchisq = 3.0
    if "search" in keyval:
        search = keyval["search"]
    if "noise" in keyval:
        noise = keyval["noise"]
    if "chisq" in keyval:
        maxchisq = keyval["chisq"]
    for arg in ["deg"]:
        if arg in keyval:
            args[arg] = keyval[arg]
    # if searching for the best fit
    # limited to 3rd order as 4th and 5th order could fit weak wide lines
    if search:
        chisq = {0: 1000., 1: 1000., 2: 1000., 3: 1000.}
        # iterate over each possible order
        for order in chisq:
            args["deg"] = order
            pfit = np.polyfit(**args)
            if len(pfit) == 1:
                numpar = 1
            else:
                # find the number of free parameters
                # note that if a coefficient is << the max coefficient
                # it is not considered a free parameter as it has very little effect on the fit
                numpar = 0
                mxpar = max(abs(pfit))
                for i in range(len(pfit)):
                    if abs(mxpar / pfit[i]) < 1000.:
                        numpar += 1
            fit = np.polyval(pfit, self.x)
            chisq[order] = (stats.reducedchisquared(self.y, fit, numpar, noise), numpar)
        # find the best fit, based on number of free parameters and chisq
        mv = 1000.
        order = 0
        for k in chisq:
            if chisq[k][0] < mv and (mv - chisq[k][0]) / mv > 0.2:
                mv = chisq[k][0]
                order = k
        if mv > maxchisq:
            logging.warning("No good fit for continuum found")
            return None
        args["deg"] = order
        logging.info("Using polynomial fit of order %i with chi^2 of %f" % (order, mv))
        # do the final fit
        pfit = np.polyfit(**args)
        fit = np.polyval(pfit, self.x)
    else:
        # do the fit with the given parameters
        pfit = ma.polyfit(**args)
        fit = np.polyval(pfit, self.x)
    return fit
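# stats.reducedchisquared() is not shown in this excerpt; the quantity the order search
# above relies on is the standard reduced chi-squared, sketched here under the assumption
# of a single (scalar) noise value per spectrum:
#     chi^2_red = sum((y - fit)^2 / noise^2) / (N - numpar)
import numpy as np

def reduced_chisquared_demo(y, fit, numpar, noise):
    resid = np.asarray(y) - np.asarray(fit)
    dof = len(resid) - numpar
    return float(np.sum((resid / noise) ** 2) / dof)

y = np.array([1.0, 1.2, 0.9, 1.1])
print(reduced_chisquared_demo(y, np.full_like(y, 1.05), numpar=1, noise=0.1))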
def run(self): """Runs the task. Parameters ---------- None Returns ------- None """ self._summary = {} pvslicesummary = [] sumslicetype = 'slice' sliceargs = [] dt = utils.Dtime("PVSlice") # import here, otherwise sphinx cannot parse from impv import impv from imsmooth import imsmooth pvslice = self.getkey('slice') # x_s,y_s,x_e,y_e (start and end of line) pvslit = self.getkey('slit') # x_c,y_c,len,pa (center, length and PA of line) # BDP's used : # b10 = input BDP # b11 = input BDP (moment) # b12 = input BDP (new style cubestats w/ maxpos) # b2 = output BDP b10 = self._bdp_in[0] # input SpwCube fin = b10.getimagefile(bt.CASA) # input name b11 = self._bdp_in[1] # b12 = self._bdp_in[2] clip = self.getkey('clip') # clipping to data for Moment-of-Inertia gamma = self.getkey('gamma') # gamma factor to data for Moment-of-Inertia if b11 != None and len(pvslice) == 0 and len(pvslit) == 0: # if a map (e.g. cubesum ) given, and no slice/slit, get a best pvslice from that (pvslice,clip) = map_to_slit(self.dir(b11.getimagefile(bt.CASA)),clip=clip,gamma=gamma) elif b12 != None and len(pvslice) == 0 and len(pvslit) == 0: # PPP doesn't seem to work too well yet logging.debug("testing new slice computation from a PPP") max = b12.table.getColumnByName("max") maxposx = b12.table.getColumnByName("maxposx") maxposy = b12.table.getColumnByName("maxposy") if maxposx == None: raise Exception,"PPP was not enabled in your CubeStats" (pvslice,clip) = tab_to_slit([maxposx,maxposy,max],clip=clip,gamma=gamma) sliceargs = deepcopy(pvslice) if len(sliceargs)==0: logging.warning("no slice for plot yet") # ugh, this puts single quotes around the numbers formattedslice = str(["%.2f" % a for a in sliceargs]) taskargs = "slice="+formattedslice dt.tag("slice") pvname = self.mkext(fin,'pv') # output image name b2 = PVSlice_BDP(pvname) self.addoutput(b2) width = self.getkey('width') # @todo also: "4arcsec" (can't work since it's a single keyword) if len(pvslice) == 4: start = pvslice[:2] # @todo also allow: ["14h20m20.5s","-30d45m25.4s"] end = pvslice[2:] impv(self.dir(fin), self.dir(pvname),"coords",start=start,end=end,width=width,overwrite=True) elif len(pvslit) == 4: sumslicetype = 'slit' sliceargs = deepcopy(pvslit) formattedslice = str(["%.2f" % a for a in sliceargs]) taskargs = "slit="+formattedslice # length="40arcsec" same as {"value": 40, "unit": "arcsec"}) center = pvslit[:2] # @todo also: ["14h20m20.5s","-30d45m25.4s"]. 
length = pvslit[2] # @todo also: "40arcsec", {"value": 40, "unit": "arcsec"}) if type(pvslit[3]) is float or type(pvslit[3]) is int: pa = "%gdeg" % pvslit[3] else: pa = pvslit[3] impv(self.dir(fin), self.dir(pvname),"length",center=center,length=length,pa=pa,width=width,overwrite=True) else: raise Exception,"no valid input slit= or slice= or bad Moment_BDP input" sliceargs.append(width) taskargs = taskargs + " width=%d" % width dt.tag("impv") smooth = self.getkey('pvsmooth') if len(smooth) > 0: if len(smooth) == 1: smooth.append(smooth[0]) major = '%dpix' % smooth[0] minor = '%dpix' % smooth[1] logging.info("imsmooth PV slice: %s %s" % (major,minor)) imsmooth(self.dir(pvname), outfile=self.dir(pvname)+'.smooth',kernel='boxcar',major=major,minor=minor) dt.tag("imsmooth") # utils.rename(self.dir(pvname)+'.smooth',self.dir(pvname)) # @todo we will keep the smooth PVslice for inspection, no further flow work # get some statistics data = casautil.getdata_raw(self.dir(pvname)) rpix = stats.robust(data.flatten()) r_mean = rpix.mean() r_std = rpix.std() r_max = rpix.max() logging.info("PV stats: mean/std/max %f %f %f" % (r_mean, r_std, r_max)) logging.regression("PVSLICE: %f %f %f" % (r_mean, r_std, r_max)) myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir()) # hack to get a slice on a mom0map # @todo if pmode is not png, can viewer handle this? figname = pvname + ".png" slicename = self.dir(figname) overlay = "pvoverlay" if b11 != None: f11 = b11.getimagefile(bt.CASA) taskinit.tb.open(self.dir(f11)) data = taskinit.tb.getcol('map') nx = data.shape[0] ny = data.shape[1] taskinit.tb.close() d1 = np.flipud(np.rot90 (data.reshape((nx,ny)))) if len(pvslice) == 4: segm = [[pvslice[0],pvslice[2],pvslice[1],pvslice[3]]] pa = np.arctan2(pvslice[2]-pvslice[0],pvslice[1]-pvslice[3])*180.0/np.pi title = "PV Slice location : slice PA=%.1f" % pa elif len(pvslit) == 4: # can only do this now if using pixel coordinates xcen = pvslit[0] ycen = ny-pvslit[1]-1 slen = pvslit[2] pard = pvslit[3]*np.pi/180.0 cosp = np.cos(pard) sinp = np.sin(pard) halflen = 0.5*slen segm = [[xcen-halflen*sinp,xcen+halflen*sinp,ycen-halflen*cosp,ycen+halflen*cosp]] pa = pvslit[3] title = "PV Slice location : slit PA=%g" % pa else: # bogus, some error in pvslice logging.warning("bogus segm since pvslice=%s" % str(pvslice)) segm = [[10,20,10,20]] pa = -999.999 title = "PV Slice location - bad PA" logging.info("MAP1 segm %s %s" % (str(segm),str(pvslice))) if d1.max() < clip: logging.warning("datamax=%g, clip=%g" % (d1.max(), clip)) title = title + ' (no signal over %g?)' % clip myplot.map1(d1,title,overlay,segments=segm,thumbnail=True) else: myplot.map1(d1,title,overlay,segments=segm,range=[clip],thumbnail=True) dt.tag("plot") overlayname = myplot.getFigure(figno=myplot.figno,relative=True) overlaythumbname = myplot.getThumbnail(figno=myplot.figno,relative=True) Qover = True else: Qover = False implot = ImPlot(pmode=self._plot_mode,ptype=self._plot_type,abspath=self.dir()) implot.plotter(rasterfile=pvname, figname=pvname, colorwedge=True) thumbname = implot.getThumbnail(figno=implot.figno,relative=True) figname = implot.getFigure(figno=implot.figno,relative=True) if False: # debug: # # @todo tmp1 is ok, tmp2 is not displaying the whole thing # old style: viewer() seems to plot full image, but imview() wants square pixels? 
casa.viewer(infile=self.dir(pvname), outfile=self.dir('tmp1.pv.png'), gui=False, outformat="png") casa.imview(raster={'file':self.dir(pvname), 'colorwedge' : True, 'scaling':-1}, axes={'y':'Declination'}, out=self.dir('tmp2.pv.png')) # # -> this one works, axes= should be correct # imview(raster={'file':'x.pv', 'colorwedge' : True, 'scaling':-1},axes={'y':'Frequency'}) # # @TODO big fixme, we're going to reuse 'tmp1.pv.png' because implot give a broken view figname = 'tmp1.pv.png' # @todo technically we don't know what map it was overlay'd on.... CubeSum/Moment0 overlaycaption = "Location of position-velocity slice overlaid on a CubeSum map" pvcaption = "Position-velocity diagram through emission centroid" pvimage = Image(images={bt.CASA : pvname, bt.PNG : figname},thumbnail=thumbname,thumbnailtype=bt.PNG, description=pvcaption) b2.setkey("image",pvimage) b2.setkey("mean",float(r_mean)) b2.setkey("sigma",float(r_std)) if Qover: thispvsummary = [sumslicetype,sliceargs,figname,thumbname,pvcaption,overlayname,overlaythumbname,overlaycaption,pvname,fin] else: thispvsummary = [sumslicetype,sliceargs,figname,thumbname,pvcaption,pvname,fin] # Yes, this is a nested list. Against the day when PVSLICE can # compute multiple slices per map. pvslicesummary.append(thispvsummary) self._summary["pvslices"] = SummaryEntry(pvslicesummary,"PVSlice_AT",self.id(True),taskargs) dt.tag("done") dt.end()
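# For reference, converting a slit given as (xcenter, ycenter, length, pa) into the two
# slice endpoints uses the same geometry as the overlay code above (in this sketch the
# position angle is measured from the +y axis toward +x); a minimal standalone version:
import numpy as np

def slit_to_endpoints(xcen, ycen, length, pa_deg):
    pard = pa_deg * np.pi / 180.0
    half = 0.5 * length
    sinp, cosp = np.sin(pard), np.cos(pard)
    return (xcen - half * sinp, ycen - half * cosp,
            xcen + half * sinp, ycen + half * cosp)

print(slit_to_endpoints(64.0, 64.0, 40.0, 30.0))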
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ self._summary = {} dt = utils.Dtime("Smooth") dt.tag("start") # get the input keys bmaj = self.getkey("bmaj") bmin = self.getkey("bmin") bpa = self.getkey("bpa") velres = self.getkey("velres") # take care of potential issues in the unit strings # @todo if not provided? bmaj['unit'] = bmaj['unit'].lower() bmin['unit'] = bmin['unit'].lower() velres['unit'] = velres['unit'].lower() taskargs = "bmaj=%s bmin=%s bpa=%s velres=%s" % (bmaj,bmin,bpa,velres) bdpnames=[] for ibdp in self._bdp_in: istem = ibdp.getimagefile(bt.CASA) image_in = ibdp.baseDir() + istem bdp_name = self.mkext(istem,'sim') image_out = self.dir(bdp_name) taskinit.ia.open(image_in) h = casa.imhead(image_in, mode='list') pix_scale = np.abs(h['cdelt1'] * 206265.0) # pix scale in asec @todo QA ? CC = 299792458.0 # speed of light @todo somewhere else [utils.c , but in km/s] rest_freq = h['crval3'] # frequency pixel scale in km/s vel_scale = np.abs(CC*h['cdelt3']/rest_freq/1000.0) # unit conversion to arcsec (spatial) or km/s # (velocity) or some flavor of Hz. if(bmaj['unit'] == 'pixel'): bmaj = bmaj['value']*pix_scale else: bmaj = bmaj['value'] if(bmin['unit'] == 'pixel'): bmin = bmin['value']*pix_scale else: bmin = bmin['value'] hertz_input = False if velres['unit'] == 'pixel': velres['value'] = velres['value']*vel_scale velres['unit'] = 'km/s' elif velres['unit'] == 'm/s': velres['value'] = velres['value']/1000.0 velres['unit'] = 'km/s' elif velres['unit'][-2:] == 'hz': hertz_input = True elif velres['unit'] == 'km/s': pass else: logging.error("Unknown units in velres=%s" % velres['unit']) rdata = bmaj # we smooth in velocity first. if smoothing in velocity # the cube apparently must be closed afterwards and # then reopened if spatial smoothing is to be done. if velres['value'] > 0: # handle the different units allowed. CASA doesn't # like lowercase for hz units... if not hertz_input: freq_res = str(velres['value']*1000.0/CC *rest_freq )+'Hz' else: freq_res = str(velres['value']) # try to convert velres to km/s for debug purposes velres['value'] = velres['value']/rest_freq*CC / 1000.0 if(velres['unit'] == 'khz'): velres['value'] = velres['value']*1000.0 velres['unit'] = 'kHz' elif(velres['unit']=='mhz'): velres['value'] = velres['value']*1E6 velres['unit'] = 'MHz' elif(velres['unit']=='ghz'): velres['value'] = velres['value']*1E9 velres['unit'] = 'GHz' freq_res = freq_res + velres['unit'] # NB: there is apparently a bug in CASA. only smoothing along the frequency # axis does not work. sepconvolve gives a unit error (says axis unit is radian rather # than Hz). MUST smooth in 2+ dimensions if you want this to work. if(velres['value'] < vel_scale): raise Exception,"Desired velocity resolution %g less than pixel scale %g" % (velres['value'],vel_scale) image_tmp = self.dir('tmp.smooth') im2=taskinit.ia.sepconvolve(outfile=image_tmp,axes=[0,1,2], types=["boxcar","boxcar","gauss"],\ widths=['1pix','1pix',freq_res], overwrite=True) im2.done() logging.debug("sepconvolve to %s" % image_out) # for some reason, doing this in memory does not seem to work, so outfile must be specified. 
logging.info("Smoothing cube to a velocity resolution of %s km/s" % str(velres['value'])) logging.info("Smoothing cube to a frequency resolution of %s" % freq_res) taskinit.ia.close() taskinit.ia.open(image_tmp) dt.tag("sepconvolve") else: image_tmp = image_out # now do the spatial smoothing convolve_to_min_beam = True # default is to convolve to a min enclosing beam if bmaj > 0 and bmin > 0: # form qa objects out of these so that casa can understand bmaj = taskinit.qa.quantity(bmaj,'arcsec') bmin = taskinit.qa.quantity(bmin,'arcsec') bpa = taskinit.qa.quantity(bpa,'deg') target_res={} target_res['major'] = bmaj target_res['minor'] = bmin target_res['positionangle'] = bpa # throw an exception if cannot be convolved try: # for whatever reason, if you give convolve2d a beam parameter, # it complains ... im2=taskinit.ia.convolve2d(outfile=image_out,major = bmaj,\ minor = bmin, pa = bpa,\ targetres=True,overwrite=True) im2.done() logging.info("Smoothing cube to a resolution of %s by %s at a PA of %s" % (str(bmaj['value']), str(bmin['value']), str(bpa['value']))) convolve_to_min_beam = False achieved_res = target_res except: # @todo remind what you need ? logging.error("Warning: Could not convolve to requested resolution of "\ +str(bmaj['value']) + " by " + str(bmin['value']) + \ " at a PA of "+ str(bpa['value'])) raise Exception,"Could not convolve to beam given!" dt.tag("convolve2d-1") if convolve_to_min_beam: restoring_beams = taskinit.ia.restoringbeam() commonbeam = taskinit.ia.commonbeam() # for whatever reason, setrestoringbeam does not use the same set of hashes... commonbeam['positionangle']=commonbeam['pa'] del commonbeam['pa'] # if there's one beam, apparently the beams keyword does not exist if 'beams' in restoring_beams: print "Smoothing cube to a resolution of "+ \ str(commonbeam['major']['value']) +" by "+ \ str(commonbeam['minor']['value'])+" at a PA of "\ +str(commonbeam['pa']['value']) target_res = commonbeam im2=taskinit.ia.convolve2d(outfile=image_out,major=commonbeam['major'],\ minor=commonbeam['minor'],\ pa=commonbeam['positionangle'],\ targetres=True,overwrite=True) im2.done() achieved_res = commonbeam dt.tag("convolve2d-2") else: print "One beam for all planes. Smoothing to common beam redundant." achieved_res = commonbeam if velres['value'] < 0: taskinit.ia.fromimage(outfile=image_out, infile=image_in) # not really doing anything # else, we've already done what we needed to taskinit.ia.setrestoringbeam(beam = achieved_res) rdata = achieved_res['major']['value'] # else do no smoothing and just close the image taskinit.ia.close() dt.tag("close") b1 = SpwCube_BDP(bdp_name) self.addoutput(b1) # need to update for multiple images. b1.setkey("image", Image(images={bt.CASA:bdp_name})) bdpnames = bdpnames.append(bdp_name) # and clean up the temp image before the next image if velres['value'] > 0: utils.remove(image_tmp) # thes are task arguments not summary entries. _bmaj = taskinit.qa.convert(achieved_res['major'],'rad')['value'] _bmin = taskinit.qa.convert(achieved_res['minor'],'rad')['value'] _bpa = taskinit.qa.convert(achieved_res['positionangle'],'deg')['value'] vres = "%.2f %s" % (velres['value'],velres['unit']) logging.regression("SMOOTH: %f %f" % (rdata,velres['value'])) self._summary["smooth"] = SummaryEntry([bdp_name,convolve_to_min_beam,_bmaj,_bmin,_bpa,vres],"Smooth_AT",self.id(True),taskargs) dt.tag("done") dt.end()
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ dt = utils.Dtime("CubeSum") # tagging time self._summary = {} # an ADMIT summary will be created here numsigma = self.getkey("numsigma") # get the input keys sigma = self.getkey("sigma") use_lines = self.getkey("linesum") pad = self.getkey("pad") b1 = self._bdp_in[0] # spw image cube b1a = self._bdp_in[1] # cubestats (optional) b1b = self._bdp_in[2] # linelist (optional) f1 = b1.getimagefile(bt.CASA) taskinit.ia.open(self.dir(f1)) s = taskinit.ia.summary() nchan = s['shape'][2] if b1b != None: ch0 = b1b.table.getFullColumnByName("startchan") ch1 = b1b.table.getFullColumnByName("endchan") s = Segments(ch0, ch1, nchan=nchan) # @todo something isn't merging here as i would have expected, # e.g. test0.fits [(16, 32), (16, 30), (16, 29)] if pad > 0: for (c0, c1) in s.getsegmentsastuples(): s.append([c0 - pad, c0]) s.append([c1, c1 + pad]) s.merge() s.recalcmask() # print "PJT segments:",s.getsegmentsastuples() ns = len(s.getsegmentsastuples()) chans = s.chans(not use_lines) if use_lines: msum = s.getmask() else: msum = 1 - s.getmask() logging.info("Read %d segments" % ns) # print "chans",chans # print "msum",msum # from a deprecated keyword, but kept here to pre-smooth the spectrum before clipping # examples are: ['boxcar',3] ['gaussian',7] ['hanning',5] smooth = [] sig_const = False # figure out if sigma is taken as constant in the cube if b1a == None: # if no 2nd BDP was given, sigma needs to be specified if sigma <= 0.0: raise Exception, "Neither user-supplied sigma nor CubeStats_BDP input given. One is required." else: sig_const = True # and is constant else: if sigma > 0: sigma = b1a.get("sigma") sig_const = True if sig_const: logging.info("Using constant sigma = %f" % sigma) else: logging.info("Using varying sigma per plane") infile = b1.getimagefile(bt.CASA) # ADMIT filename of the image (cube) bdp_name = self.mkext( infile, 'csm' ) # morph to the new output name with replaced extension 'csm' image_out = self.dir(bdp_name) # absolute filename args = { "imagename": self.dir(infile) } # assemble arguments for immoments() args["moments"] = 0 # only need moments=0 (or [0] is ok as well) args["outfile"] = image_out # note full pathname dt.tag("start") if sig_const: args["excludepix"] = [-numsigma * sigma, numsigma * sigma] # single global sigma if b1b != None: # print "PJT: ",chans args["chans"] = chans else: # @todo in this section bad channels can cause a fully masked cubesum = bad # cubestats input sigma_array = b1a.table.getColumnByName( "sigma") # channel dependent sigma sigma_pos = sigma_array[np.where(sigma_array > 0)] smin = sigma_pos.min() smax = sigma_pos.max() logging.info("sigma varies from %f to %f" % (smin, smax)) maxval = b1a.get("maxval") # max in cube nzeros = len(np.where(sigma_array <= 0.0)[0]) # check bad channels if nzeros > 0: logging.warning("There are %d NaN channels " % nzeros) # raise Exception,"need to recode CubeSum or use constant sigma" dt.tag("grab_sig") if len(smooth) > 0: # see also LineID and others filter = Filter1D.Filter1D( sigma_array, smooth[0], **Filter1D.Filter1D.convertargs(smooth)) sigma_array = filter.run() dt.tag("smooth_sig") # create a CASA image copy for making the mirror sigma cube to mask against file = self.dir(infile) mask = file + "_mask" taskinit.ia.fromimage(infile=file, outfile=mask) nx = taskinit.ia.shape()[0] ny = taskinit.ia.shape()[1] nchan = taskinit.ia.shape()[2] taskinit.ia.fromshape(shape=[nx, ny, 1]) plane = 
taskinit.ia.getchunk( [0, 0, 0], [-1, -1, 0]) # convenience plane for masking operation dt.tag("mask_sig") taskinit.ia.open(mask) dt.tag("open_mask") count = 0 for i in range(nchan): if sigma_array[i] > 0: if b1b != None: if msum[i]: taskinit.ia.putchunk(plane * 0 + sigma_array[i], blc=[0, 0, i, -1]) count = count + 1 else: taskinit.ia.putchunk(plane * 0 + maxval, blc=[0, 0, i, -1]) else: taskinit.ia.putchunk(plane * 0 + sigma_array[i], blc=[0, 0, i, -1]) count = count + 1 else: taskinit.ia.putchunk(plane * 0 + maxval, blc=[0, 0, i, -1]) taskinit.ia.close() logging.info("%d/%d channels used for CubeSum" % (count, nchan)) dt.tag("close_mask") names = [file, mask] tmp = file + '.tmp' if numsigma == 0.0: # hopefully this will also make use of the mask exp = "IM0[IM1<%f]" % (0.99 * maxval) else: exp = "IM0[abs(IM0/IM1)>%f]" % (numsigma) # print "PJT: exp",exp casa.immath(mode='evalexpr', imagename=names, expr=exp, outfile=tmp) args["imagename"] = tmp dt.tag("immath") casa.immoments(**args) dt.tag("immoments") if sig_const is False: # get rid of temporary files utils.remove(tmp) utils.remove(mask) # get the flux taskinit.ia.open(image_out) st = taskinit.ia.statistics() taskinit.ia.close() dt.tag("statistics") # report that flux, but there's no way to get the units from casa it seems # ia.summary()['unit'] is usually 'Jy/beam.km/s' for ALMA # imstat() does seem to know it. if st.has_key('flux'): rdata = [st['flux'][0], st['sum'][0]] logging.info("Total flux: %f (sum=%f)" % (st['flux'], st['sum'])) else: rdata = [st['sum'][0]] logging.info("Sum: %f (beam parameters missing)" % (st['sum'])) logging.regression("CSM: %s" % str(rdata)) # Create two output images for html and their thumbnails, too implot = ImPlot(ptype=self._plot_type, pmode=self._plot_mode, abspath=self.dir()) implot.plotter(rasterfile=bdp_name, figname=bdp_name, colorwedge=True) figname = implot.getFigure(figno=implot.figno, relative=True) thumbname = implot.getThumbnail(figno=implot.figno, relative=True) dt.tag("implot") thumbtype = bt.PNG # really should be correlated with self._plot_type!! # 2. Create a histogram of the map data # get the data for a histogram data = casautil.getdata(image_out, zeromask=True).compressed() dt.tag("getdata") # get the label for the x axis bunit = casa.imhead(imagename=image_out, mode="get", hdkey="bunit") # Make the histogram plot # Since we give abspath in the constructor, figname should be relative myplot = APlot(ptype=self._plot_type, pmode=self._plot_mode, abspath=self.dir()) auxname = bdp_name + "_histo" auxtype = bt.PNG # really should be correlated with self._plot_type!! 
myplot.histogram(columns=data, figname=auxname, xlab=bunit, ylab="Count", title="Histogram of CubeSum: %s" % (bdp_name), thumbnail=True) auxname = myplot.getFigure(figno=myplot.figno, relative=True) auxthumb = myplot.getThumbnail(figno=myplot.figno, relative=True) images = {bt.CASA: bdp_name, bt.PNG: figname} casaimage = Image(images=images, auxiliary=auxname, auxtype=auxtype, thumbnail=thumbname, thumbnailtype=thumbtype) if hasattr(b1, "line"): # SpwCube doesn't have Line line = deepcopy(getattr(b1, "line")) if not isinstance(line, Line): line = Line(name="Undetermined") else: line = Line(name="Undetermined") # fake a Line if there wasn't one self.addoutput( Moment_BDP(xmlFile=bdp_name, moment=0, image=deepcopy(casaimage), line=line)) imcaption = "Integral (moment 0) of all emission in image cube" auxcaption = "Histogram of cube sum for image cube" taskargs = "numsigma=%.1f sigma=%g smooth=%s" % (numsigma, sigma, str(smooth)) self._summary["cubesum"] = SummaryEntry([ figname, thumbname, imcaption, auxname, auxthumb, auxcaption, bdp_name, infile ], "CubeSum_AT", self.id(True), taskargs) dt.tag("done") dt.end()
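# The immath/immoments calls above implement a per-channel clip-and-sum: a voxel
# contributes to the moment-0 map only where |value| exceeds numsigma times that
# channel's noise.  The numpy-only sketch below shows the same idea on an
# in-memory array; the (nx, ny, nchan) layout and the optional channel mask are
# assumptions for illustration, not the ADMIT/CASA data structures.
import numpy as np

def clipped_cubesum(cube, sigma_per_chan, numsigma=2.0, chan_mask=None):
    """cube: (nx, ny, nchan) array; sigma_per_chan: (nchan,) per-channel noise."""
    nchan = cube.shape[2]
    if chan_mask is None:
        chan_mask = np.ones(nchan, dtype=bool)      # use all channels by default
    total = np.zeros(cube.shape[:2])
    for k in range(nchan):
        if not chan_mask[k] or sigma_per_chan[k] <= 0.0:
            continue                                # skip line-free or bad channels
        plane = cube[:, :, k]
        keep = np.abs(plane) > numsigma * sigma_per_chan[k]
        total += np.where(keep, plane, 0.0)
    return total

# cube = np.random.normal(size=(64, 64, 100)); sigmas = np.ones(100)
# mom0 = clipped_cubesum(cube, sigmas, numsigma=3.0)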
def run(self, **keyval): """ Method to calculate the continuum from the given masked spectrum. If search=True is given as an argument then the algorithm will iterate through the different order splines to find the best fit, based on noise level. Parameters ---------- keyval : dictionary Dictionary containing the keyword value pair arguments Returns ------- numpy array containing the best fit continuum Notes ----- Arguments for the run method: - search : bool, whether or not to search for the best fit. Default: False - bbox : array_like, 2-sequence specifying the boundary of the approximation interval. If None (default), ``bbox=[x[0], x[-1]]``. - k : int 1 < k <= 5, the degree of spline smoothing to use, Default: 3 - s : float or None Positive smoothing factor used to choose the number of knots. Number of knots will be increased until the smoothing condition is satisfied:: sum((w[i] * (y[i]-spl(x[i])))**2, axis=0) <= s If None (default), ``s = len(w)`` which should be a good value if ``1/w[i]`` is an estimate of the standard deviation of ``y[i]``. If 0, spline will interpolate through all data points. - ext : int or str Controls the extrapolation mode for elements not in the interval defined by the knot sequence. * if ext=0 or 'extrapolate', return the extrapolated value. * if ext=1 or 'zeros', return 0 * if ext=2 or 'raise', raise a ValueError * if ext=3 or 'const', return the boundary value. The default value is 0. - check_finite : bool Whether to check that the input arrays contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination or non-sensical results) if the inputs do contain infinities or NaNs. Default is False. """ # set up the data elements args = { "x": self.x, "y": self.y.data, # invert the mask to get the weights: masked (True) channels are excluded # from the fit by giving them zero weight, since UnivariateSpline needs # numeric weights rather than a boolean mask "w": -self.y.mask } # get the given arguments search = False noise = None maxchisq = None if "search" in keyval: search = keyval["search"] if "noise" in keyval: noise = keyval["noise"] if "chisq" in keyval: maxchisq = keyval["chisq"] for arg in ["bbox", "k", "s", "ext", "check_finite"]: if arg in keyval: args[arg] = keyval[arg] # if searching for the best fit # limited to 3rd order as 4th and 5th order could fit weak wide lines if search: chisq = {1: 1000., 2: 1000., 3: 1000.} # iterate over each possible order for k in chisq: args["k"] = k spl = UnivariateSpline(**args) fit = spl(self.x) chisq[k] = stats.reducedchisquared(self.y, fit, k + 1, noise) # find the best fit, if chisq values are close (<20%), then prefer the lowest order mv = 1000. order = 0 for k in chisq: if chisq[k] < mv and (mv - chisq[k]) / mv > 0.2: mv = chisq[k] order = k # if we have a really poor fit then just give up if maxchisq is not None and mv > maxchisq: logging.warning("No good fit for continuum found") return None args["k"] = order logging.info("Using fit of order %i with chi^2 of %f" % (order, mv)) # do the final fit spl = UnivariateSpline(**args) fit = spl(self.x) else: # do the fit with the given parameters spl = UnivariateSpline(**args) fit = spl(self.x) return fit
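# A stand-alone sketch of the order search described above: fit UnivariateSpline
# for k = 1..3, score each fit with a simple reduced chi-square, and keep the
# lowest order unless a higher order is markedly (>20%) better.  The
# reduced_chisq helper and the maxchisq default stand in for the ADMIT stats
# utility and are assumptions, not its actual implementation.
import numpy as np
from scipy.interpolate import UnivariateSpline

def reduced_chisq(y, fit, nparam, noise):
    dof = max(len(y) - nparam, 1)
    return np.sum(((y - fit) / noise) ** 2) / dof

def best_spline(x, y, weights, noise, maxchisq=10.0):
    best_fit, best_score, best_k = None, 1.0e3, 0
    for k in (1, 2, 3):              # higher orders could fit weak, wide lines
        spl = UnivariateSpline(x, y, w=weights, k=k)
        fit = spl(x)
        score = reduced_chisq(y, fit, k + 1, noise)
        # prefer a lower order unless a higher order improves chi^2 by >20%
        if score < best_score and (best_score - score) / best_score > 0.2:
            best_score, best_fit, best_k = score, fit, k
    if best_score > maxchisq:
        return None                  # no acceptable continuum fit
    return best_k, best_fit

# weights would typically be 1.0 for channels to fit and 0.0 for masked channels,
# e.g. (~spectrum.mask).astype(float) for a numpy masked array.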
def run(self): """Runs the task. Parameters ---------- None Returns ------- None """ self._summary = {} dt = utils.Dtime("CubeSpectrum") # our BDP's # b1 = input BDP # b1s = optional input CubeSpectrum # b1m = optional input Moment # b1p = optional input SourceList for positions # b2 = output BDP b1 = self._bdp_in[0] # check input SpwCube (or LineCube) fin = b1.getimagefile(bt.CASA) if self._bdp_in[0]._type == bt.LINECUBE_BDP: use_vel = True else: use_vel = False sources = self.getkey("sources") pos = [] # blank it first, then try and grab it from the optional bdp_in's cmean = 0.0 csigma = 0.0 smax = [] # accumulate max in each spectrum for regression self.spec_description = [] # for summary() if self._bdp_in[1] != None: # check if CubeStats_BDP #print "BDP[1] type: ",self._bdp_in[1]._type if self._bdp_in[1]._type != bt.CUBESTATS_BDP: raise Exception,"bdp_in[1] not a CubeStats_BDP, should never happen" # a table (cubestats) b1s = self._bdp_in[1] pos.append(b1s.maxpos[0]) pos.append(b1s.maxpos[1]) logging.info('CubeStats::maxpos,val=%s,%f' % (str(b1s.maxpos),b1s.maxval)) cmean = b1s.mean csigma = b1s.sigma dt.tag("CubeStats-pos") if self._bdp_in[2] != None: # check if Moment_BDP (probably from CubeSum) #print "BDP[2] type: ",self._bdp_in[2]._type if self._bdp_in[2]._type != bt.MOMENT_BDP: raise Exception,"bdp_in[2] not a Moment_BDP, should never happen" b1m = self._bdp_in[2] fim = b1m.getimagefile(bt.CASA) pos1,maxval = self.maxpos_im(self.dir(fim)) # compute maxpos, since it is not in bdp (yet) logging.info('CubeSum::maxpos,val=%s,%f' % (str(pos1),maxval)) pos.append(pos1[0]) pos.append(pos1[1]) dt.tag("Moment-pos") if self._bdp_in[3] != None: # check if SourceList #print "BDP[3] type: ",self._bdp_in[3]._type # a table (SourceList) b1p = self._bdp_in[3] ra = b1p.table.getFullColumnByName("RA") dec = b1p.table.getFullColumnByName("DEC") peak = b1p.table.getFullColumnByName("Peak") if sources == []: # use the whole SourceList for (r,d,p) in zip(ra,dec,peak): rdc = convert_sexa(r,d) pos.append(rdc[0]) pos.append(rdc[1]) logging.info('SourceList::maxpos,val=%s,%f' % (str(rdc),p)) else: # select specific ones from the source list for ipos in sources: if ipos < len(ra): radec = convert_sexa(ra[ipos],dec[ipos]) pos.append(radec[0]) pos.append(radec[1]) logging.info('SourceList::maxpos,val=%s,%f' % (str(radec),peak[ipos])) else: logging.warning('Skipping illegal source number %d' % ipos) dt.tag("SourceList-pos") # if pos[] still blank, use the AT keyword. if len(pos) == 0: pos = self.getkey("pos") # if still none, try the map center if len(pos) == 0: # @todo this could result in a masked pixel and cause further havoc # @todo could also take the reference pixel, but that could be outside image taskinit.ia.open(self.dir(fin)) s = taskinit.ia.summary() pos = [int(s['shape'][0])/2, int(s['shape'][1])/2] logging.warning("No input positions supplied, map center choosen: %s" % str(pos)) dt.tag("map-center") # exhausted all sources where pos[] can be set; if still zero, bail out if len(pos) == 0: raise Exception,"No positions found from input BDP's or pos=" # convert this regular list to a list of tuples with duplicates removed # sadly the order is lost. 
pos = list(set(zip(pos[0::2],pos[1::2]))) npos = len(pos) dt.tag("open") bdp_name = self.mkext(fin,"csp") b2 = CubeSpectrum_BDP(bdp_name) self.addoutput(b2) imval = range(npos) # spectra, one for each pos (placeholder) planes = range(npos) # labels for the tables (placeholder) images = {} # png's accumulated for i in range(npos): # loop over pos, they can have mixed types now sd = [] caption = "Spectrum" xpos = pos[i][0] ypos = pos[i][1] if type(xpos) != type(ypos): print "POS:",xpos,ypos raise Exception,"position pair not of the same type" if type(xpos)==int: # for integers, boxes are allowed, even multiple box = '%d,%d,%d,%d' % (xpos,ypos,xpos,ypos) # convention for summary is (box) cbox = '(%d,%d,%d,%d)' % (xpos,ypos,xpos,ypos) # use extend here, not append, we want individual values in a list sd.extend([xpos,ypos,cbox]) caption = "Average Spectrum at %s" % cbox if False: # this will fail on 3D cubes (see CAS-7648) imval[i] = casa.imval(self.dir(fin),box=box) else: # work around that CAS-7648 bug # another approach is the ia.getprofile(), see CubeStats, this will # also integrate over regions, imval will not (!!!) region = 'centerbox[[%dpix,%dpix],[1pix,1pix]]' % (xpos,ypos) caption = "Average Spectrum at %s" % region imval[i] = casa.imval(self.dir(fin),region=region) elif type(xpos)==str: # this is tricky, to stay under 1 pixel , or you get a 2x2 back. region = 'centerbox[[%s,%s],[1pix,1pix]]' % (xpos,ypos) caption = "Average Spectrum at %s" % region sd.extend([xpos,ypos,region]) imval[i] = casa.imval(self.dir(fin),region=region) else: print "Data type: ",type(xpos) raise Exception,"Data type for region not handled" dt.tag("imval") flux = imval[i]['data'] if len(flux.shape) > 1: # rare case if we step on a boundary between cells? logging.warning("source %d has spectrum shape %s: averaging the spectra" % (i,repr(flux.shape))) flux = np.average(flux,axis=0) logging.debug('minmax: %f %f %d' % (flux.min(),flux.max(),len(flux))) smax.append(flux.max()) if i==0: # for first point record few extra things if len(imval[i]['coords'].shape) == 2: # normal case: 1 pixel freqs = imval[i]['coords'].transpose()[2]/1e9 # convert to GHz @todo: input units ok? elif len(imval[i]['coords'].shape) == 3: # rare case if > 1 point in imval() freqs = imval[i]['coords'][0].transpose()[2]/1e9 # convert to GHz @todo: input units ok? else: logging.fatal("bad shape %s in freq return from imval - SHOULD NEVER HAPPEN" % imval[i]['coords'].shape) chans = np.arange(len(freqs)) # channels 0..nchans-1 unit = imval[i]['unit'] restfreq = casa.imhead(self.dir(fin),mode="get",hdkey="restfreq")['value']/1e9 # in GHz dt.tag("imhead") vel = (1-freqs/restfreq)*utils.c # @todo : use a function (and what about relativistic?) # construct the Table for CubeSpectrum_BDP # @todo note data needs to be a tuple, later to be column_stack'd labels = ["channel" ,"frequency" ,"flux" ] units = ["number" ,"GHz" ,unit ] data = (chans ,freqs ,flux ) if i==0: # plane 0 : we are allowing a multiplane table, so the first plane is special table = Table(columns=labels,units=units,data=np.column_stack(data),planes=["0"]) else: # planes 1,2,3.... are stacked onto the previous one table.addPlane(np.column_stack(data),"%d" % i) # example plot , one per position for now if use_vel: x = vel xlab = 'VLSR (km/s)' else: x = chans xlab = 'Channel' y = [flux] sd.append(xlab) if type(xpos)==int: # grab the RA/DEC... 
kludgy h = casa.imstat(self.dir(fin),box=box) ra = h['blcf'].split(',')[0] dec = h['blcf'].split(',')[1] title = '%s %d @ %d,%d = %s,%s' % (bdp_name,i,xpos,ypos,ra,dec) else: title = '%s %d @ %s,%s' % (bdp_name,i,xpos,ypos) # or use box, once we allow non-points myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode, abspath=self.dir()) ylab = 'Flux (%s)' % unit p1 = "%s_%d" % (bdp_name,i) myplot.plotter(x,y,title,p1,xlab=xlab,ylab=ylab,thumbnail=True) # Why not use p1 as the key? ii = images["pos%d" % i] = myplot.getFigure(figno=myplot.figno,relative=True) thumbname = myplot.getThumbnail(figno=myplot.figno,relative=True) sd.extend([ii, thumbname, caption, fin]) self.spec_description.append(sd) logging.regression("CSP: %s" % str(smax)) image = Image(images=images, description="CubeSpectrum") b2.setkey("image",image) b2.setkey("table",table) b2.setkey("sigma",csigma) # TODO: not always available b2.setkey("mean",cmean) # TODO: not always available if True: # @todo only first plane due to limitation in exportTable() islash = bdp_name.find('/') if islash < 0: tabname = self.dir("testCubeSpectrum.tab") else: tabname = self.dir(bdp_name[:islash] + "/testCubeSpectrum.tab") table.exportTable(tabname,cols=["frequency" ,"flux"]) dt.tag("done") # For a single spectrum this is # SummaryEntry([[data for spec1]], "CubeSpectrum_AT",taskid) # For multiple spectra this is # SummaryEntry([[data for spec1],[data for spec2],...], "CubeSpectrum_AT",taskid) self._summary["spectra"] = SummaryEntry(self.spec_description,"CubeSpectrum_AT",self.id(True)) taskargs = "pos="+str(pos) taskargs += ' <span style="background-color:white"> ' + fin.split('/')[0] + ' </span>' for v in self._summary: self._summary[v].setTaskArgs(taskargs) dt.tag("summary") dt.end()
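# Two of the small steps above, shown stand-alone with made-up inputs: pairing the
# flat [x1, y1, x2, y2, ...] position list into unique (x, y) tuples, and the
# radio-convention frequency-to-velocity conversion v = c * (1 - f/f0).  Both
# helpers are illustrative, not part of CubeSpectrum_AT.
import numpy as np

C_KMS = 299792.458   # speed of light in km/s

def pair_positions(flat):
    """[x1, y1, x2, y2, ...] -> unique (x, y) tuples (ordering is not preserved)."""
    return list(set(zip(flat[0::2], flat[1::2])))

def radio_velocity(freqs_ghz, restfreq_ghz):
    return (1.0 - np.asarray(freqs_ghz) / restfreq_ghz) * C_KMS

# pair_positions([10, 20, 10, 20, 31, 44])     -> [(10, 20), (31, 44)]
# radio_velocity([115.270, 115.272], 115.271)  -> approx [+2.6, -2.6] km/s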
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ dt = utils.Dtime("SFind2D") # tagging time self._summary = {} # get key words that user input nsigma = self.getkey("numsigma") sigma = self.getkey("sigma") region = self.getkey("region") robust = self.getkey("robust") snmax = self.getkey("snmax") ds9 = True # writes a "ds9.reg" file mpl = True # aplot.map1() plot dynlog = 20.0 # above this value of dyn range finder chart is log I-scaled bpatch = True # patch units to Jy/beam for ia.findsources() # get the input casa image from bdp[0] bdpin = self._bdp_in[0] infile = bdpin.getimagefile(bt.CASA) if mpl: data = np.flipud(np.rot90(casautil.getdata(self.dir(infile)).data)) # check if there is a 2nd image (which will be a PB) for i in range(len(self._bdp_in)): print 'BDP',i,type(self._bdp_in[i]) if self._bdp_in[2] != None: bdpin_pb = self._bdp_in[1] bdpin_cst = self._bdp_in[2] print "Need to process PB" else: bdpin_pb = None bdpin_cst = self._bdp_in[1] print "No PB given" # get the output bdp basename slbase = self.mkext(infile,'sl') # make sure it's a 2D map if not casautil.mapdim(self.dir(infile),2): raise Exception,"Input map dimension not 2: %s" % infile # arguments for imstat call if required args = {"imagename" : self.dir(infile)} if region != "": args["region"] = region dt.tag("start") # The following code sets the sigma level for searching for sources using # the sigma and snmax keyword as appropriate # if no CubeStats BDP was given and no sigma was specified: # find a noise level via casa.imstat() # if a CubeStat_BDP is given get it from there. if bdpin_cst == None: # get statistics from input image with imstat because no CubeStat_BDP stat = casa.imstat(**args) dmin = float(stat["min"][0]) # these would be wrong if robust were used already dmax = float(stat["max"][0]) args.update(casautil.parse_robust(robust)) # only now add robust keywords for the sigma stat = casa.imstat(**args) if sigma <= 0.0 : sigma = float(stat["sigma"][0]) dt.tag("imstat") else: # get statistics from CubeStat_BDP sigma = bdpin_cst.get("sigma") dmin = bdpin_cst.get("minval") dmax = bdpin_cst.get("maxval") self.setkey("sigma",sigma) # calculate cutoff based either on RMS or dynamic range limitation drange = dmax/(nsigma*sigma) if snmax < 0.0 : snmax = drange if drange > snmax : cutoff = 1.0/snmax else: cutoff = 1.0/drange logging.info("sigma, dmin, dmax, snmax, cutoff %g %g %g %g %g" % (sigma, dmin, dmax, snmax, cutoff)) # define arguments for call to findsources args2 = {"cutoff" : cutoff} args2["nmax"] = 30 if region != "" : args2["region"] = region #args2["mask"] = "" args2["point"] = False args2["width"] = 5 args2["negfind"] = False # set-up for SourceList_BDP slbdp = SourceList_BDP(slbase) # connect to casa image and call casa ia.findsources tool taskinit.ia.open(self.dir(infile)) # findsources() cannot deal with 'Jy/beam.km/s' ??? 
# so for the duration of findsources() we patch it bunit = taskinit.ia.brightnessunit() if bpatch and bunit != 'Jy/beam': logging.warning("Temporarely patching your %s units to Jy/beam for ia.findsources()" % bunit) taskinit.ia.setbrightnessunit('Jy/beam') else: bpatch = False atab = taskinit.ia.findsources(**args2) if bpatch: taskinit.ia.setbrightnessunit(bunit) taskargs = "nsigma=%4.1f sigma=%g region=%s robust=%s snmax=%5.1f" % (nsigma,sigma,str(region),str(robust),snmax) dt.tag("findsources") nsources = atab["nelements"] xtab = [] ytab = [] logscale = False sumflux = 0.0 if nsources > 0: # @TODO: Why are Xpix, YPix not stored in the table? # -> PJT: I left them out since they are connected to an image which may not be available here # but we should store the frequency of the observation here for later bandmerging logging.debug("%s" % str(atab['component0']['shape'])) logging.info("Right Ascen. Declination X(pix) Y(pix) Peak Flux Major Minor PA SNR") funits = atab['component0']['flux']['unit'] if atab['component0']['shape'].has_key('majoraxis'): sunits = atab['component0']['shape']['majoraxis']['unit'] aunits = atab['component0']['shape']['positionangle']['unit'] else: sunits = "n/a" aunits = "n/a" punits = taskinit.ia.summary()['unit'] logging.info(" %s %s %s %s %s" % (punits,funits,sunits,sunits,aunits)) # # @todo future improvement is to look at image coordinates and control output appropriately # if ds9: # @todo variable name regname = self.mkext(infile,'ds9.reg') fp9 = open(self.dir(regname),"w!") for i in range(nsources): c = "component%d" % i name = "%d" % (i+1) r = atab[c]['shape']['direction']['m0']['value'] d = atab[c]['shape']['direction']['m1']['value'] pixel = taskinit.ia.topixel([r,d]) xpos = pixel['numeric'][0] ypos = pixel['numeric'][1] rd = taskinit.ia.toworld([xpos,ypos],'s') ra = rd['string'][0][:12] dec = rd['string'][1][:12] flux = atab[c]['flux']['value'][0] sumflux = sumflux + flux if atab[c]['shape'].has_key('majoraxis'): smajor = atab[c]['shape']['majoraxis']['value'] sminor = atab[c]['shape']['minoraxis']['value'] sangle = atab[c]['shape']['positionangle']['value'] else: smajor = 0.0 sminor = 0.0 sangle = 0.0 peakstr = taskinit.ia.pixelvalue([xpos,ypos,0,0]) if len(peakstr) == 0: logging.warning("Problem with source %d @ %d,%d" % (i,xpos,ypos)) continue peakf = peakstr['value']['value'] snr = peakf/sigma if snr > dynlog: logscale = True logging.info("%s %s %8.2f %8.2f %10.3g %10.3g %7.3f %7.3f %6.1f %6.1f" % (ra,dec,xpos,ypos,peakf,flux,smajor,sminor,sangle,snr)) xtab.append(xpos) ytab.append(ypos) slbdp.addRow([name,ra,dec,flux,peakf,smajor,sminor,sangle]) if ds9: ras = ra des = dec.replace('.',':',2) msg = 'ellipse(%s,%s,%g",%g",%g) # text={%s}' % (ras,des,smajor,sminor,sangle+90.0,i+1) fp9.write("%s\n" % msg) if ds9: fp9.close() logging.info("Wrote ds9.reg") dt.tag("table") logging.regression("CONTFLUX: %d %g" % (nsources,sumflux)) summary = taskinit.ia.summary() beammaj = summary['restoringbeam']['major']['value'] beammin = summary['restoringbeam']['minor']['value'] beamunit = summary['restoringbeam']['minor']['unit'] beamang = summary['restoringbeam']['positionangle']['value'] angunit = summary['restoringbeam']['positionangle']['unit'] # @todo add to table comments? 
logging.info(" Fitted Gaussian size; NOT deconvolved source size.") logging.info(" Restoring Beam: Major axis: %10.3g %s , Minor axis: %10.3g %s , PA: %5.1f %s" % (beammaj, beamunit, beammin, beamunit, beamang, angunit)) # form into a xml table # output is a table_bdp self.addoutput(slbdp) # instantiate a plotter for all plots made herein myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir()) # make output png with circles marking sources found if mpl: circles=[] nx = data.shape[1] # data[] array was already flipud(rot90)'d ny = data.shape[0] # for (x,y) in zip(xtab,ytab): circles.append([x,y,1]) # @todo variable name if logscale: logging.warning("LogScaling applied") data = data/sigma data = np.where(data<0,-np.log10(1-data),+np.log10(1+data)) title = "SFind2D: %d sources" % nsources myplot.map1(data,title,slbase,thumbnail=True,circles=circles) #--------------------------------------------------------- # Get the figure and thumbmail names and create a caption #--------------------------------------------------------- imname = myplot.getFigure(figno=myplot.figno,relative=True) thumbnailname = myplot.getThumbnail(figno=myplot.figno,relative=True) caption = "Image of input map with sources found by SFind2D overlayed in green." slbdp.table.description="Table of source locations and sizes (not deconvolved)" #--------------------------------------------------------- # Add finder image to the BDP #--------------------------------------------------------- image = Image(images={bt.PNG: imname}, thumbnail=thumbnailname, thumbnailtype=bt.PNG, description=caption) slbdp.image.addimage(image, "finderimage") #------------------------------------------------------------- # Create the summary entry for the table and image #------------------------------------------------------------- self._summary["sources"] = SummaryEntry([slbdp.table.serialize(), slbdp.image.serialize()], "SFind2D_AT", self.id(True), taskargs) dt.tag("done") dt.end()
def get_mem(self): """ Read memory usage info from /proc/pid/status Return Virtual and Resident memory size in MBytes. """ global ostype if ostype == None: ostype = os.uname()[0].lower() logging.info("OSTYPE: %s" % ostype) scale = {'MB': 1024.0} lines = [] try: if ostype == 'linux': proc_status = '/proc/%d/status' % os.getpid() # linux only # open pseudo file /proc/<pid>/status t = open(proc_status) # get value from line e.g. 'VmRSS: 9999 kB\n' for it in t.readlines(): if 'VmSize' in it or 'VmRSS' in it: lines.append(it) t.close() else: proc = subprocess.Popen([ 'ps', '-o', 'rss', '-o', 'vsz', '-o', 'pid', '-p', str(os.getpid()) ], stdout=subprocess.PIPE) proc_output = proc.communicate()[0].split('\n') proc_output_memory = proc_output[1] proc_output_memory = proc_output_memory.split() phys_mem = int(proc_output_memory[0]) / 1024 # to MB virtual_mem = int(proc_output_memory[1]) / 1024 except (IOError, OSError): if self.report: logging.timing(self.label + " Error: cannot read memory usage information.") return np.array([]) # parse the two lines mem = {} if (ostype != 'darwin'): for line in lines: words = line.strip().split() #print words[0], '===', words[1], '===', words[2] # get rid of the trailing ':' key = words[0][:-1] # convert from KB to MB scaled = float(words[1]) / scale['MB'] mem[key] = scaled else: mem['VmSize'] = virtual_mem mem['VmRSS'] = phys_mem return np.array([mem['VmSize'], mem['VmRSS']])
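# For completeness, a compact sketch of the non-Linux branch above: ask `ps` for
# the resident and virtual sizes of the current process (reported in kB) and
# scale them to MB.  Standard-library only; the helper name is illustrative.
import os
import subprocess

def ps_memory_mb():
    out = subprocess.Popen(
        ["ps", "-o", "rss=", "-o", "vsz=", "-p", str(os.getpid())],
        stdout=subprocess.PIPE).communicate()[0]
    rss_kb, vsz_kb = out.decode().split()[:2]
    return float(rss_kb) / 1024.0, float(vsz_kb) / 1024.0   # (resident, virtual) MB

# print(ps_memory_mb())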
def run(self): """ The run method, locates lines, attempts to identify them, and creates the BDP Parameters ---------- None Returns ------- None """ if not self.boxcar: logging.info("Boxcar smoothing turned off.") self._summary = {} self.freq = [] self.chan = [] dt = utils.Dtime("LineSegment") # timer for debugging spec_description = [] taskargs = self._taskargs() statbdp = None # for the CubeStats BDP specbdp = None # for the CubeSpectrum BDP specs = [] # to hold the input CubeSpectrum based spectra statspec = [] # to hold the input CubeStats based spectrum statseg = [] # to hold the detected segments from statspec specseg = [] # to hold the detected segments from specs #statcutoff = [] # cutoff for statspec line finding #speccutoff = [] # cutoff for specs line finding infile = "" if self.getkey("minchan") < 1: raise Exception("minchan must be a positive value.") elif self.getkey("minchan") == 1 and self.getkey("iterate"): logging.info( "iterate=True is not allowed for minchan=1, setting iterate to False" ) self.setkey("iterate", False) vlsr = 0.0 # get the input bdp if self._bdp_in[0] is not None: specbdp = self._bdp_in[0] infile = specbdp.xmlFile if self._bdp_in[1] is not None: statbdp = self._bdp_in[1] infile = statbdp.xmlFile # still need to do this check since all are optional inputs if specbdp is None and statbdp is None: raise Exception("No input BDP's found.") imbase = self.mkext(infile, 'lseg') # grab any optional references overplotted on the "ll" plots # instantiate a plotter for all plots made herein self._plot_type = admit.util.PlotControl.SVG myplot = APlot(ptype=self._plot_type, pmode=self._plot_mode, abspath=self.dir()) dt.tag("start") ############################################################################ # Smoothing and continuum (baseline) subtraction of input spectra # ############################################################################ # get and smooth all input spectra basicsegment = { "method": self.getkey("segment"), "minchan": self.getkey("minchan"), "maxgap": self.getkey("maxgap"), "numsigma": self.getkey("numsigma"), "iterate": self.getkey("iterate"), "nomean": True } segargsforcont = { "name": "Line_Segment.%i.asap" % self.id(True), "pmin": self.getkey("numsigma"), "minchan": self.getkey("minchan"), "maxgap": self.getkey("maxgap") } if specbdp is not None: # get the spectrum specs = specutil.getspectrum(specbdp, vlsr, self.getkey("smooth"), self.getkey("recalcnoise"), basicsegment) # remove the continuum, if requested if self.getkey("csub")[1] is not None: logging.info( "Attempting Continuum Subtraction for Input Spectra") order = self.getkey("csub")[1] specutil.contsub(self.id(True), specs, self.getkey("segment"), segargsforcont, algorithm="PolyFit", **{"deg": order}) else: for spec in specs: spec.set_contin(np.zeros(len(spec))) for spec in specs: self.freq, self.chan = specutil.mergefreq( self.freq, self.chan, spec.freq(False), spec.chans(False)) # get any input cubestats if statbdp is not None: statspec = specutil.getspectrum(statbdp, vlsr, self.getkey("smooth"), self.getkey("recalcnoise"), basicsegment) # remove the continuum if self.getkey("csub")[0] is not None: logging.info( "Attempting Continuum Subtraction for Input CubeStats Spectra" ) order = self.getkey("csub")[0] specutil.contsub(self.id(True), statspec, self.getkey("segment"), segargsforcont, algorithm="PolyFit", **{"deg": order}) # The 'min' spectrum is inverted for segment finding. # Doesn't this mean it will also be plotted upside down? 
if len(statspec) > 0: statspec[1].invert() for spec in statspec: self.freq, self.chan = specutil.mergefreq( self.freq, self.chan, spec.freq(False), spec.chans(False)) dt.tag("getspectrum") if isinstance(self.freq, np.ndarray): self.freq = self.freq.tolist() if isinstance(self.chan, np.ndarray): self.chan = self.chan.tolist() # search for segments of spectral line emission #NB: this is repetitive with basicsegment above. method = self.getkey("segment") minchan = self.getkey("minchan") maxgap = self.getkey("maxgap") numsigma = self.getkey("numsigma") iterate = self.getkey("iterate") if specbdp is not None: logging.info("Detecting segments in CubeSpectrum based data") values = specutil.findsegments(specs, method, minchan, maxgap, numsigma, iterate) for i, t in enumerate(values): specseg.append(t[0]) specs[i].set_noise(t[2]) if statbdp is not None: logging.info("Detecting segments in CubeStats based data") values = specutil.findsegments(statspec, method, minchan, maxgap, numsigma, iterate) for i, t in enumerate(values): statseg.append(t[0]) # print ("MWP LINESEGMENT %d Setting noise=%f minchan=%d",(i,t[2],minchan)) statspec[i].set_noise(t[2]) #statcutoff.append(t[1]) dt.tag("segment finder") lsbdp = LineSegment_BDP(imbase) finalsegs = utils.mergesegments([statseg, specseg], len(self.freq)) lines = specutil.linedatafromsegments(self.freq, self.chan, finalsegs, specs, statspec) llist = [] for l in lines: lsbdp.addRow(l) llist.append(l) rdata = [] # create the output label = ["Peak/Noise", "Minimum/Noise"] caption = [ "Potential lines overlaid on peak intensity plot from CubeStats_BDP.", "Potential lines overlaid on minimum intensity plot from CubeStats_BDP." ] xlabel = "Frequency (GHz)" for i, spec in enumerate(statspec): freqs = [] for ch in statseg[i]: frq = [ min(spec.freq()[ch[0]], spec.freq()[ch[1]]), max(spec.freq()[ch[0]], spec.freq()[ch[1]]) ] freqs.append(frq) rdata.append(frq) #print("Stats segment, peak, ratio, fwhm ",lname,peak,ratio,fwhm) mult = 1. if i == 1: mult = -1. # print("MWP statspec plot cutoff[%d] = %f, contin=%f" % (i, (statspec[i].contin() + mult*(statspec[i].noise() * self.getkey("numsigma")))[0], statspec[i].contin()[0] ) ) myplot.segplotter( spec.freq(), spec.spec(csub=False), title="Detected Line Segments", xlab=xlabel, ylab=label[i], figname=imbase + "_statspec%i" % i, segments=freqs, cutoff=(spec.contin() + mult * (spec.noise() * self.getkey("numsigma"))), continuum=spec.contin(), thumbnail=True) imname = myplot.getFigure(figno=myplot.figno, relative=True) thumbnailname = myplot.getThumbnail(figno=myplot.figno, relative=True) image = Image(images={bt.SVG: imname}, thumbnail=thumbnailname, thumbnailtype=bt.PNG, description=caption[i]) lsbdp.image.addimage(image, "statspec%i" % i) spec_description.append([ lsbdp.ra, lsbdp.dec, "", xlabel, imname, thumbnailname, caption[i], infile ]) for i in range(len(specs)): freqs = [] for ch in specseg[i]: frq = [ min(specs[i].freq()[ch[0]], specs[i].freq()[ch[1]]), max(specs[i].freq()[ch[0]], specs[i].freq()[ch[1]]) ] freqs.append(frq) rdata.append(frq) myplot.segplotter(specs[i].freq(), specs[i].spec(csub=False), title="Detected Line Segments", xlab=xlabel, ylab="Intensity", figname=imbase + "_spec%03d" % i, segments=freqs, cutoff=specs[i].contin() + (specs[i].noise() * self.getkey("numsigma")), continuum=specs[i].contin(), thumbnail=True) imname = myplot.getFigure(figno=myplot.figno, relative=True) thumbnailname = myplot.getThumbnail(figno=myplot.figno, relative=True) caption = "Detected line segments from input spectrum #%i." 
% (i) image = Image(images={bt.SVG: imname}, thumbnail=thumbnailname, thumbnailtype=bt.PNG, description=caption) lsbdp.image.addimage(image, "spec%03d" % i) spec_description.append([ lsbdp.ra, lsbdp.dec, "", xlabel, imname, thumbnailname, caption, infile ]) caption = "Merged segments overlaid on CubeStats spectrum" myplot.summaryspec(statspec, specs, None, imbase + "_summary", llist) imname = myplot.getFigure(figno=myplot.figno, relative=True) thumbnailname = myplot.getThumbnail(figno=myplot.figno, relative=True) caption = "Identified segments overlaid on Signal/Noise plot of all spectra." image = Image(images={bt.SVG: imname}, thumbnail=thumbnailname, thumbnailtype=bt.PNG, description=caption) lsbdp.image.addimage(image, "summary") spec_description.append([ lsbdp.ra, lsbdp.dec, "", "Signal/Noise", imname, thumbnailname, caption, infile ]) self._summary["segments"] = SummaryEntry(lsbdp.table.serialize(), "LineSegment_AT", self.id(True), taskargs) self._summary["spectra"] = [ SummaryEntry(spec_description, "LineSegment_AT", self.id(True), taskargs) ] self.addoutput(lsbdp) logging.regression("LINESEG: %s" % str(rdata)) dt.tag("done") dt.end()
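# The segment handling above (pad each detected channel range, then merge
# overlaps) can be summarized generically; the sketch below stands in for the
# admit Segments class and is an assumption about its behaviour, not its code.
def pad_and_merge(segments, pad, nchan):
    """segments: list of (start, end) channel pairs; returns merged, padded pairs."""
    widened = [(max(0, s - pad), min(nchan - 1, e + pad)) for s, e in segments]
    widened.sort()
    merged = []
    for s, e in widened:
        if merged and s <= merged[-1][1] + 1:        # touching or overlapping
            merged[-1] = (merged[-1][0], max(merged[-1][1], e))
        else:
            merged.append((s, e))
    return merged

# pad_and_merge([(16, 32), (16, 30), (40, 45)], pad=2, nchan=100)
#   -> [(14, 34), (38, 47)]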
def run(self): """ The run method, creates the slices, regrids if requested, and creates the BDP(s) Parameters ---------- None Returns ------- None """ dt = utils.Dtime("LineCube") self._summary = {} # look for an input noise level, either through keyword or input # CubeStats BDP or calculate it if needed pad = self.getkey("pad") fpad = self.getkey("fpad") equalize = self.getkey("equalize") minchan = 0 linelist = self._bdp_in[1] if linelist == None or len(linelist) == 0: logging.info("No lines found in input LineList_BDP, exiting.") return spw = self._bdp_in[0] # get the columns from the table cols = linelist.table.getHeader() # get the casa image imagename = spw.getimagefile(bt.CASA) imh = imhead(self.dir(imagename), mode='list') # set the overall parameters for imsubimage args = {"imagename": self.dir(imagename), "overwrite": True} dt.tag("start") if pad != 0 or fpad > 0: nchan = imh['shape'][2] dt.tag("pad") # if equal size cubes are requested, this will honor the requested pad if equalize: start = linelist.table.getColumnByName("startchan") end = linelist.table.getColumnByName("endchan") # look for the widest line for i in range(len(start)): diff = end[i] - start[i] + 1 if fpad > 0: minchan = max(minchan, diff * int(1 + 2 * fpad)) else: minchan = max(minchan, diff + (2 * pad)) dt.tag("equalize") # get all of the rows in the table rows = linelist.getall() delrow = set() procblend = [0] # search through looking for blended lines, leave only the strongest from each blend # in the list for i, row in enumerate(rows): if row.blend in procblend: continue strongest = -100. index = -1 indexes = [] blend = row.blend for j in range(i, len(rows)): if rows[j].blend != blend: continue indexes.append(j) if rows[j].linestrength > strongest: strongest = rows[j].linestrength index = j indexes.remove(index) delrow = delrow | set(indexes) procblend.append(blend) dr = list(delrow) dr.sort() dr.reverse() for row in dr: del rows[row] # check on duplicate UID's, since those are the directory names here uid1 = [] for row in rows: uid1.append(row.getkey("uid")) uid2 = set(uid1) if len(uid1) != len(uid2): print "LineList:", uid1 logging.warning("There are duplicate names in the LineList") #raise Exception,"There are duplicate names in the LineList" # Create Summary table lc_description = admit.util.Table() lc_description.columns = [ "Line Name", "Start Channel", "End Channel", "Output Cube" ] lc_description.units = ["", "int", "int", ""] lc_description.description = "Parameters of Line Cubes" # loop over all entries in the line list rdata = [] for row in rows: uid = row.getkey("uid") cdir = self.mkext(imagename, uid) self.mkdir(cdir) basefl = uid lcd = [basefl] outfl = cdir + os.sep + "lc.im" args["outfile"] = self.dir(outfl) start = row.getkey("startchan") end = row.getkey("endchan") diff = end - start + 1 startch = 0 if diff < minchan: add = int(math.ceil(float(minchan - diff) / 2.0)) start -= add end += add startch += add if start < 0: logging.info( "%s is too close to the edge to encompass with the " + "requested channels, start=%d resetting to 0" % (uid, start)) startch += abs(start) start = 0 if end >= nchan: logging.info( "%s is too close to the edge to encompass with the " + "requested channels, end=%d resetting to %d" % (uid, end, nchan - 1)) end = nchan - 1 #print "\n\nDIFF ",startch,"\n\n" if not equalize: if fpad > 0: diff = end - start + 1 start -= int(fpad * diff) end += int(fpad * diff) if start < 0: logging.warning( "fpad=%d too large, start=%d resetting to 0" % (int(fpad * diff), start)) startch 
+= abs(start) start = 0 else: startch += int(fpad * diff) if end >= nchan: logging.warning( "fpad=%d too large, end=%d resetting to %d" % (int(fpad * diff), end, nchan - 1)) end = nchan - 1 elif pad > 0: start -= pad end += pad if start < 0: logging.warning( "pad=%d too large, start=%d resetting to 0" % (pad, start)) startch += abs(start) start = 0 else: startch += pad if end >= nchan: logging.warning( "pad=%d too large, end=%d resetting to %d" % (pad, end, nchan - 1)) end = nchan - 1 elif pad < 0: mid = (start + end) / 2 start = mid + pad / 2 end = mid - pad / 2 - 1 if start < 0: logging.warning( "pad=%d too large, start=%d resetting to 0" % (pad, start)) startch += abs(start) start = 0 else: startch += abs(start) if end >= nchan: logging.warning( "pad=%d too large, end=%d resetting to %d" % (pad, end, nchan - 1)) end = nchan - 1 endch = startch + diff args["chans"] = "%i~%i" % (start, end) rdata.append(start) rdata.append(end) # for the summmary, which will be a table of # Line name, start channel, end channel, output image lc_description.addRow([basefl, start, end, outfl]) # create the slices imsubimage(**args) line = row.converttoline() # set the restfrequency ouf the output cube imhead(imagename=args["outfile"], mode="put", hdkey="restfreq", hdvalue="%fGHz" % (row.getkey("frequency"))) # set up the output BDP images = {bt.CASA: outfl} casaimage = Image(images=images) # note that Summary.getLineFluxes() implicitly relies on the BDP out order # being the same order as in the line list table. If this is ever not # true, then Summary.getLineFluxes mismatch BDPs and flux values. #self.addoutput(LineCube_BDP(xmlFile=cdir + os.sep + basefl + ".lc", self.addoutput( LineCube_BDP(xmlFile=outfl, image=casaimage, line=line, linechans="%i~%i" % (startch, endch))) dt.tag("trans-%s" % cdir) logging.regression("LC: %s" % str(rdata)) taskargs = "pad=%s fpad=%g equalize=%s" % (pad, fpad, equalize) self._summary["linecube"] = SummaryEntry(lc_description.serialize(), "LineCube_AT", self.id(True), taskargs) dt.tag("done") dt.end()
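# The blend filtering above keeps only the strongest catalogued line from each
# blend group before cutting line cubes.  A stand-alone sketch of that selection,
# using plain dicts as stand-ins for the LineList table rows (an assumption for
# illustration only):
def strongest_per_blend(rows):
    """rows: dicts with 'uid', 'blend' (0 = unblended) and 'linestrength'."""
    best = {}
    keep = []
    for row in rows:
        b = row["blend"]
        if b == 0:                          # unblended lines are always kept
            keep.append(row)
        elif b not in best or row["linestrength"] > best[b]["linestrength"]:
            best[b] = row                   # strongest member of this blend so far
    return keep + list(best.values())

# rows = [{"uid": "CO",  "blend": 0, "linestrength": 5.0},
#         {"uid": "U93", "blend": 1, "linestrength": 1.0},
#         {"uid": "U94", "blend": 1, "linestrength": 3.0}]
# strongest_per_blend(rows) -> keeps "CO" and "U94"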
def endElement(self, name): """ Method called whenever the end of an xml element is reached. This method is only called by the SAX parser iteself. Parameters ---------- name : str The name of the node that just ended Returns ------- None """ # reset the tracking stuff, add BDP's to AT's, AT's to the flowmanager # reconstruct any nodes that spanned multiple lines if name == self.utilName: # add the utility classes to the appropriate parent class # Images always get added to MultiImages if self.inMulti: self.MultiImage.addimage(copy.deepcopy(self.Util), self.Util.name) elif self.inBDP: setattr(self.BDP, self.utilName, copy.deepcopy(self.Util)) elif self.inAT: setattr(self.curAT, self.utilName, copy.deepcopy(self.Util)) self.inUtil = False self.utilName = "" elif name == self.multiName: if self.inBDP: setattr(self.BDP, self.multiName, copy.deepcopy(self.MultiImage)) elif self.inAT: setattr(self.curAT, self.multiName, copy.deepcopy(self.MultiImage)) self.multiImageName = "" self.inMulti = False self.inUtil = False elif name == bt.BDP: # one last validation run self.BDP._baseDir = self.basedir if not self.dtd.checkAll(): logging.info("Some required nodes missing from xml file, attempting to continue anyway.") elif name == bt.FLOWMANAGER: temp = aast.literal_eval(self.flowdata) for key in ["depsmap", "varimap"]: if key in temp: temp[key] = eval(temp[key]) self.flowmanager = fm.FlowManager(**temp) self.inflow = False elif isinstance(self.type, str): if self.inUtil: target = self.Util elif self.inBDP: target = self.BDP elif self.inAT: target = self.curAT elif self.inSummaryEntry: target = self.summaryEntry elif name == "projmanager": target = self else: target = self.admit self.setattr(target, name, self.tempdata) self.tempdata = "" elif isinstance(self.type, list) or isinstance(self.type, dict) \ or isinstance(self.type, tuple) or isinstance(self.type, set): temp = aast.literal_eval(self.tempdata) if self.inUtil: target = self.Util elif self.inBDP: target = self.BDP elif self.inAT: target = self.curAT elif self.inSummaryEntry: target = self.summaryEntry elif name == "projmanager": target = self else: target = self.admit for i in self.ndarr: temp[i] = np.array(temp[i], dtype=object) for i in self.sets: temp[i] = set(temp[i]) if isinstance(self.type, tuple): temp = tuple(temp) elif isinstance(self.type, set): temp = set(temp) try: self.setattr(target, name, temp) except AttributeError: logging.info("Data member %s is not a member of %s. This may be due to a version mismatch between the data and your software, attempting to continue." % (self.name, str(type(target)))) except: raise elif isinstance(self.type, np.ndarray): temp = aast.literal_eval(self.tempdata) if self.inUtil: target = self.Util elif self.inBDP: target = self.BDP elif self.inAT: target = self.curAT else: target = self.admit try: self.setattr(target, self.name, np.array(temp, dtype=object)) except AttributeError: logging.info("Data member %s is not a member of %s. This may be due to a version mismatch between the data and your software, attempting to continue." 
% (self.name, str(type(target)))) except: raise elif self.inAT and name == self.curAT.show(): self.inAdmit = False # one last validation run self.curAT._bdp_in = [None] * len(self.curAT._bdp_in_map) self.curAT._bdp_out = [None] * len(self.curAT._bdp_out_map) self.curAT._baseDir = self.basedir at = copy.deepcopy(self.curAT) self.AT.append(at) self.flowmanager[at._taskid] = at self.curAT = None self.inAT = False elif name == bt.ADMIT: self.inAdmit = False if not self.dtd.checkAll(): print "Some required nodes missing from admit.xml file, attempting to continue anyway" elif name == "_keys": self.inKeys = False elif name == self.summaryEntryName: self.summaryData._metadata[self.metadataName].append(copy.deepcopy(self.summaryEntry)) self.summaryEntryName = None self.inSummaryEntry = False elif name == self.summaryName: self.inSummary = False elif name == self.metadataName: self.metadataName = None else: self.ndarr = [] self.sets = [] self.type = None self.name = None self.ndarr = False
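# The parser above reconstructs typed values from the text accumulated between
# XML tags: parse the serialized literal with ast.literal_eval, then cast back to
# the declared container type.  This tiny helper illustrates the idea; the type
# tags are assumptions, not the ADMIT dtd/handler interface.
import ast
import numpy as np

def restore(text, kind):
    value = ast.literal_eval(text)          # safe parse of the serialized literal
    if kind == "tuple":
        return tuple(value)
    if kind == "set":
        return set(value)
    if kind == "ndarray":
        return np.array(value, dtype=object)
    return value                            # list, dict, int, float, str, ...

# restore("[1, 2, 3]", "ndarray")  -> array([1, 2, 3], dtype=object)
# restore("{'a': 1}", "dict")      -> {'a': 1}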
def run(self): # self._summary = {} # prepare to make a summary here dt = utils.Dtime("Ingest") # timer for debugging do_cbeam = True # enforce a common beam # pb = self.getkey('pb') do_pb = len(pb) > 0 use_pb = self.getkey("usepb") # create_mask = self.getkey('mask') # create a new mask ? box = self.getkey("box") # corners in Z, XY or XYZ edge = self.getkey("edge") # number of edge channels to remove restfreq = self.getkey("restfreq") # < 0 means not activated # smooth= could become deprecated, and/or include a decimation option to make it useful # again, Smooth_AT() does this also , at the cost of an extra cube to store smooth = self.getkey("smooth") # # vlsr = self.getkey("vlsr") # see also LineID, where this could be given again # first place a fits file in the admit project directory (symlink) # this is a bit involved, depending on if an absolute or relative path was # give to Ingest_AT(file=) fitsfile = self.getkey('file') if fitsfile[0] != os.sep: fitsfile = os.path.abspath(os.getcwd() + os.sep + fitsfile) logging.debug('FILE=%s' % fitsfile) if fitsfile[0] != os.sep: raise Exception,"Bad file=%s, expected absolute name",fitsfile # now determine if it could have been a CASA (or MIRIAD) image already # which we'll assume if it's a directory; this is natively supported by CASA # but there are tools where if you pass it a FITS or MIRIAD # MIRIAD is not recommended for serious work, especially big files, since there # is a performance penalty due to tiling. file_is_casa = casautil.iscasa(fitsfile) loc = fitsfile.rfind(os.sep) # find the '/' ffile0 = fitsfile[loc+1:] # basename.fits basename = self.getkey('basename') # (new) basename allowed (allow no dots?) if len(basename) == 0: basename = ffile0[:ffile0.rfind('.')] # basename logging.info("basename=%s" % basename) target = self.dir(ffile0) if not os.path.exists(target) : cmd = 'ln -s "%s" "%s"' % (fitsfile, target) logging.debug("CMD: %s" % cmd) os.system(cmd) readonly = False if file_is_casa: logging.debug("Assuming input %s is a CASA (or MIRIAD) image" % ffile0) bdpfile = self.mkext(basename,"im") if bdpfile == ffile0: logging.warning("No selections allowed on CASA image, since no alias was given") readonly = True b1 = SpwCube_BDP(bdpfile) self.addoutput(b1) b1.setkey("image", Image(images={bt.CASA:bdpfile})) # @todo b2 and PB? else: # construct the output name and construct the BDP based on the CASA image name # this also takes care of the behind the scenes alias= substitution bdpfile = self.mkext(basename,"im") if bdpfile == basename: raise Exception,"basename and bdpfile are the same, Ingest_AT needs a fix for this" b1 = SpwCube_BDP(bdpfile) self.addoutput(b1) if do_pb: print "doing the PB" bdpfile2 = self.mkext(basename,"pb") b2 = Image_BDP(bdpfile2) self.addoutput(b2) # @todo we should also set readonly=True if no box, no mask etc. and still an alias # that way it will speed up and not make a copy of the image ? # fni and fno are full (abspath) filenames, ready for CASA # fni is the same as fitsfile fni = self.dir(ffile0) fno = self.dir(bdpfile) if do_pb: fno2 = self.dir(bdpfile2) dt.tag("start") if file_is_casa: taskinit.ia.open(fni) else: if do_pb and use_pb: # @todo this needs a fix for the path for pb, only works if abs path is given # impbcor(im.fits,pb.fits,out.im,overwrite=True,mode='m') if False: # this may seem like a nice shortcut, to have the fits->casa conversion be done # internally in impbcor, but it's a terrible performance for big cubes. (tiling?) # we keep this code here, perhaps at some future time (mpi?) 
this performs better # @todo fno2 impbcor(fni,pb,fno,overwrite=True,mode='m') dt.tag("impbcor-1") else: # the better way is to convert FITS->CASA and then call impbcor() # the CPU savings are big, but I/O overhead can still be substantial taskinit.ia.fromfits('_pbcor',fni,overwrite=True) taskinit.ia.fromfits('_pb',pb,overwrite=True) dt.tag("impbcor-1f") if False: impbcor('_pbcor','_pb',fno,overwrite=True,mode='m') # @todo fno2 utils.remove('_pbcor') utils.remove('_pb') dt.tag("impbcor-2") else: # immath appears to be even faster (2x in CPU) # https://bugs.nrao.edu/browse/CAS-8299 # @todo this needs to be confirmed that impbcor is now good to go (r36078) casa.immath(['_pbcor','_pb'],'evalexpr',fno,'IM0*IM1') dt.tag("immath") if True: # use the mean of all channels... faster may be to use the middle plane # barf; edge channels can be with fewer subfields in a mosaic taskinit.ia.open('_pb') taskinit.ia.summary() ia1=taskinit.ia.moments(moments=[-1],drop=True,outfile=fno2) ia1.done() taskinit.ia.close() dt.tag("moments") utils.remove('_pbcor') utils.remove('_pb') dt.tag("impbcor-3") elif do_pb and not use_pb: # cheat case: PB was given, but not meant to be used # not implemented yet print "cheat case dummy PB not implemented yet" else: # no PB given if True: # re-running this was more consistently faster in wall clock time # note that zeroblanks=True will still keep the mask logging.debug("casa::ia.fromfits(%s) -> %s" % (fni,bdpfile)) taskinit.ia.fromfits(fno,fni,overwrite=True) #taskinit.ia.fromfits(fno,fni,overwrite=True,zeroblanks=True) dt.tag("fromfits") else: # not working to extend 3D yet, but this would solve the impv() 3D problem logging.debug("casa::importfits(%s) -> %s" % (fni,bdpfile)) #casa.importfits(fni,fno,defaultaxes=True,defaultaxesvalues=[None,None,None,'I']) # possible bug: zeroblanks=True has no effect? casa.importfits(fni,fno,zeroblanks=True) dt.tag("importfits") taskinit.ia.open(fno) if len(smooth) > 0: # smooth here, but Smooth_AT is another option # here we only allow pixel smoothing # spatial: gauss # spectral: boxcar/hanning (check for flux conservation) # is the boxcar wrong, not centered, but edged? # @todo CASA BUG: this will loose the object name (and maybe more?) from header, so VLSR lookup fails fnos = fno + '.smooth' taskinit.ia.convolve2d(outfile=fnos, overwrite=True, pa='0deg', major='%gpix' % smooth[0], minor='%gpix' % smooth[1], type='gaussian') taskinit.ia.close() srcname = casa.imhead(fno,mode="get",hdkey="object") # work around CASA bug #@todo use safer ia.rename() here. # https://casa.nrao.edu/docs/CasaRef/image.rename.html utils.rename(fnos,fno) casa.imhead(fno,mode="put",hdkey="object",hdvalue=srcname) # work around CASA bug dt.tag("convolve2d") if len(smooth) > 2 and smooth[2] > 0: if smooth[2] == 1: # @todo only 1 channel option specsmooth(fno,fnos,axis=2,function='hanning',dmethod="") else: # @todo may have the wrong center specsmooth(fno,fnos,axis=2,function='boxcar',dmethod="",width=smooth[2]) #@todo use safer ia.rename() here. # https://casa.nrao.edu/docs/CasaRef/image.rename.html utils.rename(fnos,fno) dt.tag("specsmooth") taskinit.ia.open(fno) s = taskinit.ia.summary() if len(s['shape']) != 4: logging.warning("Adding dummy STOKES-I axis") fnot = fno + '_4' taskinit.ia.adddegaxes(stokes='I',outfile=fnot) taskinit.ia.close() #@todo use safer ia.rename() here. 
# https://casa.nrao.edu/docs/CasaRef/image.rename.html utils.rename(fnot,fno) taskinit.ia.open(fno) dt.tag("adddegaxes") else: logging.info("SHAPE: %s" % str(s['shape'])) s = taskinit.ia.summary() dt.tag("summary-0") if s['hasmask'] and create_mask: logging.warning("no extra mask created because input image already had one") create_mask = False # if a box= or edge= was given, only a subset of the cube needs to be ingested # this however complicates PB correction later on if len(box) > 0 or len(edge) > 0: if readonly: raise Exception,"Cannot use box= or edge=, data is read-only, or use an basename/alias" if len(edge) == 1: edge.append(edge[0]) nx = s['shape'][0] ny = s['shape'][1] nz = s['shape'][2] logging.info("box=%s edge=%s processing with SHAPE: %s" % (str(box),str(edge),str(s['shape']))) if len(box) == 2: # select zrange if len(edge)>0: raise Exception,"Cannot use edge= when box=[z1,z2] is used" r1 = taskinit.rg.box([0,0,box[0]] , [nx-1,ny-1,box[1]]) elif len(box) == 4: if len(edge) == 0: # select just an XY box r1 = taskinit.rg.box([box[0],box[1]] , [box[2],box[3]]) elif len(edge) == 2: # select an XY box, but remove some edge channels r1 = taskinit.rg.box([box[0],box[1],edge[0]] , [box[2],box[3],nz-edge[1]-1]) else: raise Exception,"Bad edge= for len(box)=4" elif len(box) == 6: # select an XYZ box r1 = taskinit.rg.box([box[0],box[1],box[2]] , [box[3],box[4],box[5]]) elif len(edge) == 2: # remove some edge channels, but keep the whole XY box r1 = taskinit.rg.box([0,0,edge[0]] , [nx-1,ny-1,nz-edge[1]-1]) else: raise Exception,"box=%s illegal" % box logging.debug("BOX/EDGE selection: %s %s" % (str(r1['blc']),str(r1['trc']))) #if taskinit.ia.isopen(): taskinit.ia.close() logging.info("SUBIMAGE") subimage = taskinit.ia.subimage(region=r1,outfile=fno+'.box',overwrite=True) taskinit.ia.close() taskinit.ia.done() subimage.rename(fno,overwrite=True) subimage.close() subimage.done() taskinit.ia.open(fno) dt.tag("subimage-1") else: # the whole cube is passed onto ADMIT if readonly and create_mask: raise Exception,"Cannot use mask=True, data read-only, or use an alias" if file_is_casa and not readonly: # @todo a miriad file - which should be read only - will also create a useless copy here if no alias used taskinit.ia.subimage(overwrite=True,outfile=fno) taskinit.ia.close() taskinit.ia.open(fno) dt.tag("subimage-0") if create_mask: if readonly: raise Exception,"Cannot create mask, data read-only, or use an alias" # also check out the 'fromfits::zeroblanks = False' # calcmask() will overwrite any previous pixelmask #taskinit.ia.calcmask('mask("%s") && "%s" != 0.0' % (fno,fno)) taskinit.ia.calcmask('"%s" != 0.0' % fno) dt.tag("mask") s = taskinit.ia.summary() dt.tag("summary-1") # do a fast statistics (no median or robust) s0 = taskinit.ia.statistics() dt.tag("statistics") if len(s0['npts']) == 0: raise Exception,"No statistics possible, are there valid data in this cube?" # There may be multiple beams per plane so we can't # rely on the BEAM's 'major', 'minor', 'positionangle' being present. # ia.commonbeam() is guaranteed to return beam parameters # if present if do_cbeam and s.has_key('perplanebeams'): # report on the beam extremities, need to loop over all, # first and last don't need to be extremes.... 
n = s['perplanebeams']['nChannels'] ab0 = '*0' bb0 = s['perplanebeams']['beams'][ab0]['*0'] bmaj0 = bb0['major']['value'] bmin0 = bb0['minor']['value'] beamd = 0.0 for i in range(n): ab1 = '*%d' % i bb1 = s['perplanebeams']['beams'][ab1]['*0'] bmaj1 = bb1['major']['value'] bmin1 = bb1['minor']['value'] beamd = max(beamd,abs(bmaj0-bmaj1),abs(bmin0-bmin1)) logging.warning("MAX-BEAMSPREAD %f" % (beamd)) # if True: logging.info("Applying a commonbeam from the median beam accross the band") # imhead is a bit slow; alternatively use ia.summary() at the half point for setrestoringbeam() h = casa.imhead(fno,mode='list') b = h['perplanebeams']['median area beam'] taskinit.ia.setrestoringbeam(remove=True) taskinit.ia.setrestoringbeam(beam=b) commonbeam = taskinit.ia.commonbeam() else: # @todo : this will be VERY slow - code not finished, needs renaming etc. # this is however formally the better solution logging.warning("commmonbeam code not finished") cb = taskinit.ia.commonbeam() taskinit.ia.convolve2d(outfile='junk-common.im', major=cb['major'], minor=cb['minor'], pa=cb['pa'], targetres=True, overwrite=True) dt.tag('convolve2d') commonbeam = {} else: try: commonbeam = taskinit.ia.commonbeam() except: nppb = 4.0 logging.warning("No synthesized beam found, faking one to prevent downstream problems: nppb=%f" % nppb) s = taskinit.ia.summary() cdelt2 = abs(s['incr'][0]) * 180.0/math.pi*3600.0 bmaj = nppb * cdelt2 # use a nominal 4 points per (round) beam bmin = nppb * cdelt2 bpa = 0.0 taskinit.ia.setrestoringbeam(major='%farcsec' % bmaj, minor='%farcsec' % bmin, pa='%fdeg' % bpa) commonbeam = {} logging.info("COMMONBEAM[%d] %s" % (len(commonbeam),str(commonbeam))) first_point = taskinit.ia.getchunk(blc=[0,0,0,0],trc=[0,0,0,0],dropdeg=True) logging.debug("DATA0*: %s" % str(first_point)) taskinit.ia.close() logging.info('BASICS: [shape] npts min max: %s %d %f %f' % (s['shape'],s0['npts'][0],s0['min'][0],s0['max'][0])) logging.info('S/N (all data): %f' % (s0['max'][0]/s0['rms'][0])) npix = 1 nx = s['shape'][0] ny = s['shape'][1] nz = s['shape'][2] for n in s['shape']: npix = npix * n ngood = int(s0['npts'][0]) fgood = (1.0*ngood)/npix logging.info('GOOD PIXELS: %d/%d (%f%% good or %f%% bad)' % (ngood,npix,100.0*fgood,100.0*(1 - fgood))) if s['hasmask']: logging.warning('MASKS: %s' % (str(s['masks']))) if not file_is_casa: b1.setkey("image", Image(images={bt.CASA:bdpfile})) if do_pb: b2.setkey("image", Image(images={bt.CASA:bdpfile2})) # cube sanity: needs to be either 4D or 2D. But p-p-v cube # alternative: ia.subimage(dropdeg = True) # see also: https://bugs.nrao.edu/browse/CAS-5406 shape = s['shape'] if len(shape)>3: if shape[3]>1: # @todo this happens when you ingest a fits or casa image which is ra-dec-pol-freq if nz > 1: msg = 'Ingest_AT: cannot deal with real 4D cubes yet' logging.critical(msg) raise Exception,msg else: # @todo this is not working yet when the input was a casa image, but ok when fits. go figure. fnot = fno + ".trans" if True: # this works #@todo use safer ia.rename() here. # https://casa.nrao.edu/docs/CasaRef/image.rename.html utils.rename(fno,fnot) imtrans(fnot,fno,"0132") utils.remove(fnot) else: # this does not work, what the heck imtrans(fno,fnot,"0132") #@todo use safer ia.rename() here. 
# https://casa.nrao.edu/docs/CasaRef/image.rename.html utils.rename(fnot,fno) nz = s['shape'][3] # get a new summary 's' taskinit.ia.open(fno) s = taskinit.ia.summary() taskinit.ia.close() logging.warning("Using imtrans, with nz=%d, to fix axis ordering" % nz) dt.tag("imtrans4") # @todo ensure first two axes are position, followed by frequency elif len(shape)==3: # the current importfits() can do defaultaxes=True,defaultaxesvalues=['', '', '', 'I'] # but then appears to return a ra-dec-pol-freq cube # this branch probably never happens, since ia.fromfits() will # properly convert a 3D cube to 4D now !! # NO: when NAXIS=3 but various AXIS4's are present, that works. But not if it's pure 3D # @todo box= logging.warning("patching up a 3D to 4D cube") raise Exception,"SHOULD NEVER GET HERE" fnot = fno + ".trans" casa.importfits(fni,fnot,defaultaxes=True,defaultaxesvalues=['', '', '', 'I']) utils.remove(fno) # ieck imtrans(fnot,fno,"0132") utils.remove(fnot) dt.tag("imtrans3") logging.regression('CUBE: %g %g %g %d %d %d %f' % (s0['min'],s0['max'],s0['rms'],nx,ny,nz,100.0*(1 - fgood))) # if the cube has only 1 plane (e.g. continuum) , create a visual (png or so) # for 3D cubes, rely on something like CubeSum if nz == 1: implot = ImPlot(pmode=self._plot_mode,ptype=self._plot_type,abspath=self.dir()) implot.plotter(rasterfile=bdpfile,figname=bdpfile) # @todo needs to be registered for the BDP, right now we only have the plot # ia.summary() doesn't have this easily available, so run the more expensive imhead() h = casa.imhead(fno,mode='list') telescope = h['telescope'] # work around CASA's PIPELINE bug/feature? if 'OBJECT' is blank, try 'FIELD' srcname = h['object'] if srcname == ' ': logging.warning('FIELD used for OBJECT') srcname = casa.imhead(fno,mode='get',hdkey='field') if srcname == False: # if no FIELD either, we're doomed. yes, this did happen. srcname = 'Unknown' casa.imhead(fno,mode="put",hdkey="object",hdvalue=srcname) h['object'] = srcname logging.info('TELESCOPE: %s' % telescope) logging.info('OBJECT: %s' % srcname) logging.info('REFFREQTYPE: %s' % h['reffreqtype']) if h['reffreqtype'].find('TOPO')>=0: msg = 'Ingest_AT: cannot deal with cubes with TOPOCENTRIC frequencies yet - winging it' logging.warning(msg) #raise Exception,msg # Ensure beam parameters are available if there are multiple beams # If there is just one beam, then we are just overwriting the header # variables with their identical values. if len(commonbeam) != 0: h['beammajor'] = commonbeam['major'] h['beamminor'] = commonbeam['minor'] h['beampa'] = commonbeam['pa'] # cheat add some things that need to be passed to summary.... h['badpixel'] = 1.0-fgood if vlsr < -999998.0: vlsr = admit.VLSR().vlsr(h['object'].upper()) h['vlsr'] = vlsr logging.info("VLSR = %f (from source catalog)" % vlsr) taskargs = "file=" + fitsfile if create_mask == True: taskargs = taskargs + " mask=True" if len(box) > 0: taskargs = taskargs + " " + str(box) if len(edge) > 0: taskargs = taskargs + " " + str(edge) r2d = 57.29577951308232 logging.info("RA Axis 1: %f %f %f" % (h['crval1']*r2d,h['cdelt1']*r2d*3600.0,h['crpix1'])) logging.info("DEC Axis 2: %f %f %f" % (h['crval2']*r2d,h['cdelt2']*r2d*3600.0,h['crpix2'])) if nz > 1: # @todo check if this is really a freq axis (for ALMA it is, but...) 
t3 = h['ctype3'] df = h['cdelt3'] fc = h['crval3'] + (0.5*(float(shape[2])-1)-h['crpix3'])*df # center freq; 0 based pixels if h.has_key('restfreq'): fr = float(h['restfreq'][0]) else: fr = fc fw = df*float(shape[2]) dv = -df/fr*utils.c logging.info("Freq Axis 3: %g %g %g" % (h['crval3']/1e9,h['cdelt3']/1e9,h['crpix3'])) logging.info("Cube Axis 3: type=%s velocity increment=%f km/s @ fc=%f fw=%f GHz" % (t3,dv,fc/1e9,fw/1e9)) # @todo sort out this restfreq/vlsr # report 'reffreqtype', 'restfreq' 'telescope' # if the fits file has ALTRVAL/ALTRPIX, this is lost in CASA? # but if you do fits->casa->fits , it's back in fits (with some obvious single precision loss of digits) # @todo ZSOURCE is the proposed VLSR slot in the fits header, but this has frame issues (it's also optical) # # Another method to get the vlsr is to override the restfreq (f0) with an AT keyword # and the 'restfreq' from the header (f) is then used to compute the vlsr: v = c (1 - f/f0) # if shape[2] > 1 and h.has_key('restfreq'): logging.info("RESTFREQ: %g %g %g" % (fr/1e9,h['restfreq'][0]/1e9,restfreq)) if shape[2] > 1: # v_radio of the center of the window w.r.t. restfreq c = utils.c # 299792.458 km/s vlsrc = c*(1-fc/fr) # @todo rel frame? vlsrw = dv*float(shape[2]) if restfreq > 0: vlsrf = c*(1-fr/restfreq/1e9) h['vlsr'] = vlsrf else: vlsrf = 0.0 logging.info("VLSRc = %f VLSRw = %f VLSRf = %f VLSR = %f" % (vlsrc, vlsrw, vlsrf, vlsr)) if h['vlsr'] == 0.0: # @todo! This fails if vlsr actually is zero. Need another magic number h['vlsr'] = vlsrc logging.warning("Warning: No VLSR found, substituting VLSRc = %f" % vlsrc) else: msg = 'Ingest_AT: missing RESTFREQ' print msg # @todo LINTRN is the ALMA keyword that designates the expected line transition in a spw self._summarize(fitsfile, bdpfile, h, shape, taskargs) dt.tag("done") dt.end()
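# A minimal sketch (with made-up header numbers, not taken from any real cube) of
# the frequency-axis arithmetic used above: the channel width in km/s and the radio
# velocity of the window centre with respect to a rest frequency, v = c*(1 - f/f0).
c = 299792.458                    # speed of light in km/s (utils.c in ADMIT)

crval3 = 115.2712e9               # hypothetical reference frequency of axis 3 [Hz]
cdelt3 = -0.5e6                   # hypothetical channel width [Hz]
crpix3 = 0.0                      # hypothetical reference pixel (0-based)
nchan  = 100                      # hypothetical number of channels
restfreq = 115.271202e9           # hypothetical rest frequency [Hz]

fc = crval3 + (0.5*(nchan-1) - crpix3)*cdelt3   # frequency at the cube centre [Hz]
dv = -cdelt3/restfreq * c                       # velocity increment per channel [km/s]
vlsrc = c * (1.0 - fc/restfreq)                 # radio velocity of the centre channel [km/s]

print "channel width = %.3f km/s, VLSRc = %.3f km/s" % (dv, vlsrc)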
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ self._summary = {} dt = utils.Dtime("Smooth") dt.tag("start") # get the input keys bmaj = self.getkey("bmaj") bmin = self.getkey("bmin") bpa = self.getkey("bpa") velres = self.getkey("velres") # take care of potential issues in the unit strings # @todo if not provided? bmaj['unit'] = bmaj['unit'].lower() bmin['unit'] = bmin['unit'].lower() velres['unit'] = velres['unit'].lower() taskargs = "bmaj=%s bmin=%s bpa=%s velres=%s" % (bmaj, bmin, bpa, velres) bdpnames = [] for ibdp in self._bdp_in: istem = ibdp.getimagefile(bt.CASA) image_in = ibdp.baseDir() + istem bdp_name = self.mkext(istem, 'sim') image_out = self.dir(bdp_name) taskinit.ia.open(image_in) h = casa.imhead(image_in, mode='list') pix_scale = np.abs(h['cdelt1'] * 206265.0) # pix scale in asec @todo QA ? CC = 299792458.0 # speed of light @todo somewhere else [utils.c , but in km/s] rest_freq = h['crval3'] # frequency pixel scale in km/s vel_scale = np.abs(CC * h['cdelt3'] / rest_freq / 1000.0) # unit conversion to arcsec (spatial) or km/s # (velocity) or some flavor of Hz. if (bmaj['unit'] == 'pixel'): bmaj = bmaj['value'] * pix_scale else: bmaj = bmaj['value'] if (bmin['unit'] == 'pixel'): bmin = bmin['value'] * pix_scale else: bmin = bmin['value'] hertz_input = False if velres['unit'] == 'pixel': velres['value'] = velres['value'] * vel_scale velres['unit'] = 'km/s' elif velres['unit'] == 'm/s': velres['value'] = velres['value'] / 1000.0 velres['unit'] = 'km/s' elif velres['unit'][-2:] == 'hz': hertz_input = True elif velres['unit'] == 'km/s': pass else: logging.error("Unknown units in velres=%s" % velres['unit']) rdata = bmaj # we smooth in velocity first. if smoothing in velocity # the cube apparently must be closed afterwards and # then reopened if spatial smoothing is to be done. if velres['value'] > 0: # handle the different units allowed. CASA doesn't # like lowercase for hz units... if not hertz_input: freq_res = str( velres['value'] * 1000.0 / CC * rest_freq) + 'Hz' else: freq_res = str(velres['value']) # try to convert velres to km/s for debug purposes velres['value'] = velres['value'] / rest_freq * CC / 1000.0 if (velres['unit'] == 'khz'): velres['value'] = velres['value'] * 1000.0 velres['unit'] = 'kHz' elif (velres['unit'] == 'mhz'): velres['value'] = velres['value'] * 1E6 velres['unit'] = 'MHz' elif (velres['unit'] == 'ghz'): velres['value'] = velres['value'] * 1E9 velres['unit'] = 'GHz' freq_res = freq_res + velres['unit'] # NB: there is apparently a bug in CASA. only smoothing along the frequency # axis does not work. sepconvolve gives a unit error (says axis unit is radian rather # than Hz). MUST smooth in 2+ dimensions if you want this to work. if (velres['value'] < vel_scale): raise Exception, "Desired velocity resolution %g less than pixel scale %g" % ( velres['value'], vel_scale) image_tmp = self.dir('tmp.smooth') im2=taskinit.ia.sepconvolve(outfile=image_tmp,axes=[0,1,2], types=["boxcar","boxcar","gauss"],\ widths=['1pix','1pix',freq_res], overwrite=True) im2.done() logging.debug("sepconvolve to %s" % image_out) # for some reason, doing this in memory does not seem to work, so outfile must be specified. 
logging.info( "Smoothing cube to a velocity resolution of %s km/s" % str(velres['value'])) logging.info("Smoothing cube to a frequency resolution of %s" % freq_res) taskinit.ia.close() taskinit.ia.open(image_tmp) dt.tag("sepconvolve") else: image_tmp = image_out # now do the spatial smoothing convolve_to_min_beam = True # default is to convolve to a min enclosing beam if bmaj > 0 and bmin > 0: # form qa objects out of these so that casa can understand bmaj = taskinit.qa.quantity(bmaj, 'arcsec') bmin = taskinit.qa.quantity(bmin, 'arcsec') bpa = taskinit.qa.quantity(bpa, 'deg') target_res = {} target_res['major'] = bmaj target_res['minor'] = bmin target_res['positionangle'] = bpa # throw an exception if cannot be convolved try: # for whatever reason, if you give convolve2d a beam parameter, # it complains ... im2=taskinit.ia.convolve2d(outfile=image_out,major = bmaj,\ minor = bmin, pa = bpa,\ targetres=True,overwrite=True) im2.done() logging.info( "Smoothing cube to a resolution of %s by %s at a PA of %s" % (str(bmaj['value']), str( bmin['value']), str(bpa['value']))) convolve_to_min_beam = False achieved_res = target_res except: # @todo remind what you need ? logging.error("Warning: Could not convolve to requested resolution of "\ +str(bmaj['value']) + " by " + str(bmin['value']) + \ " at a PA of "+ str(bpa['value'])) raise Exception, "Could not convolve to beam given!" dt.tag("convolve2d-1") if convolve_to_min_beam: restoring_beams = taskinit.ia.restoringbeam() commonbeam = taskinit.ia.commonbeam() # for whatever reason, setrestoringbeam does not use the same set of hashes... commonbeam['positionangle'] = commonbeam['pa'] del commonbeam['pa'] # if there's one beam, apparently the beams keyword does not exist if 'beams' in restoring_beams: print "Smoothing cube to a resolution of "+ \ str(commonbeam['major']['value']) +" by "+ \ str(commonbeam['minor']['value'])+" at a PA of "\ +str(commonbeam['pa']['value']) target_res = commonbeam im2=taskinit.ia.convolve2d(outfile=image_out,major=commonbeam['major'],\ minor=commonbeam['minor'],\ pa=commonbeam['positionangle'],\ targetres=True,overwrite=True) im2.done() achieved_res = commonbeam dt.tag("convolve2d-2") else: print "One beam for all planes. Smoothing to common beam redundant." achieved_res = commonbeam if velres['value'] < 0: taskinit.ia.fromimage(outfile=image_out, infile=image_in) # not really doing anything # else, we've already done what we needed to taskinit.ia.setrestoringbeam(beam=achieved_res) rdata = achieved_res['major']['value'] # else do no smoothing and just close the image taskinit.ia.close() dt.tag("close") b1 = SpwCube_BDP(bdp_name) self.addoutput(b1) # need to update for multiple images. b1.setkey("image", Image(images={bt.CASA: bdp_name})) bdpnames = bdpnames.append(bdp_name) # and clean up the temp image before the next image if velres['value'] > 0: utils.remove(image_tmp) # thes are task arguments not summary entries. _bmaj = taskinit.qa.convert(achieved_res['major'], 'rad')['value'] _bmin = taskinit.qa.convert(achieved_res['minor'], 'rad')['value'] _bpa = taskinit.qa.convert(achieved_res['positionangle'], 'deg')['value'] vres = "%.2f %s" % (velres['value'], velres['unit']) logging.regression("SMOOTH: %f %f" % (rdata, velres['value'])) self._summary["smooth"] = SummaryEntry( [bdp_name, convolve_to_min_beam, _bmaj, _bmin, _bpa, vres], "Smooth_AT", self.id(True), taskargs) dt.tag("done") dt.end()
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ dt = utils.Dtime("SFind2D") # tagging time self._summary = {} # get key words that user input nsigma = self.getkey("numsigma") sigma = self.getkey("sigma") region = self.getkey("region") robust = self.getkey("robust") snmax = self.getkey("snmax") nmax = self.getkey("nmax") ds9 = True # writes a "ds9.reg" file mpl = True # aplot.map1() plot dynlog = 20.0 # above this value of dyn range finder chart is log I-scaled bpatch = True # patch units to Jy/beam for ia.findsources() # get the input casa image from bdp[0] bdpin = self._bdp_in[0] infile = bdpin.getimagefile(bt.CASA) if mpl: data = np.flipud(np.rot90(casautil.getdata(self.dir(infile)).data)) # check if there is a 2nd image (which will be a PB) for i in range(len(self._bdp_in)): print 'BDP', i, type(self._bdp_in[i]) if self._bdp_in[2] != None: bdpin_pb = self._bdp_in[1] bdpin_cst = self._bdp_in[2] print "Need to process PB" else: bdpin_pb = None bdpin_cst = self._bdp_in[1] print "No PB given" # get the output bdp basename slbase = self.mkext(infile, 'sl') # make sure it's a 2D map if not casautil.mapdim(self.dir(infile), 2): raise Exception, "Input map dimension not 2: %s" % infile # arguments for imstat call if required args = {"imagename": self.dir(infile)} if region != "": args["region"] = region dt.tag("start") # The following code sets the sigma level for searching for sources using # the sigma and snmax keyword as appropriate # if no CubeStats BDP was given and no sigma was specified: # find a noise level via casa.imstat() # if a CubeStat_BDP is given get it from there. if bdpin_cst == None: # get statistics from input image with imstat because no CubeStat_BDP stat = casa.imstat(**args) dmin = float( stat["min"] [0]) # these would be wrong if robust were used already dmax = float(stat["max"][0]) args.update(casautil.parse_robust( robust)) # only now add robust keywords for the sigma stat = casa.imstat(**args) if sigma <= 0.0: sigma = float(stat["sigma"][0]) dt.tag("imstat") else: # get statistics from CubeStat_BDP sigma = bdpin_cst.get("sigma") dmin = bdpin_cst.get("minval") dmax = bdpin_cst.get("maxval") self.setkey("sigma", sigma) # calculate cutoff based either on RMS or dynamic range limitation drange = dmax / (nsigma * sigma) if snmax < 0.0: snmax = drange if drange > snmax: cutoff = 1.0 / snmax else: cutoff = 1.0 / drange logging.info("sigma, dmin, dmax, snmax, cutoff %g %g %g %g %g" % (sigma, dmin, dmax, snmax, cutoff)) # define arguments for call to findsources args2 = {"cutoff": cutoff} args2["nmax"] = nmax if region != "": args2["region"] = region #args2["mask"] = "" args2["point"] = False args2["width"] = 5 args2["negfind"] = False # set-up for SourceList_BDP slbdp = SourceList_BDP(slbase) # connect to casa image and call casa ia.findsources tool ia = taskinit.iatool() ia.open(self.dir(infile)) # findsources() cannot deal with 'Jy/beam.km/s' ??? 
# so for the duration of findsources() we patch it bunit = ia.brightnessunit() if bpatch and bunit != 'Jy/beam': logging.warning( "Temporarely patching your %s units to Jy/beam for ia.findsources()" % bunit) ia.setbrightnessunit('Jy/beam') else: bpatch = False atab = ia.findsources(**args2) if bpatch: ia.setbrightnessunit(bunit) taskargs = "nsigma=%4.1f sigma=%g region=%s robust=%s snmax=%5.1f nmax=%d" % ( nsigma, sigma, str(region), str(robust), snmax, nmax) dt.tag("findsources") nsources = atab["nelements"] xtab = [] ytab = [] logscale = False sumflux = 0.0 if nsources > 0: # @TODO: Why are Xpix, YPix not stored in the table? # -> PJT: I left them out since they are connected to an image which may not be available here # but we should store the frequency of the observation here for later bandmerging logging.debug("%s" % str(atab['component0']['shape'])) logging.info( "Right Ascen. Declination X(pix) Y(pix) Peak Flux Major Minor PA SNR" ) funits = atab['component0']['flux']['unit'] if atab['component0']['shape'].has_key('majoraxis'): sunits = atab['component0']['shape']['majoraxis']['unit'] aunits = atab['component0']['shape']['positionangle']['unit'] else: sunits = "n/a" aunits = "n/a" punits = ia.summary()['unit'] logging.info( " %s %s %s %s %s" % (punits, funits, sunits, sunits, aunits)) # # @todo future improvement is to look at image coordinates and control output appropriately # if ds9: # @todo variable name regname = self.mkext(infile, 'ds9.reg') fp9 = open(self.dir(regname), "w!") sn0 = -1.0 for i in range(nsources): c = "component%d" % i name = "%d" % (i + 1) r = atab[c]['shape']['direction']['m0']['value'] d = atab[c]['shape']['direction']['m1']['value'] pixel = ia.topixel([r, d]) xpos = pixel['numeric'][0] ypos = pixel['numeric'][1] rd = ia.toworld([xpos, ypos], 's') ra = rd['string'][0][:12] dec = rd['string'][1][:12] flux = atab[c]['flux']['value'][0] sumflux = sumflux + flux if atab[c]['shape'].has_key('majoraxis'): smajor = atab[c]['shape']['majoraxis']['value'] sminor = atab[c]['shape']['minoraxis']['value'] sangle = atab[c]['shape']['positionangle']['value'] else: smajor = 0.0 sminor = 0.0 sangle = 0.0 peakstr = ia.pixelvalue([xpos, ypos, 0, 0]) if len(peakstr) == 0: logging.warning("Problem with source %d @ %d,%d" % (i, xpos, ypos)) continue peakf = peakstr['value']['value'] snr = peakf / sigma if snr > dynlog: logscale = True if snr > sn0: sn0 = snr logging.info( "%s %s %8.2f %8.2f %10.3g %10.3g %7.3f %7.3f %6.1f %6.1f" % (ra, dec, xpos, ypos, peakf, flux, smajor, sminor, sangle, snr)) xtab.append(xpos) ytab.append(ypos) slbdp.addRow( [name, ra, dec, flux, peakf, smajor, sminor, sangle]) if ds9: ras = ra des = dec.replace('.', ':', 2) msg = 'ellipse(%s,%s,%g",%g",%g) # text={%s}' % ( ras, des, smajor, sminor, sangle + 90.0, i + 1) fp9.write("%s\n" % msg) if ds9: fp9.close() logging.info("Wrote ds9.reg") dt.tag("table") logging.regression("CONTFLUX: %d %g" % (nsources, sumflux)) summary = ia.summary() beammaj = summary['restoringbeam']['major']['value'] beammin = summary['restoringbeam']['minor']['value'] beamunit = summary['restoringbeam']['minor']['unit'] beamang = summary['restoringbeam']['positionangle']['value'] angunit = summary['restoringbeam']['positionangle']['unit'] # @todo add to table comments? 
logging.info(" Fitted Gaussian size; NOT deconvolved source size.") logging.info( " Restoring Beam: Major axis: %10.3g %s , Minor axis: %10.3g %s , PA: %5.1f %s" % (beammaj, beamunit, beammin, beamunit, beamang, angunit)) # form into a xml table # output is a table_bdp self.addoutput(slbdp) # instantiate a plotter for all plots made herein myplot = APlot(ptype=self._plot_type, pmode=self._plot_mode, abspath=self.dir()) # make output png with circles marking sources found if mpl: circles = [] nx = data.shape[1] # data[] array was already flipud(rot90)'d ny = data.shape[0] # for (x, y) in zip(xtab, ytab): circles.append([x, y, 1]) # @todo variable name if logscale: logging.warning("LogScaling applied") data = data / sigma data = np.where(data < 0, -np.log10(1 - data), +np.log10(1 + data)) if nsources == 0: title = "SFind2D: 0 sources above S/N=%.1f" % (nsigma) elif nsources == 1: title = "SFind2D: 1 source (%.1f < S/N < %.1f)" % (nsigma, sn0) else: title = "SFind2D: %d sources (%.1f < S/N < %.1f)" % ( nsources, nsigma, sn0) myplot.map1(data, title, slbase, thumbnail=True, circles=circles, zoom=self.getkey("zoom")) #--------------------------------------------------------- # Get the figure and thumbmail names and create a caption #--------------------------------------------------------- imname = myplot.getFigure(figno=myplot.figno, relative=True) thumbnailname = myplot.getThumbnail(figno=myplot.figno, relative=True) caption = "Image of input map with sources found by SFind2D overlayed in green." slbdp.table.description = "Table of source locations and sizes (not deconvolved)" #--------------------------------------------------------- # Add finder image to the BDP #--------------------------------------------------------- image = Image(images={bt.PNG: imname}, thumbnail=thumbnailname, thumbnailtype=bt.PNG, description=caption) slbdp.image.addimage(image, "finderimage") #------------------------------------------------------------- # Create the summary entry for the table and image #------------------------------------------------------------- self._summary["sources"] = SummaryEntry( [slbdp.table.serialize(), slbdp.image.serialize()], "SFind2D_AT", self.id(True), taskargs) dt.tag("done") dt.end()
def run(self): """ The run method, creates the slices, regrids if requested, and creates the BDP(s) Parameters ---------- None Returns ------- None """ dt = utils.Dtime("LineCube") self._summary = {} # look for an input noise level, either through keyword or input # CubeStats BDP or calculate it if needed pad = self.getkey("pad") equalize = self.getkey("equalize") minchan = 0 linelist = self._bdp_in[1] if linelist == None or len(linelist) == 0: logging.info("No lines found in input LineList_BDP, exiting.") return spw = self._bdp_in[0] # get the columns from the table cols = linelist.table.getHeader() # get the casa image imagename = spw.getimagefile(bt.CASA) imh = imhead(self.dir(imagename), mode='list') # set the overall parameters for imsubimage args = {"imagename" : self.dir(imagename), "overwrite" : True} dt.tag("start") if pad != 0: nchan = imh['shape'][2] dt.tag("pad") # if equal size cubes are requested, this will honor the requested pad if equalize: start = linelist.table.getColumnByName("startchan") end = linelist.table.getColumnByName("endchan") # look for the widest line for i in range(len(start)): diff = end[i] - start[i] + 1 minchan = max(minchan , diff + (pad * 2)) dt.tag("equalize") # get all of the rows in the table rows = linelist.getall() delrow = set() procblend = [0] # search through looking for blended lines, leave only the strongest from each blend # in the list for i, row in enumerate(rows): if row.blend in procblend: continue strongest = -100. index = -1 indexes = [] blend = row.blend for j in range(i, len(rows)): if rows[j].blend != blend: continue indexes.append(j) if rows[j].linestrength > strongest: strongest = rows[j].linestrength index = j indexes.remove(index) delrow = delrow | set(indexes) procblend.append(blend) dr = list(delrow) dr.sort() dr.reverse() for row in dr: del rows[row] # check on duplicate UID's, since those are the directory names here uid1 = [] for row in rows: uid1.append(row.getkey("uid")) uid2 = set(uid1) if len(uid1) != len(uid2): print "LineList:",uid1 logging.warning("There are duplicate names in the LineList") #raise Exception,"There are duplicate names in the LineList" # Create Summary table lc_description = admit.util.Table() lc_description.columns = ["Line Name","Start Channel","End Channel","Output Cube"] lc_description.units = ["","int","int",""] lc_description.description = "Parameters of Line Cubes" # loop over all entries in the line list rdata = [] for row in rows: uid = row.getkey("uid") cdir = self.mkext(imagename,uid) self.mkdir(cdir) basefl = uid lcd = [basefl] outfl = cdir + os.sep + "lc.im" args["outfile"] = self.dir(outfl) start = row.getkey("startchan") end = row.getkey("endchan") diff = end - start + 1 startch = 0 if diff < minchan: add = int(math.ceil(float(minchan - diff) / 2.0)) start -= add end += add startch += add if start < 0: logging.info("%s is too close to the edge to encompass with the " + "requested channels, start=%d resetting to 0" % (uid, start)) startch += abs(start) start = 0 if end >= nchan: logging.info("%s is too close to the edge to encompass with the " + "requested channels, end=%d resetting to %d" % (uid, end, nchan - 1)) end = nchan - 1 #print "\n\nDIFF ",startch,"\n\n" if pad > 0 and not equalize: start -= pad end += pad if start < 0: logging.warning("pad=%d too large, start=%d resetting to 0" % (pad, start)) startch += abs(start) start = 0 else: startch += pad if end >= nchan: logging.warning("pad=%d too large, end=%d resetting to %d" % (pad, end, nchan - 1)) end = nchan - 1 elif pad < 0 and 
not equalize: mid = (start + end) / 2 start = mid + pad / 2 end = mid - pad / 2 - 1 if start < 0: logging.warning("pad=%d too large, start=%d resetting to 0" % (pad, start)) startch += abs(start) start = 0 else: startch += abs(start) if end >= nchan: logging.warning("pad=%d too large, end=%d resetting to %d" % (pad, end, nchan - 1)) end = nchan - 1 endch = startch + diff args["chans"] = "%i~%i" % (start, end) rdata.append(start) rdata.append(end) # for the summary, which will be a table of # Line name, start channel, end channel, output image lc_description.addRow([basefl, start, end, outfl]) # create the slices imsubimage(**args) line = row.converttoline() # set the rest frequency of the output cube imhead(imagename=args["outfile"], mode="put", hdkey="restfreq", hdvalue="%fGHz" % (row.getkey("frequency"))) # set up the output BDP images = {bt.CASA : outfl} casaimage = Image(images=images) # note that Summary.getLineFluxes() implicitly relies on the BDP out order # being the same order as in the line list table. If this is ever not # true, then Summary.getLineFluxes will mismatch BDPs and flux values. #self.addoutput(LineCube_BDP(xmlFile=cdir + os.sep + basefl + ".lc", self.addoutput(LineCube_BDP(xmlFile=outfl, image=casaimage, line=line, linechans="%i~%i" % (startch, endch))) dt.tag("trans-%s" % cdir) logging.regression("LC: %s" % str(rdata)) taskargs = "pad=%s equalize=%s" % (pad, equalize) self._summary["linecube"] = SummaryEntry(lc_description.serialize(), "LineCube_AT", self.id(True), taskargs) dt.tag("done") dt.end()
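# A minimal, simplified sketch (not the AT itself) of the channel-window
# arithmetic used above: grow a [start,end] window to at least `minchan`
# channels, apply an extra `pad` of guard channels, and clip to 0..nchan-1.
# The function name and the example numbers are hypothetical.
import math

def line_window(start, end, nchan, minchan=0, pad=0):
    diff = end - start + 1
    if diff < minchan:                     # equalize: grow to the widest line
        add = int(math.ceil(float(minchan - diff) / 2.0))
        start -= add
        end += add
    start -= pad                           # extra guard channels on both sides
    end += pad
    start = max(start, 0)                  # clip to the cube
    end = min(end, nchan - 1)
    return start, end

print line_window(16, 30, 100, minchan=25, pad=2)   # -> (9, 37)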
def run(self): """ The run method creates the BDP Parameters ---------- None Returns ------- None """ dt = utils.Dtime("CubeSum") # tagging time self._summary = {} # an ADMIT summary will be created here numsigma = self.getkey("numsigma") # get the input keys sigma = self.getkey("sigma") use_lines = self.getkey("linesum") pad = self.getkey("pad") b1 = self._bdp_in[0] # spw image cube b1a = self._bdp_in[1] # cubestats (optional) b1b = self._bdp_in[2] # linelist (optional) f1 = b1.getimagefile(bt.CASA) taskinit.ia.open(self.dir(f1)) s = taskinit.ia.summary() nchan = s['shape'][2] if b1b != None: ch0 = b1b.table.getFullColumnByName("startchan") ch1 = b1b.table.getFullColumnByName("endchan") s = Segments(ch0,ch1,nchan=nchan) # @todo something isn't merging here as i would have expected, # e.g. test0.fits [(16, 32), (16, 30), (16, 29)] if pad > 0: for (c0,c1) in s.getsegmentsastuples(): s.append([c0-pad,c0]) s.append([c1,c1+pad]) s.merge() s.recalcmask() # print "PJT segments:",s.getsegmentsastuples() ns = len(s.getsegmentsastuples()) chans = s.chans(not use_lines) if use_lines: msum = s.getmask() else: msum = 1 - s.getmask() logging.info("Read %d segments" % ns) # print "chans",chans # print "msum",msum # from a deprecated keyword, but kept here to pre-smooth the spectrum before clipping # examples are: ['boxcar',3] ['gaussian',7] ['hanning',5] smooth= [] sig_const = False # figure out if sigma is taken as constant in the cube if b1a == None: # if no 2nd BDP was given, sigma needs to be specified if sigma <= 0.0: raise Exception,"Neither user-supplied sigma nor CubeStats_BDP input given. One is required." else: sig_const = True # and is constant else: if sigma > 0: sigma = b1a.get("sigma") sig_const = True if sig_const: logging.info("Using constant sigma = %f" % sigma) else: logging.info("Using varying sigma per plane") infile = b1.getimagefile(bt.CASA) # ADMIT filename of the image (cube) bdp_name = self.mkext(infile,'csm') # morph to the new output name with replaced extension 'csm' image_out = self.dir(bdp_name) # absolute filename args = {"imagename" : self.dir(infile)} # assemble arguments for immoments() args["moments"] = 0 # only need moments=0 (or [0] is ok as well) args["outfile"] = image_out # note full pathname dt.tag("start") if sig_const: args["excludepix"] = [-numsigma*sigma, numsigma*sigma] # single global sigma if b1b != None: # print "PJT: ",chans args["chans"] = chans else: # @todo in this section bad channels can cause a fully masked cubesum = bad # cubestats input sigma_array = b1a.table.getColumnByName("sigma") # channel dependent sigma sigma_pos = sigma_array[np.where(sigma_array>0)] smin = sigma_pos.min() smax = sigma_pos.max() logging.info("sigma varies from %f to %f" % (smin,smax)) maxval = b1a.get("maxval") # max in cube nzeros = len(np.where(sigma_array<=0.0)[0]) # check bad channels if nzeros > 0: logging.warning("There are %d NaN channels " % nzeros) # raise Exception,"need to recode CubeSum or use constant sigma" dt.tag("grab_sig") if len(smooth) > 0: # see also LineID and others filter = Filter1D.Filter1D(sigma_array,smooth[0],**Filter1D.Filter1D.convertargs(smooth)) sigma_array = filter.run() dt.tag("smooth_sig") # create a CASA image copy for making the mirror sigma cube to mask against file = self.dir(infile) mask = file+"_mask" taskinit.ia.fromimage(infile=file, outfile=mask) nx = taskinit.ia.shape()[0] ny = taskinit.ia.shape()[1] nchan = taskinit.ia.shape()[2] taskinit.ia.fromshape(shape=[nx,ny,1]) plane = taskinit.ia.getchunk([0,0,0],[-1,-1,0]) # 
convenience plane for masking operation dt.tag("mask_sig") taskinit.ia.open(mask) dt.tag("open_mask") count = 0 for i in range(nchan): if sigma_array[i] > 0: if b1b != None: if msum[i]: taskinit.ia.putchunk(plane*0+sigma_array[i],blc=[0,0,i,-1]) count = count + 1 else: taskinit.ia.putchunk(plane*0+maxval,blc=[0,0,i,-1]) else: taskinit.ia.putchunk(plane*0+sigma_array[i],blc=[0,0,i,-1]) count = count + 1 else: taskinit.ia.putchunk(plane*0+maxval,blc=[0,0,i,-1]) taskinit.ia.close() logging.info("%d/%d channels used for CubeSum" % (count,nchan)) dt.tag("close_mask") names = [file, mask] tmp = file + '.tmp' if numsigma == 0.0: # hopefully this will also make use of the mask exp = "IM0[IM1<%f]" % (0.99*maxval) else: exp = "IM0[abs(IM0/IM1)>%f]" % (numsigma) # print "PJT: exp",exp casa.immath(mode='evalexpr', imagename=names, expr=exp, outfile=tmp) args["imagename"] = tmp dt.tag("immath") casa.immoments(**args) dt.tag("immoments") if sig_const is False: # get rid of temporary files utils.remove(tmp) utils.remove(mask) # get the flux taskinit.ia.open(image_out) st = taskinit.ia.statistics() taskinit.ia.close() dt.tag("statistics") # report that flux, but there's no way to get the units from casa it seems # ia.summary()['unit'] is usually 'Jy/beam.km/s' for ALMA # imstat() does seem to know it. if st.has_key('flux'): rdata = [st['flux'][0],st['sum'][0]] logging.info("Total flux: %f (sum=%f)" % (st['flux'],st['sum'])) else: rdata = [st['sum'][0]] logging.info("Sum: %f (beam parameters missing)" % (st['sum'])) logging.regression("CSM: %s" % str(rdata)) # Create two output images for html and their thumbnails, too implot = ImPlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir()) implot.plotter(rasterfile=bdp_name,figname=bdp_name,colorwedge=True) figname = implot.getFigure(figno=implot.figno,relative=True) thumbname = implot.getThumbnail(figno=implot.figno,relative=True) dt.tag("implot") thumbtype = bt.PNG # really should be correlated with self._plot_type!! # 2. Create a histogram of the map data # get the data for a histogram data = casautil.getdata(image_out,zeromask=True).compressed() dt.tag("getdata") # get the label for the x axis bunit = casa.imhead(imagename=image_out, mode="get", hdkey="bunit") # Make the histogram plot # Since we give abspath in the constructor, figname should be relative myplot = APlot(ptype=self._plot_type,pmode=self._plot_mode,abspath=self.dir()) auxname = bdp_name + "_histo" auxtype = bt.PNG # really should be correlated with self._plot_type!! 
myplot.histogram(columns = data, figname = auxname, xlab = bunit, ylab = "Count", title = "Histogram of CubeSum: %s" % (bdp_name), thumbnail=True) auxname = myplot.getFigure(figno=myplot.figno,relative=True) auxthumb = myplot.getThumbnail(figno=myplot.figno,relative=True) images = {bt.CASA : bdp_name, bt.PNG : figname} casaimage = Image(images = images, auxiliary = auxname, auxtype = auxtype, thumbnail = thumbname, thumbnailtype = thumbtype) if hasattr(b1,"line"): # SpwCube doesn't have Line line = deepcopy(getattr(b1,"line")) if type(line) != type(Line): line = Line(name="Undetermined") else: line = Line(name="Undetermined") # fake a Line if there wasn't one self.addoutput(Moment_BDP(xmlFile=bdp_name,moment=0,image=deepcopy(casaimage),line=line)) imcaption = "Integral (moment 0) of all emission in image cube" auxcaption = "Histogram of cube sum for image cube" taskargs = "numsigma=%.1f sigma=%g smooth=%s" % (numsigma, sigma, str(smooth)) self._summary["cubesum"] = SummaryEntry([figname,thumbname,imcaption,auxname,auxthumb,auxcaption,bdp_name,infile],"CubeSum_AT",self.id(True),taskargs) dt.tag("done") dt.end()
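# A minimal, self-contained sketch (not ADMIT's Segments class) of the segment
# merging idea used above: line channel ranges, grown by `pad` on both sides,
# are merged into non-overlapping intervals and turned into a per-channel mask
# that selects the line (or, inverted, the line-free) channels.
def merge_segments(ranges, nchan, pad=0):
    grown = sorted((max(c0 - pad, 0), min(c1 + pad, nchan - 1)) for c0, c1 in ranges)
    merged = []
    for c0, c1 in grown:
        if merged and c0 <= merged[-1][1] + 1:        # overlaps or touches previous
            merged[-1] = (merged[-1][0], max(merged[-1][1], c1))
        else:
            merged.append((c0, c1))
    mask = [0] * nchan
    for c0, c1 in merged:
        for c in range(c0, c1 + 1):
            mask[c] = 1
    return merged, mask

segs, msum = merge_segments([(16, 32), (16, 30), (16, 29)], 40, pad=2)
print segs            # -> [(14, 34)]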