def write_ip_NDF(data, bad_pixel_ref):
    """
    This function writes out the array ip parameter data to an NDF file.

    Invocation:
        result = write_ip_NDF(data, bad_pixel_ref)

    Arguments:
        data = The array ip parameter data.
        bad_pixel_ref = An NDF with bad pixel values to copy over.

    Returned Value:
        Writes the NDF and returns its handle.
    """
    ndf_name_orig = NDG(1)
    indf = ndf.open(ndf_name_orig[0], 'WRITE', 'NEW')
    indf.new('_DOUBLE', 2, numpy.array([1, 1]), numpy.array([32, 40]))
    ndfmap = indf.map('DATA', '_DOUBLE', 'WRITE')
    ndfmap.numpytondf(data)
    indf.annul()

    # Copy bad pixels from the reference NDF.
    ndf_name = NDG(1)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        ndf_name_orig, bad_pixel_ref, ndf_name))
    return ndf_name
def cleanup():
    global retain
    ParSys.cleanup()
    if retain:
        msg_out("Retaining temporary files in {0}".format(NDG.tempdir))
    else:
        NDG.cleanup()
def cleanup():
    global retain
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out("Retaining temporary files in {0}".format(NDG.tempdir))
        else:
            NDG.cleanup()
    except:
        pass
def cleanup():
    global retain, new_ext_ndfs
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out("Retaining EXT models in {0} and temporary files in {1}".format(os.getcwd(), NDG.tempdir))
        else:
            NDG.cleanup()
            for ext in new_ext_ndfs:
                os.remove(ext)
    except:
        pass
def force_flat(ins, masks):
    """
    Forces the background regions to be flat in a set of Q or U images.

    Invocation:
        result = force_flat( ins, masks )

    Arguments:
        ins = NDG
            An NDG object specifying a group of Q or U images from which
            any low frequency background structure is to be removed.
        masks = NDG
            An NDG object specifying a corresponding group of Q or U images
            in which source pixels are bad. These are only used to mask the
            images specified by "ins". It should have the same size as "ins".

    Returned Value:
        A new NDG object containing the group of corrected Q or U images.
    """

    # How many NDFs are we processing?
    nndf = len(ins)

    # Blank out sources by copying the bad pixels from "masks" into "ins".
    msg_out(" masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    # Smooth the blanked NDFs using a 3 pixel Gaussian. Set wlim so that
    # small holes are filled in by the smoothing process.
    msg_out(" smoothing...")
    qs = NDG(ins)
    invoke("$KAPPA_DIR/gausmooth in={0} out={1} fwhm=3 wlim=0.5".format(qm, qs))

    # Fill remaining big holes using artificial data.
    msg_out(" filling...")
    qf = NDG(ins)
    invoke("$KAPPA_DIR/fillbad in={0} out={1} niter=10 size=10 variance=no".format(qs, qf))

    # Subtract the filled low frequency data from the original to create the
    # returned images.
    msg_out(" removing low frequency background structure...")
    result = NDG(ins)
    invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins, qf, result))

    return result
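# Illustrative sketch (not part of the original script): a hypothetical call to
# force_flat, assuming "qff" is an NDG group of Q images and "qmask" is a
# matching NDG group in which source pixels are set bad.
# qflat = force_flat(qff, qmask)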
def run_calcqu(input_data, config, harmonic):
    # The following call to SMURF:CALCQU creates two HDS container files -
    # one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    # these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out("Calculating Q and U values for each bolometer...")
    invoke("$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
           "harmonic={4} fix".format(input_data, starutil.shell_quote(config),
                                     qcont, ucont, harmonic))
    return (qcont, ucont)
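# Illustrative sketch (not part of the original script): a hypothetical call to
# run_calcqu, assuming "indata" is an NDG group of raw POL2 time-series files
# and "config" is the name of a CALCQU configuration file.
# (qcont, ucont) = run_calcqu(indata, config, 4)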
def cleanup():
    global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out("Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(), NDG.tempdir))
        else:
            NDG.cleanup()
            for ext in new_ext_ndfs:
                os.remove(ext)
            for lut in new_lut_ndfs:
                os.remove(lut)
            for noi in new_noi_ndfs:
                os.remove(noi)
    except:
        pass
def cleanup():
    global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out("Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(), NDG.tempdir))
        else:
            NDG.cleanup()
            for ext in new_ext_ndfs:
                os.remove(ext)
            for lut in new_lut_ndfs:
                os.remove(lut)
            for noi in new_noi_ndfs:
                os.remove(noi)
            for res in qua:
                os.remove(res)
    except:
        pass
def remove_corr(ins, masks):
    """
    Masks the supplied set of Q or U images and then looks for and removes
    correlated components in the background regions.

    Invocation:
        result = remove_corr( ins, masks )

    Arguments:
        ins = NDG
            An NDG object specifying a group of Q or U images from which
            correlated background components are to be removed.
        masks = NDG
            An NDG object specifying a corresponding group of Q or U images
            in which source pixels are bad. These are only used to mask the
            images specified by "ins". It should have the same size as "ins".

    Returned Value:
        A new NDG object containing the group of corrected Q or U images.
    """

    # How many NDFs are we processing?
    nndf = len(ins)

    # Blank out sources by copying the bad pixels from "masks" into "ins". We
    # refer to "q" below, but the same applies whether processing Q or U.
    msg_out(" masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    # Find the most correlated pair of images. We use the basic correlation
    # coefficient calculated by kappa:scatter for this.
    msg_out(" Finding most correlated pair of images...")
    cmax = 0
    for i in range(0, nndf - 1):
        for j in range(i + 1, nndf):
            invoke("$KAPPA_DIR/scatter in1={0} in2={1} device=!".format(qm[i], qm[j]))
            c = starutil.get_task_par("corr", "scatter")
            if abs(c) > abs(cmax):
                cmax = c
                cati = i
                catj = j

    if abs(cmax) < 0.3:
        msg_out(" No correlated images found!")
        return ins
    msg_out(" Correlation for best pair of images = {0}".format(cmax))

    # Find images that are reasonably correlated to the pair found above,
    # and coadd them to form a model for the correlated background
    # component. Note, the holes left by the masking are filled in by the
    # coaddition using background data from other images.
    msg_out(" Forming model...")

    # Form the average of the two most correlated images, first normalising
    # them to a common scale so that they both have equal weight.
    norm = "{0}/norm".format(NDG.tempdir)
    if not normer(qm[cati], qm[catj], 0.3, norm):
        norm = qm[cati]

    mslist = NDG([qm[catj], norm])
    ave = "{0}/ave".format(NDG.tempdir)
    invoke("$CCDPACK_DIR/makemos in={0} method=mean genvar=no usevar=no out={1}".format(mslist, ave))

    # Loop round each image finding the correlation factor of the image and
    # the above average image.
    temp = "{0}/temp".format(NDG.tempdir)
    nlist = []
    ii = 0
    for i in range(0, nndf):
        c = blanker(qm[i], ave, temp)

        # If the correlation is high enough, normalize the image to the average
        # image and then include the normalised image in the list of images to
        # be coadded to form the final model.
        if abs(c) > 0.3:
            tndf = "{0}/t{1}".format(NDG.tempdir, ii)
            ii += 1
            invoke("$KAPPA_DIR/normalize in1={1} in2={2} out={0} device=!".format(tndf, temp, ave))
            nlist.append(tndf)

    if ii == 0:
        msg_out(" No secondary correlated images found!")
        return ins
    msg_out(" Including {0} secondary correlated images in the model.".format(ii))

    # Coadd the images created above to form the model of the correlated
    # background component. Fill any remaining bad pixels with artificial data.
    model = "{0}/model".format(NDG.tempdir)
    included = NDG(nlist)
    invoke("$CCDPACK_DIR/makemos in={0} method=mean usevar=no genvar=no out={1}".format(included, temp))
    invoke("$KAPPA_DIR/fillbad in={1} variance=no out={0} size=10 niter=10".format(model, temp))

    # Now estimate how much of the model is present in each image and remove it.
    msg_out(" Removing model...")
    temp2 = "{0}/temp2".format(NDG.tempdir)
    qnew = NDG(ins)
    nbetter = 0
    for i in range(0, nndf):

        # Try to normalise the model to the current image. This fails if the
        # correlation between them is too low.
        if normer(model, qm[i], 0.3, temp):

            # Remove the scaled model from the image.
            invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins[i], temp, temp2))

            # We now check that removing the correlated background component has
            # in fact made the image flatter (poor fits etc can mean that images
            # that are poorly correlated to the model have a large amount of
            # model removed and so make the image less flat). Find the standard
            # deviation of the data in the original image and in the corrected
            # image.
            invoke("$KAPPA_DIR/stats {0} quiet".format(ins[i]))
            oldsig = get_task_par("sigma", "stats")
            invoke("$KAPPA_DIR/stats {0} quiet".format(temp2))
            newsig = get_task_par("sigma", "stats")

            # If the correction has made the image flatter, copy it to the
            # returned NDG.
            if newsig < oldsig:
                nbetter += 1
                invoke("$KAPPA_DIR/ndfcopy in={1} out={0}".format(qnew[i], temp2))
            else:
                invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i], qnew[i]))

        # If the input image is poorly correlated to the model, return the
        # input image unchanged.
        else:
            invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i], qnew[i]))

    msg_out(" {0} out of {1} images have been improved.".format(nbetter, nndf))

    # Return the corrected images.
    return qnew
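# Illustrative sketch (not part of the original script): a hypothetical call to
# remove_corr, assuming "qflat" holds background-flattened Q images and "qmask"
# holds the corresponding source masks.
# qcor = remove_corr(qflat, qmask)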
    if ucont == None:
        raise starutil.InvalidParameterError("Supplied QU files ({0}) "
                                             "do not contain any U data.".format(inqu))
    else:
        ucont.comment = "ucont"

# Otherwise create a set of Q images and a set of U images. These are put
# into the HDS container files "q_TMP.sdf" and "u_TMP.sdf". Each image
# contains Q or U values derived from a short section of raw data during
# which each bolometer moves less than half a pixel.
else:

    # The following call to SMURF:CALCQU creates two HDS container files -
    # one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    # these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out("Calculating Q and U values for each bolometer...")
    invoke("$SMURF_DIR/calcqu in={0} config={1} outq={2} outu={3} fix".
           format(indata, config, qcont, ucont))

# Remove spikes from the Q and U images. The cleaned NDFs are written to
# temporary NDFs specified by two new NDG objects "qff" and "uff", which
# inherit their size from the existing groups "qcont" and "ucont".
msg_out("Removing spikes from bolometer Q and U values...")
qff = NDG(qcont)
qff.comment = "qff"
uff = NDG(ucont)
indata = parsys["IN"].value retain = parsys["RETAIN"].value # Erase any NDFs holding cleaned data or pointing data from previous runs. for path in glob.glob("s*_con_res_cln.sdf"): myremove(path) base = path[:-16] myremove("{0}_lat.sdf".format(base)) myremove("{0}_lon.sdf".format(base)) # Use sc2concat to concatenate and flatfield the data. invoke("$SMURF_DIR/sc2concat in={0} out='./*_umap'".format(indata)) # Use makemap to generate quaity and pointing info. concdata = NDG("*_umap") confname = NDG.tempfile() fd = open(confname,"w") fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n") fd.write("numiter=1\n") fd.write("exportclean=1\n") fd.write("exportlonlat=1\n") fd.write("dcfitbox=0\n") fd.write("noisecliphigh=0\n") fd.write("order=0\n") fd.write("downsampscale=0\n") fd.close() map = NDG(1) invoke("$SMURF_DIR/makemap in={0} out={1} config='^{2}'".format(concdata,map,confname)) # We do not need the concatenated data any more (we use the cleaned data
raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) " "does not exist".format(restart) ) fred = loadndg( "IN", True ) if indata != fred: raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) " "refers to different time-series data".format(restart) ) msg_out( "Re-using data in {0}".format(restart) ) # Initialise the starlink random number seed to a known value so that # results are repeatable. os.environ["STAR_SEED"] = "65" # Flat field the supplied template data ff = loadndg( "FF" ) if not ff: ff = NDG(indata) msg_out( "Flatfielding template data...") invoke("$SMURF_DIR/flatfield in={0} out={1}".format(indata,ff) ) ff = ff.filter() savendg( "FF", ff ) else: msg_out( "Re-using old flatfielded template data...") # If required, create new artificial I, Q and U maps. if newart: msg_out( "Creating new artificial I, Q and U maps...") # Get the parameters defining the artificial data ipeak = parsys["IPEAK"].value ifwhm = parsys["IFWHM"].value pol = parsys["POL"].value
    else:
        fcf_i = 491.0
elif filter == 850:
    fcf_qu = 725.0
    if ipol2:
        fcf_i = 725.0
    else:
        fcf_i = 537.0
else:
    raise starutil.InvalidParameterError("Invalid FILTER header value "
                                         "'{0}' found in {1}.".format(filter, qin[0]))

# Remove any spectral axes.
qtrim = NDG(qin)
invoke("$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(qin, qtrim))
utrim = NDG(uin)
invoke("$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(uin, utrim))
itrim = NDG(iin)
invoke("$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(iin, itrim))

# Rotate them to use the same polarimetric reference direction.
qrot = NDG(qtrim)
urot = NDG(utrim)
invoke("$POLPACK_DIR/polrotref qin={0} uin={1} like={2} qout={3} uout={4} ".
       format(qtrim, utrim, qtrim[0], qrot, urot))

# Mosaic them into a single set of Q, U and I images, aligning them
# with the first I image.
qmos = NDG(1)
def get_filtered_skydip_data(qarray, uarray, clip, a):
    """
    This function takes Q and U array data (output from calcqu), applies
    ffclean to remove spikes and puts the results into numpy array variables.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total, qvar_total, udata_total, uvar_total, elevation,
          opacity_term, bad_pixel_ref ) = get_filtered_skydip_data(qarray, uarray, clip, a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
        a = A string indicating the array (e.g. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term
                       (1-exp(-tau*air_mass)), where tau is calculated using
                       the WVM data as input.
    """

    # Remove spikes from the Q images for the current subarray. The cleaned
    # NDFs are written to temporary NDFs specified by the new NDG object
    # "qff", which inherits its size from the existing group "qarray".
    msg_out("Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(qarray, qff, clip))

    # Remove spikes from the U images for the current subarray. The cleaned
    # NDFs are written to temporary NDFs specified by the new NDG object
    # "uff", which inherits its size from the existing group "uarray".
    msg_out("Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(uarray, uff, clip))

    elevation = []
    opacity_term = []
    for stare in range(len(qff[:])):

        # Stack Q data in numpy array.
        # Get elevation information.
        elevation.append(numpy.array(float(invoke(
            "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART".format(qff[stare])))))

        # Get Tau (Opacity) information.
        tau_temp = numpy.array(float(invoke(
            "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".format(qff[stare]))))

        # Convert to obs band.
        if '4' in a:
            tau_temp = 19.04 * (tau_temp - 0.018)  # Eq from Dempsey et al.
        elif '8' in a:
            tau_temp = 5.36 * (tau_temp - 0.006)   # Eq from Dempsey et al.
        opacity_term.append(1 - numpy.exp(-1 * tau_temp /
                                          numpy.sin(numpy.radians(elevation[-1]))))

        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(qff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")

        qdata_temp = numpy.reshape(Ndf(qff[stare]).data, (ny, nx))
        qdata_temp[numpy.abs(qdata_temp) > 1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total, qdata_temp))

        qvar_temp = numpy.reshape(Ndf(qff[stare]).var, (ny, nx))
        qvar_temp[numpy.abs(qvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total, qvar_temp))

        # Stack U data in numpy array.
        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(uff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")

        udata_temp = numpy.reshape(Ndf(uff[stare]).data, (ny, nx))
        udata_temp[numpy.abs(udata_temp) > 1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total, udata_temp))

        uvar_temp = numpy.reshape(Ndf(uff[stare]).var, (ny, nx))
        uvar_temp[numpy.abs(uvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total, uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(qff, uff, bad_pixel_ref))

    return (qdata_total, qvar_total, udata_total, uvar_total, elevation,
            opacity_term, bad_pixel_ref)
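# Illustrative sketch (not part of the original script): a hypothetical call to
# get_filtered_skydip_data, assuming "qcont" and "ucont" are the Q and U HDS
# containers returned by SMURF:CALCQU for subarray "S8A".
# (qdata, qvar, udata, uvar, el, op, badref) = \
#     get_filtered_skydip_data(qcont, ucont, 3.0, "S8A")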
                chi2Vals[row_val, col_val] = ipprms.fun
            else:
                returnCode[row_val, col_val] = False

# Write NDFs.
out_p0 = write_ip_NDF(ip_prms['Pf_' + a[-1]], bad_pixel_ref)
out_p1 = write_ip_NDF(ipprms_pol_screen, bad_pixel_ref)
out_c0 = write_ip_NDF(ipprms_Co, bad_pixel_ref)
out_angc = write_ip_NDF(ip_prms['Theta_ip_' + a[-1]], bad_pixel_ref)

# Fill any bad pixels with a smooth function to match the surrounding pixels.
msg_out("Filling in bad pixel values for {0} bolometer IP parameters...".format(a))
out_p0_filled = NDG(1)
invoke("$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p0, out_p0_filled))
out_p1_filled = NDG(1)
invoke("$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p1, out_p1_filled))
out_c0_filled = NDG(1)
invoke("$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_c0, out_c0_filled))
out_angc_filled = NDG(1)
invoke("$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_angc, out_angc_filled))
# Initial peak value.
peak_value = noise * 0.5

# Do tests for 5 different peak values.
for ipeak in range(0, 1):
    starutil.msg_out(">>> Doing sep={0} and peak={1}....".format(clump_separation, peak_value))

    # Get the dimensions of a square image that would be expected to
    # contain the target number of clumps at the current separation.
    npix = int(clump_separation * math.sqrt(nclump_target))

    # Create a temporary file containing circular clumps of constant size
    # and shape (except for the effects of noise).
    model = NDG(1)
    out = NDG(1)
    outcat = NDG.tempfile(".fit")
    invoke("$CUPID_DIR/makeclumps angle=\[0,0\] beamfwhm=0 deconv=no "
           "fwhm1=\[{0},0\] fwhm2=\[{0},0\] lbnd=\[1,1\] ubnd=\[{1},{1}\] "
           "model={2} nclump={3} out={4} outcat={5} pardist=normal "
           "peak = \[{6},0\] rms={7} trunc=0.1".
           format(clump_fwhm, npix, model, nclump_target, out, outcat, peak_value, noise))

    # Run fellwalker on the data.
    mask = NDG(1)
    outcat_fw = NDG.tempfile(".fit")
    invoke("$CUPID_DIR/findclumps config=def deconv=no in={0} "
           "method=fellwalker out={1} outcat={2} rms={3}".
           format(out, mask, outcat_fw, noise))

    # Get the number of clumps found by FellWalker.
system = parsys["SYSTEM"].value if system == "ICRS": parsys["CENTRE1"].prompt = "RA at centre of required circle" parsys["CENTRE2"].prompt = "Dec at centre of required circle" else: parsys[ "CENTRE1"].prompt = "Galactic longitude at centre of required circle" parsys[ "CENTRE2"].prompt = "Galactic latitude at centre of required circle" centre1 = parsys["CENTRE1"].value if centre1 is not None: centre2 = parsys["CENTRE2"].value radius = parsys["RADIUS"].value frame = NDG.tempfile() invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format( system, frame)) invoke("$ATOOLS_DIR/astunformat {0} 1 {1}".format(frame, centre1)) cen1 = starutil.get_task_par("DVAL", "astunformat") invoke("$ATOOLS_DIR/astunformat {0} 2 {1}".format(frame, centre2)) cen2 = starutil.get_task_par("DVAL", "astunformat") region = NDG.tempfile() invoke( "$ATOOLS_DIR/astcircle {0} 1 \[{1},{2}\] {3} ! ! {4}".format( frame, cen1, cen2, math.radians(radius / 60.0), region)) # If a Region was supplied ,not we do not yet have the coordinates of # the centre of the required region, and note if the Region is defined by
# south pole). The above call to jsatileinfo will have determined the
# appropriate projection to use, so get it.
proj = starutil.get_task_par("PROJ", "jsatilelist")

# Create a file holding the FITS-WCS header for the first tile, using
# the type of projection determined above.
head = "{0}/header".format(NDG.tempdir)
invoke("$SMURF_DIR/jsatileinfo itile={0} instrument={1} header={2} "
       "proj={3} quiet".format(tiles[0], instrument, head, proj))

# Get the lower pixel index bounds of the first tile.
lx = int(starutil.get_task_par("LBND(1)", "jsatileinfo"))
ly = int(starutil.get_task_par("LBND(2)", "jsatileinfo"))

# Create a 1x1 NDF and store the tile headers in the FITS extension.
ref = NDG(1)
invoke("$KAPPA_DIR/creframe out={0} mode=fl mean=0 lbound=\[{1},{2}\] "
       "ubound=\[{1},{2}\]".format(ref, lx, ly))
invoke("$KAPPA_DIR/fitstext ndf={0} file={1}".format(ref, head))

# Get the nominal spatial pixel size of the supplied NDF.
invoke("$KAPPA_DIR/ndftrace ndf={0} quiet".format(inndf))
pixsize1 = float(starutil.get_task_par("FPIXSCALE(1)", "ndftrace"))
pixsize2 = float(starutil.get_task_par("FPIXSCALE(2)", "ndftrace"))
pixsize_in = math.sqrt(pixsize1 * pixsize2)

# Get the nominal tile pixel size.
invoke("$KAPPA_DIR/ndftrace ndf={0} quiet".format(ref))
pixsize1 = float(starutil.get_task_par("FPIXSCALE(1)", "ndftrace"))
pixsize2 = float(starutil.get_task_par("FPIXSCALE(2)", "ndftrace"))
pixsize_tile = math.sqrt(pixsize1 * pixsize2)
retain = parsys["RETAIN"].value outbase = parsys["OUT"].value fakemap = parsys["FAKEMAP"].value # Erase any NDFs holding cleaned data, exteinction or pointing data from # previous runs. for path in glob.glob("*_con_res_cln.sdf"): myremove(path) base = path[:-16] myremove("{0}_lat.sdf".format(base)) myremove("{0}_lon.sdf".format(base)) myremove("{0}_con_ext.sdf".format(base)) # Use sc2concat to concatenate and flatfield the data. msg_out( "Concatenating and flatfielding..." ) concbase = NDG.tempfile("") invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format(indata,concbase)) concdata = NDG( "{0}_*".format(concbase) ) # Use makemap to generate quality, extinction and pointing info. confname = NDG.tempfile() fd = open(confname,"w") fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n") fd.write("numiter=1\n") fd.write("exportclean=1\n") fd.write("exportndf=ext\n") fd.write("exportlonlat=1\n") fd.write("dcfitbox=0\n") fd.write("noisecliphigh=0\n") fd.write("order=0\n") fd.write("downsampscale=0\n")
sigma = 0.0

# See if old temp files are to be re-used.
restart = parsys["RESTART"].value
if restart is None:
    retain = parsys["RETAIN"].value
    indata.save("IN")
else:
    retain = True
    NDG.tempdir = restart
    if not os.path.isdir(restart):
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "does not exist".format(restart))
    fred = NDG.load("IN", True)
    if indata != fred:
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "refers to different time-series data".format(restart))
    msg_out("Re-using data in {0}".format(restart))

# Initialise the starlink random number seed to a known value so that
# results are repeatable.
os.environ["STAR_SEED"] = "65"

# Has the input data been flatfielded? Test the first supplied input NDF.
ff = None
try:
    if "smf_flatfield" in invoke(
            "$HDSTRACE_DIR/hdstrace {0}.more.smurf.smurfhist".format(
basec1 = math.radians(basec1)
basec2 = math.radians(basec2)

# Get the radius of the map.
radius = 0.5 * math.sqrt(map_hght * map_hght + map_wdth * map_wdth)

# Create a Frame describing the coordinate system.
if tracksys == "GAL":
    sys = "galactic"
elif tracksys == "J2000":
    sys = "fk5"
else:
    raise starutil.InvalidParameterError("The TRACKSYS header in {0} is {1} "
                                         "- should be GAL or J2000".format(indata, tracksys))

frame = NDG.tempfile()
invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(sys, frame))

# Create a Circle describing the map.
if region == None:
    region = NDG.tempfile()
    display = True
else:
    display = False

invoke("$ATOOLS_DIR/astcircle frame={0} form=1 centre=\[{1},{2}\] point={3} "
       "unc=! options=! result={4}".format(frame, basec1, basec2, radius, region))

if display:
    f = open(region, "r")
    print(f.read())
elif not os.path.exists(qudir):
    os.makedirs(qudir)

# Get the reference direction.
north = parsys["NORTH"].value

# Classify each input data file as raw, QUI time-series or QUI map. Create
# three separate text files containing all input NDFs of each type (plus
# a fourth holding non-POL2 data). Also, create another text file
# containing a list of any missing raw sub-scan files.
junks = NDG.tempfile()
inraws = NDG.tempfile()
inquis = NDG.tempfile()
inmaps = NDG.tempfile()
rawinfo = NDG.tempfile()
missing = NDG.tempfile()
invoke("$SMURF_DIR/pol2check in={0} quiet=yes junkfile={1} mapfile={2} "
       "rawfile={3} stokesfile={4} rawinfo={5} missing={6}".
       format(indata, junks, inmaps, inraws, inquis, rawinfo, missing))

# Warn about any non-POL2 input data files that are being ignored.
if get_task_par("JUNKFOUND", "pol2check"):
    msg_out(" ")
    msg_out("WARNING: The following inappropriate input data files are "
            "being ignored: ")
    with open(junks) as f:
    # Get a flag indicating if the tile's master NDF existed before the
    # above invocation of "tileinfo".
    existed = starutil.get_task_par("exists", "tileinfo")

    # Get the 2D spatial pixel index bounds of the master tile.
    tlbnd = starutil.get_task_par("lbnd", "tileinfo")
    tubnd = starutil.get_task_par("ubnd", "tileinfo")

    # If the NDFs are not gridded using the JSA all-sky grid appropriate to
    # the specified instrument, then we need to resample them onto that grid
    # before coadding the new and old data. We only need do this for the
    # first tile for each input NDF, since all tiles are aligned on the same
    # pixel grid.
    if aligned is None:
        if not jsa:
            aligned = NDG(1)[0]
            invoke("$KAPPA_DIR/wcsalign in={0} ref={1} out={2} lbnd=! "
                   "method=bilin".format(ndf, tilendf, aligned))
        else:
            aligned = ndf

    # Get the pixel index bounds of the aligned NDF.
    invoke("$KAPPA_DIR/ndftrace ndf={0}".format(aligned))
    nlbnd = starutil.get_task_par("lbound", "ndftrace")
    nubnd = starutil.get_task_par("ubound", "ndftrace")

    # Get the 2D spatial pixel index bounds of the overlap of the current tile
    # and the aligned NDF.
    olbnd = [1, 1]
    oubnd = [0, 0]
    for i in (0, 1):
if starutil.get_task_par("numgood", "stats") > 0: # If so, append the section to the list of NDFs to be included in the output. tilendf.append(sec) itilelist.append(itile) # Raise an exception if no data is available for the tiles overlap msg_out(" ") if len(tilendf) == 0: raise starutil.StarUtilError( "No JSA {0} data is available " "for the requested region.".format(instrument)) # Otherwise, paste the sections together to form the output NDF. else: tiles = NDG(tilendf) invoke("$KAPPA_DIR/paste in={0} out={1}".format(tiles, outdata)) msg_out("Created output NDF {0} from tiles {1}".format( outdata, itilelist)) # Remove temporary files. cleanup() # If an StarUtilError of any kind occurred, display the message but hide the # python traceback. To see the trace back, uncomment "raise" instead. except starutil.StarUtilError as err: # raise print(err) cleanup() # This is to trap control-C etc, so that we can clean up temp files.
# Get the quantity to use as the vector lengths (could be "None").
plot = parsys["PLOT"].value

# If any vectors are to be plotted, get the SNR limit for the plotted
# vectors.
if plot != None:
    snr = parsys["SNR"].value
    maxlen = parsys["MAXLEN"].value

# See if temp files are to be retained.
retain = parsys["RETAIN"].value

# The following call to SMURF:CALCQU creates two HDS container files -
# one holding a set of Q NDFs and the other holding a set of U NDFs. Create
# these container files in the NDG temporary directory.
qcont = NDG(1)
qcont.comment = "qcont"
ucont = NDG(1)
ucont.comment = "ucont"

# Create a set of Q images and a set of U images. These are put into the HDS
# container files "q_TMP.sdf" and "u_TMP.sdf". Each image contains Q or U
# values derived from a short section of raw data during which each bolometer
# moves less than half a pixel.
msg_out("Calculating Q and U values for each bolometer...")
invoke("$SMURF_DIR/calcqu in={0} config={1} outq={2} outu={3} fix".
       format(indata, config, qcont, ucont))

# Remove spikes from the Q and U images. The cleaned NDFs are written to
# temporary NDFs specified by two new NDG objects "qff" and "uff", which
# inherit their size from the existing groups "qcont" and "ucont".
basec2 = math.radians(basec2)

# Get the radius of the map.
radius = 0.5 * math.sqrt(map_hght * map_hght + map_wdth * map_wdth)

# Create a Frame describing the coordinate system.
if tracksys == "GAL":
    sys = "galactic"
elif tracksys == "J2000":
    sys = "fk5"
else:
    raise starutil.InvalidParameterError(
        "The TRACKSYS header in {0} is {1} "
        "- should be GAL or J2000".format(indata, tracksys))

frame = NDG.tempfile()
invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(sys, frame))

# Create a Circle describing the map.
if region is None:
    region = NDG.tempfile()
    display = True
else:
    display = False

invoke("$ATOOLS_DIR/astcircle frame={0} form=1 centre=\[{1},{2}\] point={3} "
       "unc=! options=! result={4}".format(frame, basec1, basec2, radius, region))

if display:
cen2 = None
if region == None:
    system = parsys["SYSTEM"].value
    if system == "ICRS":
        parsys["CENTRE1"].prompt = "RA at centre of required circle"
        parsys["CENTRE2"].prompt = "Dec at centre of required circle"
    else:
        parsys["CENTRE1"].prompt = "Galactic longitude at centre of required circle"
        parsys["CENTRE2"].prompt = "Galactic latitude at centre of required circle"

    centre1 = parsys["CENTRE1"].value
    if centre1 != None:
        centre2 = parsys["CENTRE2"].value
        radius = parsys["RADIUS"].value

        frame = NDG.tempfile()
        invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(system, frame))

        invoke("$ATOOLS_DIR/astunformat {0} 1 {1}".format(frame, centre1))
        cen1 = starutil.get_task_par("DVAL", "astunformat")
        invoke("$ATOOLS_DIR/astunformat {0} 2 {1}".format(frame, centre2))
        cen2 = starutil.get_task_par("DVAL", "astunformat")

        region = NDG.tempfile()
        invoke("$ATOOLS_DIR/astcircle {0} 1 \[{1},{2}\] {3} ! ! {4}".
               format(frame, cen1, cen2, math.radians(radius / 60.0), region))

# If a Region was supplied, note that we do not yet have the coordinates of
# the centre of the required region, and note if the Region is defined by
# an NDF.
else:
fd.write("flt.filt_edgehigh_last=<undef>\n") # final iteration. We fd.write("flt.filt_edgelow_last=<undef>\n") # reset them here in fd.write("flt.whiten_last=<undef>\n") # case they are set in fd.write("com.perarray_last=<undef>\n") # the supplied config. if precleaned: fd.write("downsampscale = 0\n") # Cleaned data will have been downsampled already. fd.write("downsampfreq = 0\n") fd.close() # Close the config file. # Get the name of a temporary NDF that can be used to store the first # iteration map. This NDF is put in the NDG temp directory. If we are # only doing one iteration, used the supplied output NDF name. if niter == 1: newmap = outdata else: newmap = NDG(1) prevmap = None # Start a list of these maps in case we are creating an output itermap cube. maps = [] maps.append(newmap) # If we are restarting, check if the NDF already exists and is readable. # If so, we do not re-create it. msg_out( "Iteration 1...") gotit = False if restart != None: try: invoke("$KAPPA_DIR/ndftrace ndf={0} quiet=yes".format(newmap)) msg_out( "Re-using existing map {0}".format(newmap) ) gotit = True
retain = parsys["RETAIN"].value outbase = parsys["OUT"].value fakemap = parsys["FAKEMAP"].value # Erase any NDFs holding cleaned data, exteinction or pointing data from # previous runs. for path in glob.glob("*_con_res_cln.sdf"): myremove(path) base = path[:-16] myremove("{0}_lat.sdf".format(base)) myremove("{0}_lon.sdf".format(base)) myremove("{0}_con_ext.sdf".format(base)) # Use sc2concat to concatenate and flatfield the data. msg_out("Concatenating and flatfielding...") concbase = NDG.tempfile("") invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format( indata, concbase)) concdata = NDG("{0}_*".format(concbase)) # Use makemap to generate quality, extinction and pointing info. confname = NDG.tempfile() fd = open(confname, "w") fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n") fd.write("numiter=1\n") fd.write("exportclean=1\n") fd.write("exportndf=ext\n") fd.write("exportlonlat=1\n") fd.write("dcfitbox=0\n") fd.write("noisecliphigh=0\n") fd.write("order=0\n")
phase16 = 0.0
sigma = 0.0

# See if old temp files are to be re-used.
restart = parsys["RESTART"].value
if restart == None:
    retain = parsys["RETAIN"].value
    indata.save("IN")
else:
    retain = True
    NDG.tempdir = restart
    if not os.path.isdir(restart):
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "does not exist".format(restart))
    fred = NDG.load("IN", True)
    if indata != fred:
        raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                         "refers to different time-series data".format(restart))
    msg_out("Re-using data in {0}".format(restart))

# Initialise the starlink random number seed to a known value so that
# results are repeatable.
os.environ["STAR_SEED"] = "65"

# Flat field the supplied template data.
ff = NDG.load("FF")
if not ff:
    ffdir = NDG.subdir()
    msg_out("Flatfielding template data...")
    invoke("$SMURF_DIR/flatfield in={0} out=\"{1}/*\"".format(indata, ffdir))
fd.write("flt.filt_edgelow_last=<undef>\n") # reset them here in fd.write("flt.whiten_last=<undef>\n") # case they are set in fd.write("com.perarray_last=<undef>\n") # the supplied config. if precleaned: fd.write("downsampscale = 0\n" ) # Cleaned data will have been downsampled already. fd.write("downsampfreq = 0\n") fd.close() # Close the config file. # Get the name of a temporary NDF that can be used to store the first # iteration map. This NDF is put in the NDG temp directory. If we are # only doing one iteration, used the supplied output NDF name. if niter == 1: newmap = outdata else: newmap = NDG(1) prevmap = None # Start a list of these maps in case we are creating an output itermap cube. maps = [] maps.append(newmap) # If we are restarting, check if the NDF already exists and is readable. # If so, we do not re-create it. msg_out("Iteration 1...") gotit = False if restart is not None: try: invoke("$KAPPA_DIR/ndftrace ndf={0} quiet=yes".format(newmap)) msg_out("Re-using existing map {0}".format(newmap)) gotit = True