Code example #1
def cleanup():
    global retain
    ParSys.cleanup()
    if retain:
        msg_out("Retaining temporary files in {0}".format(NDG.tempdir))
    else:
        NDG.cleanup()
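The real scripts guarantee that cleanup() runs on every exit path. As a minimal sketch, assuming the starutil helpers used above (the same try/except tail appears in examples #17 and #22 below), the wiring looks like this:

#  Minimal sketch of how cleanup() is wired into a script; "params" and
#  the main body are elided, and all names are the starutil helpers used
#  in the examples.
try:
   #  ... main body of the script ...
   cleanup()

#  If a StarUtilError of any kind occurred, display the message but hide
#  the python traceback.
except starutil.StarUtilError as err:
   print( err )
   cleanup()

#  This traps control-C etc, so that temp files are still cleaned up.
except:
   cleanup()
   raise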
Code example #2
File: jsajoin.py  Project: astrobuff/starlink
def cleanup():
   global retain
   ParSys.cleanup()
   if retain:
      msg_out( "Retaining temporary files in {0}".format(NDG.tempdir))
   else:
      NDG.cleanup()
Code example #3
File: tounimap.py  Project: milanbb/starlink
def cleanup():
   global retain
   try:
      starutil.ParSys.cleanup()
      if retain:
         msg_out( "Retaining temporary files in {0}".format(NDG.tempdir))
      else:
         NDG.cleanup()
   except:
      pass
Code example #4
File: tounimap.py  Project: kakirastern/starlink
def cleanup():
    global retain
    try:
        starutil.ParSys.cleanup()
        if retain:
            msg_out("Retaining temporary files in {0}".format(NDG.tempdir))
        else:
            NDG.cleanup()
    except:
        pass
Code example #5
def cleanup():
   global retain, new_ext_ndfs
   try:
      starutil.ParSys.cleanup()
      if retain:
         msg_out( "Retaining EXT models in {0} and temporary files in {1}".format(os.getcwd(),NDG.tempdir))
      else:
         NDG.cleanup()
         for ext in new_ext_ndfs:
            os.remove( ext )
   except:
      pass
Code example #6
File: pol2_ipdata.py  Project: astrobuff/starlink
def run_calcqu(input_data,config,harmonic):
    #  The following call to SMURF:CALCQU creates two HDS container files -
    #  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    #  these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out( "Calculating Q and U values for each bolometer...")
    invoke("$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
           "harmonic={4} fix".format(input_data,starutil.shell_quote(config),
                                     qcont,ucont,harmonic) )
    return (qcont,ucont)
Code example #7
def run_calcqu(input_data, config, harmonic):
    #  The following call to SMURF:CALCQU creates two HDS container files -
    #  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    #  these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out("Calculating Q and U values for each bolometer...")
    invoke(
        "$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
        "harmonic={4} fix".format(input_data, starutil.shell_quote(config),
                                  qcont, ucont, harmonic))
    return (qcont, ucont)
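For context, a hypothetical call site for run_calcqu(); "indata" and "config" here stand for values obtained from the script's parameter system, and harmonic=4 is only an illustrative value:

#  Hypothetical usage of run_calcqu(); "indata", "config" and the
#  harmonic value are illustrative assumptions.
(qcont, ucont) = run_calcqu(indata, config, 4)
msg_out("Q NDFs: {0}   U NDFs: {1}".format(qcont, ucont))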
Code example #8
def cleanup():
   global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
   try:
      starutil.ParSys.cleanup()
      if retain:
         msg_out( "Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(),NDG.tempdir))
      else:
         NDG.cleanup()
         for ext in new_ext_ndfs:
            os.remove( ext )
         for lut in new_lut_ndfs:
            os.remove( lut )
         for noi in new_noi_ndfs:
            os.remove( noi )
   except:
      pass
Code example #9
File: skyloop.py  Project: milanbb/starlink
def cleanup():
   global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
   try:
      starutil.ParSys.cleanup()
      if retain:
         msg_out( "Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(),NDG.tempdir))
      else:
         NDG.cleanup()
         for ext in new_ext_ndfs:
            os.remove( ext )
         for lut in new_lut_ndfs:
            os.remove( lut )
         for noi in new_noi_ndfs:
            os.remove( noi )
         for res in qua:
            os.remove( res )
   except:
      pass
Code example #10
File: smurfutil.py  Project: kakirastern/starlink
def force_flat(ins, masks):
    """

   Forces the background regions to be flat in a set of Q or U images.

   Invocation:
      result = force_flat( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         any low frequency background structure is to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "ins". It should have the same size as "ins".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

    #  How many NDFs are we processing?
    nndf = len(ins)

    #  Blank out sources by copying the bad pixels from "mask" into "in".
    msg_out("   masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    #  Smooth the blanked NDFs using a 3 pixel Gaussian. Set wlim so that
    #  small holes are filled in by the smoothing process.
    msg_out("   smoothing...")
    qs = NDG(ins)
    invoke("$KAPPA_DIR/gausmooth in={0} out={1} fwhm=3 wlim=0.5".format(
        qm, qs))

    #  Fill remaining big holes using artificial data.
    msg_out("   filling...")
    qf = NDG(ins)
    invoke("$KAPPA_DIR/fillbad in={0} out={1} niter=10 size=10 variance=no".
           format(qs, qf))

    #  Subtract the filled low frequency data from the original to create the
    #  returned images.
    msg_out("   removing low frequency background structure...")
    result = NDG(ins)
    invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins, qf, result))

    return result
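Note the design: the masks are used only to exclude source pixels from the background estimate; the final subtraction is applied to the unmasked originals. A hypothetical call site, assuming "qff" is an NDG of spike-cleaned Q images and "qmask" a matching NDG with source pixels set bad:

#  Hypothetical usage of force_flat(); "qff" and "qmask" are assumed to
#  be matching NDG groups.
qflat = force_flat(qff, qmask)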
Code example #11
File: smurfutil.py  Project: joaogerd/starlink
def force_flat( ins, masks ):
   """

   Forces the background regions to be flat in a set of Q or U images.

   Invocation:
      result = force_flat( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         any low frequency background structure is to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "ins". It should have the same size as "ins".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

#  How many NDFs are we processing?
   nndf = len( ins )

#  Blank out sources by copying the bad pixels from "mask" into "in".
   msg_out( "   masking...")
   qm = NDG( ins )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins,masks,qm) )

#  Smooth the blanked NDFs using a 3 pixel Gaussian. Set wlim so that
#  small holes are filled in by the smoothing process.
   msg_out( "   smoothing...")
   qs = NDG( ins )
   invoke( "$KAPPA_DIR/gausmooth in={0} out={1} fwhm=3 wlim=0.5".format(qm,qs) )

#  Fill remaining big holes using artificial data.
   msg_out( "   filling...")
   qf = NDG( ins )
   invoke( "$KAPPA_DIR/fillbad in={0} out={1} niter=10 size=10 variance=no".format(qs,qf) )

#  Subtract the filled low frequency data from the original to create the
#  returned images.
   msg_out( "   removing low frequency background structure...")
   result = NDG( ins )
   invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins,qf,result) )

   return result
Code example #12
def fitquad(text,a,b,c,xvals,yvals):
   global qxlist, qylist

   if len(xvals) != len(yvals):
      raise UsageError("fitquad: length of X and Y arrays differ in "
                       "fit: {0}.".format(text) )
   msg_out( "\n\n Doing quadratic fit: '{0}'...".format(text) )

#  Scale the Y values so that they cover the range -100 to +100.
   ymax = max( yvals )
   ymin = min( yvals )
   alpha = 200/(ymax-ymin)
   beta = 100 - alpha*ymax
   qylist = [alpha*y+beta for y in yvals]

#  Scale the X values so that they cover the range -1 to +1.
   xmax = max( xvals )
   xmin = min( xvals )
   gam = 2/(xmax-xmin)
   delta = 1 - gam*xmax
   qxlist = [gam*x+delta for x in xvals]

#  Find a quadratic fit to the scaled X and Y values, iterating to
#  reject outliers.
   for i in range(0,5):
      msg_out( "\nIteration {0}: Fitting to {1} data points...".format(i+1,len(qxlist)) )

#  Initial guess at model parameters.
      x0 = np.array([a,b,c])

#  Do a fit to find the optimum model parameters.
      res = minimize( objfunquad, x0, method='nelder-mead',
                      options={'xtol': 1e-5, 'disp': True})

#  Find RMS residual between data and fit.
      rms = residquad( res.x )
      msg_out( "   Fit: {0}    RMS: {1}".format(res.x, rms) )

#  Remove points more than 2 sigma from the fit.
      rejectquad( 2*rms, res.x )

#  Scale the best fit parameters so that they refer to the unscaled X
#  and Y values.
   a = ( res.x[0] + res.x[1]*delta + res.x[2]*delta*delta - beta )/alpha
   b = ( res.x[1]*gam + 2*res.x[2]*gam*delta )/alpha
   c = ( res.x[2]*gam*gam )/alpha

#  return results.
   return (a,b,c)
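The three assignments above undo the scalings y' = alpha*y + beta and x' = gam*x + delta applied before the fit: substituting x' and y' into y' = A + B*x' + C*x'**2 and solving for y gives exactly the returned (a, b, c). A small self-contained numpy check of that algebra (all values are illustrative):

#  Check of the rescaling algebra used at the end of fitquad().
import numpy as np

alpha, beta, gam, delta = 0.5, 2.0, 0.1, -0.3   #  illustrative scalings
A, B, C = 1.0, -2.0, 0.5                        #  fit in scaled coordinates

a = (A + B*delta + C*delta*delta - beta)/alpha
b = (B*gam + 2*C*gam*delta)/alpha
c = (C*gam*gam)/alpha

x = np.linspace(-5.0, 5.0, 11)
lhs = alpha*(a + b*x + c*x*x) + beta            #  scaled version of y(x)
rhs = A + B*(gam*x + delta) + C*(gam*x + delta)**2
assert np.allclose(lhs, rhs)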
Code example #13
File: pol2scan.py  Project: edwardchapin/starlink
   if not iref:
      iref = "!"
   qref = parsys["QREF"].value
   uref = parsys["UREF"].value

#  If no Q and U values were supplied, create a set of Q and U time
#  streams from the supplied analysed intensity time streams. Put them in
#  the QUDIR directory, or the temp directory if QUDIR is null.
   if inqu is None:
      qudir =  parsys["QUDIR"].value
      if not qudir:
         qudir = NDG.tempdir
      elif not os.path.exists(qudir):
         os.makedirs(qudir)

      msg_out( "Calculating Q and U time streams for each bolometer...")
      invoke("$SMURF_DIR/calcqu in={0} lsqfit=yes config=def outq={1}/\*_QT "
             "outu={1}/\*_UT fix=yes".format( indata, qudir ) )

#  Get groups listing the time series files created by calcqu.
      qts = NDG( "{0}/*_QT".format( qudir ) )
      uts = NDG( "{0}/*_UT".format( qudir ) )

#  If pre-calculated Q and U values were supplied, identify the Q and U
#  files.
   else:
      msg_out( "Using pre-calculated Q and U values...")

      qndfs = []
      undfs = []
      for ndf in inqu:
Code example #14
File: pol2cat.py  Project: andrecut/starlink
#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  The following call to SMURF:CALCQU creates two HDS container files -
#  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
#  these container files in the NDG temporary directory.
   qcont = NDG(1)
   qcont.comment = "qcont"
   ucont = NDG(1)
   ucont.comment = "ucont"

#  Create a set of Q images and a set of U images. These are put into the HDS
#  container files "q_TMP.sdf" and "u_TMP.sdf". Each image contains Q or U
#  values derived from a short section of raw data during which each bolometer
#  moves less than half a pixel.
   msg_out( "Calculating Q and U values for each bolometer...")
   invoke("$SMURF_DIR/calcqu in={0} config={1} outq={2} outu={3} fix".
          format(indata,config,qcont,ucont) )

#  Remove spikes from the Q and U images. The cleaned NDFs are written to
#  temporary NDFs specified by two new NDG objects "qff" and "uff", which
#  inherit their size from the existing groups "qcont" and "ucont".
   msg_out( "Removing spikes from bolometer Q and U values...")
   qff = NDG(qcont)
   qff.comment = "qff"
   uff = NDG(ucont)
   uff.comment = "uff"
   invoke( "$KAPPA_DIR/ffclean in={0} out={1} box=3 clip=\[2,2,2\]"
           .format(qcont,qff) )
   invoke( "$KAPPA_DIR/ffclean in={0} out={1} box=3 clip=\[2,2,2\]"
           .format(ucont,uff) )
Code example #15
File: tilepaste.py  Project: kakirastern/starlink
                deflt = "DAS"

            else:
                deflt = None

    except:
        deflt = None

    if deflt is not None:
        parsys["INSTRUMENT"].default = deflt
        parsys["INSTRUMENT"].noprompt = True

#  Get the JCMT instrument. Quote the string so that it can be used as
#  a command line argument when running an atask from the shell.
    instrument = starutil.shell_quote(parsys["INSTRUMENT"].value)
    msg_out("Updating tiles for {0} data".format(instrument))

    #  See if temp files are to be retained.
    retain = parsys["RETAIN"].value

    #  Set up the dynamic default for parameter "JSA". This is True if the
    #  projection of the WCS FrameSet in the first supplied NDF is
    #  "HEALPix".
    prj = invoke(
        "$KAPPA_DIR/wcsattrib ndf={0} mode=get name=projection".format(
            indata[0]))
    parsys["JSA"].default = True if prj.strip() == "HEALPix" else False

    #  See if input NDFs are on the JSA all-sky pixel grid.
    jsa = parsys["JSA"].value
    if not jsa:
Code example #16
File: pol2sim.py  Project: wadawson/starlink
#  Do not use more com files for each sub-array than are needed.
      remlist = []
      for subarr in ( "s8a", "s8b", "s8c", "s8d", "s4a", "s4b", "s4c", "s4d" ):
         nin = 0
         for ndf in indata:
            if subarr in ndf:
               nin += 1

         ncom = 0
         for ndf in incom:
            if subarr in ndf:
               ncom += 1
               if ncom > nin:
                  remlist.append( ndf )

      msg_out("Ignoring {0} surplus files in INCOM".format(len(remlist) ))
      for ndf in remlist:
        incom.remove( ndf )

#  See if new artificial I, Q and U maps are to be created.
   newart = parsys["NEWART"].value

#  If not, set the ART parameters to indicate that the specified NDFs
#  must already exist.
   if not newart:
      parsys["ARTI"].exists = True
      parsys["ARTQ"].exists = True
      parsys["ARTU"].exists = True
   else:
      parsys["ARTI"].exists = False
      parsys["ARTQ"].exists = False
Code example #17
File: rawregion.py  Project: dt888/starlink
#  user being prompted for a value.
      if instrument is not None:
         parsys["INSTRUMENT"].default = instrument
         parsys["INSTRUMENT"].noprompt = True

#  Get the chosen instrument.
      instrument = parsys["INSTRUMENT"].value
      instrument = starutil.shell_quote( instrument )

#  Get a list of the tiles that overlap the Region.
      invoke( "$SMURF_DIR/jsatilelist in={0} instrument={1} quiet".format(region,instrument) )
      tiles = starutil.get_task_par( "TILES", "jsatilelist" )

#  List them.
      for tile in tiles:
         msg_out( "Tile {0} touches {1}".format(tile, indata))

#  Remove temporary files.
   cleanup()

#  If a StarUtilError of any kind occurred, display the message but hide the
#  python traceback. To see the trace back, uncomment "raise" instead.
except starutil.StarUtilError as err:
#  raise
   print( err )
   cleanup()

# This is to trap control-C etc, so that we can clean up temp files.
except:
   cleanup()
   raise
Code example #18
File: pol2stack.py  Project: sladen/starlink
#  We only arrive here if the POLANAL- frame was found, so rename it to POLANAL
      invoke( "$KAPPA_DIR/wcsattrib ndf={0} mode=set name=domain newval=POLANAL".format(cube) )

#  Re-instate the original current Frame
   invoke( "$KAPPA_DIR/wcsframe ndf={0} frame={1}".format(cube,domain) )

#  POLPACK needs to know the order of I, Q and U in the 3D cube. Store
#  this information in the POLPACK extension within "cube.sdf".
   invoke( "$POLPACK_DIR/polext in={0} stokes=qui".format(cube) )

#  Create a FITS catalogue containing the polarisation vectors.
   command = "$POLPACK_DIR/polvec in={0} cat={1} debias={2}".format(cube,outcat,debias)
   if pimap:
      command = "{0} ip={1}".format(command,pimap)
      msg_out( "Creating the output catalogue {0} and polarised intensity map {1}...".format(outcat,pimap) )
   else:
      msg_out( "Creating the output catalogue: {0}...".format(outcat) )
   msg = invoke( command )
   msg_out( "\n{0}\n".format(msg) )

#  Remove temporary files.
   cleanup()

#  If a StarUtilError of any kind occurred, display the message but hide the
#  python traceback. To see the trace back, uncomment "raise" instead.
except starutil.StarUtilError as err:
#  raise
   print( err )
   cleanup()
Code example #19
File: smurfutil.py  Project: bbrond/starlink
def pca( indata, ncomp ):
   """

   Identifies and returns the strongest PCA components in a 3D NDF.

   Invocation:
      result = pca( indata, ncomp )

   Arguments:
      indata = NDG
         An NDG object specifying a single 3D NDF. Each plane in the cube
         is a separate image, and the images are compared using PCA.
      ncomp = int
         The number of PCA components to include in the returned NDF.

   Returned Value:
      A new NDG object containing a single 3D NDF containing just the
      strongest "ncomp" PCA components found in the input NDF.

   """

   msg_out( "   finding strongest {0} components using Principal Component Analysis...".format(ncomp) )

#  Get the shape of the input NDF.
   invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(indata) )
   nx = get_task_par( "dims(1)", "ndftrace" )
   ny = get_task_par( "dims(2)", "ndftrace" )
   nz = get_task_par( "dims(3)", "ndftrace" )

#  Fill any bad pixels.
   tmp = NDG(1)
   invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=no niter=10 size=10".format(indata,tmp) )

#  Read the planes from the supplied NDF. Note, numpy axis ordering is the
#  reverse of starlink axis ordering. We want a numpy array consisting of
#  "nz" elements, each being a vectorised form of a plane from the 3D NDF.
   ndfdata = numpy.reshape( Ndf( tmp[0] ).data, (nz,nx*ny) )

#  Normalize each plane to a mean of zero and standard deviation of 1.0
   means = []
   sigmas = []
   newdata = []
   for iplane in range(0,nz):
      plane = ndfdata[ iplane ]
      mn = plane.mean()
      sg = math.sqrt( plane.var() )
      means.append( mn )
      sigmas.append( sg )

      if sg > 0.0:
         newdata.append( (plane-mn)/sg )

   newdata= numpy.array( newdata )

#  Transpose as required by MDP.
   pcadata = numpy.transpose( newdata )

#  Find the required number of PCA components (these are the strongest
#  components).
   pca = mdp.nodes.PCANode( output_dim=ncomp )
   comp = pca.execute( pcadata )

#  Re-project the components back into the space of the input 3D NDF.
   ip = numpy.dot( comp, pca.get_recmatrix() )

#  Transpose the array so that each row is an image.
   ipt = numpy.transpose(ip)

#  Normalise them back to the original scales.
   jplane = 0
   newdata = []
   for iplane in range(0,nz):
      if sigmas[ iplane ] > 0.0:
         newplane = sigmas[ iplane ] * ipt[ jplane ] + means[ iplane ]
         jplane += 1
      else:
         newplane = ndfdata[ iplane ]
      newdata.append( newplane )
   newdata= numpy.array( newdata )

#  Dump the re-projected images out to a 3D NDF.
   result = NDG(1)
   indf = ndf.open( result[0], 'WRITE', 'NEW' )
   indf.new('_DOUBLE', 3, numpy.array([1,1,1]),numpy.array([nx,ny,nz]))
   ndfmap = indf.map( 'DATA', '_DOUBLE', 'WRITE' )
   ndfmap.numpytondf( newdata )
   indf.annul()

#  Uncomment to dump the components.
#   msg_out( "Dumping PCA comps to {0}-comps".format(result[0]) )
#   compt = numpy.transpose(comp)
#   indf = ndf.open( "{0}-comps".format(result[0]), 'WRITE', 'NEW' )
#   indf.new('_DOUBLE', 3, numpy.array([1,1,1]),numpy.array([nx,ny,ncomp]))
#   ndfmap = indf.map( 'DATA', '_DOUBLE', 'WRITE' )
#   ndfmap.numpytondf( compt )
#   indf.annul()

   return result
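pca() relies on the MDP package for the decomposition. For readers without MDP, a numpy-only sketch of the same normalise / decompose / re-project round trip (illustrative, not the Starlink implementation; constant planes are assumed absent):

#  Numpy-only sketch of the PCA round trip in pca() above. "data" plays
#  the role of the (nz, nx*ny) array of vectorised planes.
import numpy as np

def pca_roundtrip(data, ncomp):
   #  Normalise each plane to zero mean and unit standard deviation.
   means = data.mean(axis=1, keepdims=True)
   sigmas = data.std(axis=1, keepdims=True)
   norm = (data - means)/sigmas

   #  PCA via SVD, with pixels as samples and planes as features (the
   #  layout MDP sees after the transpose in pca()). The rows of "norm"
   #  are already zero-mean, so no further centring is needed.
   u, s, vt = np.linalg.svd(norm.T, full_matrices=False)
   comp = u[:, :ncomp]*s[:ncomp]      #  cf. pca.execute()
   recon = comp @ vt[:ncomp]          #  cf. numpy.dot(comp, pca.get_recmatrix())

   #  Undo the normalisation to recover planes on their original scales.
   return recon.T*sigmas + means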
Code example #20
File: smurfutil.py  Project: joaogerd/starlink
def remove_corr( ins, masks ):
   """

   Masks the supplied set of Q or U images and then looks for and removes
   correlated components in the background regions.

   Invocation:
      result = remove_corr( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         correlated background components are to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "in". It should have the same size as "in".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

#  How many NDFs are we processing?
   nndf = len( ins )

#  Blank out sources by copying the bad pixels from "mask" into "in". We refer
#  to "q" below, but the same applies whether processing Q or U.
   msg_out( "   masking...")
   qm = NDG( ins )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins,masks,qm) )

#  Find the most correlated pair of images. We use the basic correlation
#  coefficient calculated by kappa:scatter for this.
   msg_out( "   Finding most correlated pair of images...")
   cmax = 0
   for i in range(0,nndf-1):
      for j in range(i + 1,nndf):
         invoke( "$KAPPA_DIR/scatter in1={0} in2={1} device=!".format(qm[i],qm[j]) )
         c = starutil.get_task_par( "corr", "scatter" )
         if abs(c) > abs(cmax):
            cmax = c
            cati = i
            catj = j

   if abs(cmax) < 0.3:
      msg_out("   No correlated images found!")
      return ins

   msg_out( "   Correlation for best pair of images = {0}".format( cmax ) )

#  Find images that are reasonably correlated to the pair found above,
#  and coadd them to form a model for the correlated background
#  component. Note, the holes left by the masking are filled in by the
#  coaddition using background data from other images.
   msg_out( "   Forming model...")

#  Form the average of the two most correlated images, first normalising
#  them to a common scale so that they both have equal weight.
   norm = "{0}/norm".format(NDG.tempdir)
   if not normer( qm[cati], qm[catj], 0.3, norm ):
      norm = qm[cati]

   mslist = NDG( [ qm[catj], norm ] )
   ave = "{0}/ave".format(NDG.tempdir)
   invoke( "$CCDPACK_DIR/makemos in={0} method=mean genvar=no usevar=no out={1}".format(mslist,ave) )

#  Loop round each image finding the correlation factor of the image and
#  the above average image.
   temp = "{0}/temp".format(NDG.tempdir)
   nlist = []
   ii = 0
   for i in range(0,nndf):
      c = blanker( qm[i], ave, temp )

#  If the correlation is high enough, normalize the image to the average
#  image and then include the normalised image in the list of images to be
#  coadded to form the final model.
      if abs(c) > 0.3:
         tndf = "{0}/t{1}".format(NDG.tempdir,ii)
         ii += 1
         invoke( "$KAPPA_DIR/normalize in1={1} in2={2} out={0} device=!".format(tndf,temp,ave))
         nlist.append( tndf )

   if ii == 0:
      msg_out("   No secondary correlated images found!")
      return ins

   msg_out("   Including {0} secondary correlated images in the model.".format(ii) )

#  Coadd the images created above to form the model of the correlated
#  background component. Fill any remaining bad pixels with artificial data.
   model = "{0}/model".format(NDG.tempdir)
   included = NDG( nlist )
   invoke( "$CCDPACK_DIR/makemos in={0} method=mean usevar=no genvar=no out={1}".format( included, temp ) )
   invoke( "$KAPPA_DIR/fillbad in={1} variance=no out={0} size=10 niter=10".format(model,temp) )

#  Now estimate how much of the model is present in each image and remove it.
   msg_out("   Removing model...")
   temp2 = "{0}/temp2".format(NDG.tempdir)
   qnew = NDG(ins)
   nbetter = 0
   for i in range(0,nndf):

#  Try to normalise the model to the current image. This fails if the
#  correlation between them is too low.
      if normer( model, qm[i], 0.3, temp ):

#  Remove the scaled model from the image.
         invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins[i],temp,temp2) )

#  We now check that removing the correlated background component has in
#  fact made the image flatter (poor fits etc can mean that images that
#  are poorly correlated to the model have a large amount of model
#  removed and so make the image less flat). Find the standard deviation
#  of the data in the original image and in the corrected image.
         invoke( "$KAPPA_DIR/stats {0} quiet".format(ins[i]) )
         oldsig = get_task_par( "sigma", "stats" )

         invoke( "$KAPPA_DIR/stats {0} quiet".format(temp2) )
         newsig = get_task_par( "sigma", "stats" )

#  If the correction has made the image flatter, copy it to the returned NDG.
         if newsig < oldsig:
            nbetter += 1
            invoke( "$KAPPA_DIR/ndfcopy in={1} out={0}".format(qnew[i],temp2) )
         else:
            invoke( "$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i],qnew[i]) )

#  If the input image is poorly correlated to the model, return the input
#  image unchanged.
      else:
         invoke( "$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i],qnew[i]) )

   msg_out( "   {0} out of {1} images have been improved.".format(nbetter,nndf) )

#  Return the corrected images.
   return qnew
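The pair search at the top of remove_corr() drives kappa:scatter over every pair of masked images. A stand-alone numpy sketch of the same search, assuming a list of equally shaped 2-D arrays in which masked pixels are NaN and every pair shares some unmasked pixels:

#  Numpy sketch of the "most correlated pair" search in remove_corr().
import numpy as np

def most_correlated_pair(images):
   flat = [im.ravel() for im in images]
   cmax, cati, catj = 0.0, None, None
   for i in range(len(flat) - 1):
      for j in range(i + 1, len(flat)):
         good = np.isfinite(flat[i]) & np.isfinite(flat[j])
         c = np.corrcoef(flat[i][good], flat[j][good])[0, 1]
         if abs(c) > abs(cmax):
            cmax, cati, catj = c, i, j
   return (cmax, cati, catj)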
Code example #21
File: jsajoin.py  Project: kakirastern/starlink
                tile_dict[jsatile] = tile

#  Create a list holding the paths to the tile NDFs that intersect
#  the required region.
        ntile = 0
        used_tile_list = []
        for jsatile in jsatile_list:
            key = str(jsatile)
            if key in tile_dict and tile_dict[key]:
                used_tile_list.append(tile_dict[key])
                ntile += 1

#  Create an NDG holding the group of tile NDFs.
        if ntile > 0:
            msg_out(
                "{0} of the supplied tiles intersect the requested region.".
                format(ntile))
            used_tiles = NDG(used_tile_list)
        else:
            raise starutil.InvalidParameterError(
                "None of the supplied JSA tiles "
                "intersect the requested region")

#  If we are using all tiles, just use the supplied group of tiles. Use
#  the middle supplied tile as the reference.
    else:
        used_tiles = tiles
        jsatile = int(len(tiles) / 2)
        jsatile = starutil.get_fits_header(tiles[jsatile], "TILENUM")

#  Paste these tile NDFs into a single image. This image still uses the
Code example #22
        if instrument is not None:
            parsys["INSTRUMENT"].default = instrument
            parsys["INSTRUMENT"].noprompt = True

#  Get the chosen instrument.
        instrument = parsys["INSTRUMENT"].value
        instrument = starutil.shell_quote(instrument)

        #  Get a list of the tiles that overlap the Region.
        invoke("$SMURF_DIR/jsatilelist in={0} instrument={1} quiet".format(
            region, instrument))
        tiles = starutil.get_task_par("TILES", "jsatilelist")

        #  List them.
        for tile in tiles:
            msg_out("Tile {0} touches {1}".format(tile, indata))

#  Remove temporary files.
    cleanup()

#  If a StarUtilError of any kind occurred, display the message but hide the
#  python traceback. To see the trace back, uncomment "raise" instead.
except starutil.StarUtilError as err:
    #  raise
    print(err)
    cleanup()

# This is to trap control-C etc, so that we can clean up temp files.
except:
    cleanup()
    raise
Code example #23
File: pol2scan.py  Project: sladen/starlink
   ref = parsys["REF"].value
   if not ref:
      ref = "!"

#  If no Q and U values were supplied, create a set of Q and U time
#  streams from the supplied analysed intensity time streams. Put them in
#  the QUDIR directory, or the temp directory if QUDIR is null.
   if inqu is None:
      north = parsys["NORTH"].value
      qudir =  parsys["QUDIR"].value
      if not qudir:
         qudir = NDG.tempdir
      elif not os.path.exists(qudir):
         os.makedirs(qudir)

      msg_out( "Calculating Q, U and I time streams for each bolometer...")
      invoke("$SMURF_DIR/calcqu in={0} lsqfit=yes config=def outq={1}/\*_QT "
             "outu={1}/\*_UT outi={1}/\*_IT fix=yes north={2}".
             format( indata, qudir, north ) )

#  Get groups listing the time series files created by calcqu.
      qts = NDG( "{0}/*_QT".format( qudir ) )
      uts = NDG( "{0}/*_UT".format( qudir ) )
      its = NDG( "{0}/*_IT".format( qudir ) )

#  If pre-calculated Q and U values were supplied, identify the Q, U and I
#  files.
   else:
      msg_out( "Using pre-calculated Q, U and I values...")

      qndfs = []
Code example #24
File: tilepaste.py  Project: astrobuff/starlink
            deflt = "DAS"

         else:
            deflt = None

   except:
      deflt = None

   if deflt is not None:
      parsys["INSTRUMENT"].default = deflt
      parsys["INSTRUMENT"].noprompt = True

#  Get the JCMT instrument. Quote the string so that it can be used as
#  a command line argument when running an atask from the shell.
   instrument = starutil.shell_quote( parsys["INSTRUMENT"].value )
   msg_out( "Updating tiles for {0} data".format(instrument) )

#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  Set up the dynamic default for parameter "JSA". This is True if the
#  projection of the WCS FrameSet in the first supplied NDF is
#  "HEALPix".
   prj = invoke("$KAPPA_DIR/wcsattrib ndf={0} mode=get name=projection".format(indata[0]) )
   parsys["JSA"].default = True if prj.strip() == "HEALPix" else False

#  See if input NDFs are on the JSA all-sky pixel grid.
   jsa = parsys["JSA"].value
   if not jsa:
      msg_out( "The supplied NDFs will first be resampled onto the JSA "
               "all-sky pixel grid" )
Code example #25
File: sc2compare.py  Project: kakirastern/starlink
#  Likewise compare the EXP_TIME extension NDF.
    report2 = os.path.join(NDG.tempdir, "report2")
    invoke("$KAPPA_DIR/ndfcompare in1={0}.more.smurf.exp_time accdat=1E-3 "
           "in2={1}.more.smurf.exp_time report={2} quiet".format(
               in1, in2, report2))

    if not starutil.get_task_par("similar", "ndfcompare"):
        similar = False
        report_lines.append(
            "\n\n{0}\n   Comparing EXP_TIME arrays....\n".format("-" * 80))
        with open(report2) as f:
            report_lines.extend(f.readlines())

#  Display the final result.
    if similar:
        msg_out("No differences found between {0} and {1}".format(in1, in2))
    else:
        msg_out("Significant differences found between {0} and {1}".format(
            in1, in2))

        #  If required write the report describing the differences to a text file.
        if report:
            with open(report, "w") as f:
                f.writelines(report_lines)
            msg_out("   (report written to file {0}).".format(report))

#  Write the output parameter.
    starutil.put_task_par("similar", "sc2compare", similar, "_LOGICAL")

    #  Remove temporary files.
    cleanup()
Code example #26
File: fw_2d.py  Project: astrobuff/starlink
#  Fixed clump size (FWHM in pixels on all axes)
clump_fwhm = 10

#  Initial mean clump separation in pixels
clump_separation = clump_fwhm/2.0

#  Do tests for different separations (currently just one)
for isep in range(0, 1):

#  Initial peak value
   peak_value = noise*0.5

#  Do tests for different peak values (currently just one)
   for ipeak in range(0, 1):
      starutil.msg_out( ">>> Doing sep={0} and peak={1}....".format(clump_separation,peak_value))

#  Get the dimensions of a square image that would be expected to
#  contain the target number of clumps at the current separation.
      npix = int( clump_separation*math.sqrt( nclump_target ) )

#  Create a temporary file containing circular clumps of constant size
#  and shape (except for the effects of noise).
      model = NDG(1)
      out = NDG(1)
      outcat = NDG.tempfile(".fit")
      invoke( "$CUPID_DIR/makeclumps angle=\[0,0\] beamfwhm=0 deconv=no "
              "fwhm1=\[{0},0\] fwhm2=\[{0},0\] lbnd=\[1,1\] ubnd=\[{1},{1}\] "
              "model={2} nclump={3} out={4} outcat={5} pardist=normal "
              "peak = \[{6},0\] rms={7} trunc=0.1".
               format(clump_fwhm,npix,model,nclump_target,out,outcat,
Code example #27
                        ipprms_dc_Q[row_val, col_val] = ipprms.x[2]
                        ipprms_dc_U[row_val, col_val] = ipprms.x[3]
                        chi2Vals[row_val, col_val] = ipprms.fun
                    else:
                        returnCode[row_val, col_val] = False

            # Write NDFs.
            out_p0 = write_ip_NDF(ip_prms['Pf_' + a[-1]], bad_pixel_ref)
            out_p1 = write_ip_NDF(ipprms_pol_screen, bad_pixel_ref)
            out_c0 = write_ip_NDF(ipprms_Co, bad_pixel_ref)
            out_angc = write_ip_NDF(ip_prms['Theta_ip_' + a[-1]],
                                    bad_pixel_ref)

            # Fill any bad pixels with a smooth function to match surrounding pixels
            msg_out(
                "Filling in bad pixel values for {0} bolometer IP parameters..."
                .format(a))
            out_p0_filled = NDG(1)
            invoke(
                "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15"
                .format(out_p0, out_p0_filled))
            out_p1_filled = NDG(1)
            invoke(
                "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15"
                .format(out_p1, out_p1_filled))
            out_c0_filled = NDG(1)
            invoke(
                "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15"
                .format(out_c0, out_c0_filled))
            out_angc_filled = NDG(1)
            invoke(
Code example #28
File: tounimap.py  Project: astrobuff/starlink
   indata = parsys["IN"].value
   retain = parsys["RETAIN"].value
   outbase = parsys["OUT"].value
   fakemap = parsys["FAKEMAP"].value

#  Erase any NDFs holding cleaned data, extinction or pointing data from
#  previous runs.
   for path in glob.glob("*_con_res_cln.sdf"):
      myremove(path)
      base = path[:-16]
      myremove("{0}_lat.sdf".format(base))
      myremove("{0}_lon.sdf".format(base))
      myremove("{0}_con_ext.sdf".format(base))

#  Use sc2concat to concatenate and flatfield the data.
   msg_out( "Concatenating and flatfielding..." )
   concbase = NDG.tempfile("")
   invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format(indata,concbase))
   concdata = NDG( "{0}_*".format(concbase) )

#  Use makemap to generate quality, extinction and pointing info.
   confname = NDG.tempfile()
   fd = open(confname,"w")
   fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
   fd.write("numiter=1\n")
   fd.write("exportclean=1\n")
   fd.write("exportndf=ext\n")
   fd.write("exportlonlat=1\n")
   fd.write("dcfitbox=0\n")
   fd.write("noisecliphigh=0\n")
   fd.write("order=0\n")
Code example #29
File: skyloop.py  Project: joaogerd/starlink
#  Initialise the parameters to hold any values supplied on the command
#  line. This automatically adds definitions for the additional parameters
#  "MSG_FILTER", "ILEVEL", "GLEVEL" and "LOGFILE".
   parsys = starutil.ParSys( params )

#  It's a good idea to get parameter values early if possible, in case
#  the user goes off for a coffee whilst the script is running and does not
#  see a later parameter prompt or error.
   restart = parsys["RESTART"].value
   if restart is None:
      retain = parsys["RETAIN"].value
   else:
      retain = True
      NDG.tempdir = restart
      NDG.overwrite = True
      msg_out( "Re-starting using data in {0}".format(restart) )

   indata = parsys["IN"].value
   outdata = parsys["OUT"].value
   niter = parsys["NITER"].value
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   itermap = parsys["ITERMAP"].value

#  See if we are using pre-cleaned data, in which case there is no need
#  to export the cleaned data on the first iteration. Note we need to
#  convert the string returned by "invoke" to an int explicitly, otherwise
Code example #30
def get_filtered_skydip_data(qarray, uarray, clip, a):
    """

    This function takes Q and U array data (output from calcqu), applies ffclean to
    remove spikes, and returns the results as numpy arrays.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref ) = ...
            get_filtered_skydip_data(qarray,uarray,clip,a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
           a = A string indicating the array (eg. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term (1-exp(-tau*air_mass)).
            Here tau is calculated using the WVM data as input.
        bad_pixel_ref = An NDG holding a bad-pixel reference formed from the cleaned Q and U data.

    """

    #  Remove spikes from the Q images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "qff", which
    #  inherits its size from the existing group "qarray".
    msg_out("Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(qarray, qff, clip))

    #  Remove spikes from the U images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "uff", which
    #  inherits its size from the existing group "uarray".
    msg_out("Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(uarray, uff, clip))

    elevation = []
    opacity_term = []
    for stare in range(len(qff[:])):
        # Stack Q data in numpy array
        # Get elevation information
        elevation.append(
            numpy.array(
                float(
                    invoke(
                        "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART"
                        .format(qff[stare])))))
        # Get Tau (Opacity) information
        tau_temp = numpy.array(
            float(
                invoke(
                    "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".
                    format(qff[stare]))))
        # Convert to obs band.
        if '4' in a:
            tau_temp = 19.04 * (tau_temp - 0.018)  # Eq from Dempsey et al
        elif '8' in a:
            tau_temp = 5.36 * (tau_temp - 0.006)  # Eq from Dempsey et al.
        opacity_term.append(1 -
                            numpy.exp(-1 * tau_temp /
                                      numpy.sin(numpy.radians(elevation[-1]))))
        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(qff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")
        qdata_temp = numpy.reshape(Ndf(qff[stare]).data, (ny, nx))
        qdata_temp[numpy.abs(qdata_temp) > 1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total, qdata_temp))
        qvar_temp = numpy.reshape(Ndf(qff[stare]).var, (ny, nx))
        qvar_temp[numpy.abs(qvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total, qvar_temp))
        # Stack U data in numpy array
        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(uff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")
        udata_temp = numpy.reshape(Ndf(uff[stare]).data, (ny, nx))
        udata_temp[numpy.abs(udata_temp) > 1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total, udata_temp))
        uvar_temp = numpy.reshape(Ndf(uff[stare]).var, (ny, nx))
        uvar_temp[numpy.abs(uvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total, uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        qff, uff, bad_pixel_ref))
    return (qdata_total, qvar_total, udata_total, uvar_total, elevation,
            opacity_term, bad_pixel_ref)
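The band-dependent opacity correction buried in the loop above is worth isolating. A hypothetical helper, using the same Dempsey et al. scalings of the WVM tau that are quoted in the code:

#  Hypothetical helper for the opacity term used above; the 450um/850um
#  coefficients are the Dempsey et al. relations quoted in the code.
import numpy

def opacity_term(tau_wvm, elevation_deg, band):
   if band == "450":
      tau = 19.04*(tau_wvm - 0.018)
   elif band == "850":
      tau = 5.36*(tau_wvm - 0.006)
   else:
      raise ValueError("Unknown band: {0}".format(band))
   airmass = 1.0/numpy.sin(numpy.radians(elevation_deg))
   return 1.0 - numpy.exp(-tau*airmass)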
Code example #31
File: tilecutout.py  Project: astrobuff/starlink
                                 noprompt=True))

#  Initialise the parameters to hold any values supplied on the command
#  line.
   parsys = ParSys( params )

#  It's a good idea to get parameter values early if possible, in case
#  the user goes off for a coffee whilst the script is running and does not
#  see a later parameter prompt or error...
   region = parsys["REGION"].value
   outdata = parsys["OUT"].value
   instrument = starutil.shell_quote( parsys["INSTRUMENT"].value )
   retain = parsys["RETAIN"].value

#  Report what we will be doing...
   msg_out( "Creating a cut-out for {0} data".format(instrument) )

   tiledir = os.getenv( 'JSA_TILE_DIR' )
   if tiledir:
      msg_out( "Tiles will be read from {0}".format(tiledir) )
   else:
      msg_out( "Environment variable JSA_TILE_DIR is not set!" )
      msg_out( "Tiles will be read from the current directory ({0})".format(os.getcwd()) )

#  Create an empty list to hold the NDFs for the tiles holding the
#  required data.
   tilendf = []
   itilelist = []

#  Identify the tiles that overlap the specified region, and loop round
#  them.
Code example #32
File: pol2sim.py  Project: sladen/starlink
#  Do not use more com files for each sub-array than are needed.
      remlist = []
      for subarr in ( "s8a", "s8b", "s8c", "s8d", "s4a", "s4b", "s4c", "s4d" ):
         nin = 0
         for ndf in indata:
            if subarr in ndf:
               nin += 1

         ncom = 0
         for ndf in incom:
            if subarr in ndf:
               ncom += 1
               if ncom > nin:
                  remlist.append( ndf )

      msg_out("Ignoring {0} surplus files in INCOM".format(len(remlist) ))
      for ndf in remlist:
        incom.remove( ndf )

#  See if new artificial I, Q and U maps are to be created.
   newart = parsys["NEWART"].value

#  If not, set the ART parameters to indicate that the specified NDFs
#  must already exist.
   if not newart:
      parsys["ARTI"].exists = True
      parsys["ARTQ"].exists = True
      parsys["ARTU"].exists = True
   else:
      parsys["ARTI"].exists = False
      parsys["ARTQ"].exists = False
Code example #33
File: pol2stack.py  Project: astrobuff/starlink
#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  See if statistical debiasing is to be performed.
   debias = parsys["DEBIAS"].value

#  See if we should convert pW to Jy.
   jy = parsys["JY"].value

#  Determine the waveband and get the corresponding FCF values with and
#  without POL2 in the beam.
   try:
      filter = int( float( starutil.get_fits_header( qin[0], "FILTER", True )))
   except NoValueError:
      filter = 850
      msg_out( "No value found for FITS header 'FILTER' in {0} - assuming 850".format(qin[0]))

   if filter == 450:
      fcf1 = 962.0
      fcf2 = 491.0
   elif filter == 850:
      fcf1 = 725.0
      fcf2 = 537.0
   else:
      raise starutil.InvalidParameterError("Invalid FILTER header value "
             "'{0} found in {1}.".format( filter, qin[0] ) )

#  Remove any spectral axes
   qtrim = NDG(qin)
   invoke( "$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(qin,qtrim) )
   utrim = NDG(uin)
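The FILTER-to-FCF selection above is a fixed two-entry lookup; a hypothetical table-driven equivalent, using the same hard-wired values:

#  Hypothetical table-driven version of the FCF selection above.
FCFS = { 450: (962.0, 491.0), 850: (725.0, 537.0) }
try:
   fcf1, fcf2 = FCFS[ filter ]
except KeyError:
   raise starutil.InvalidParameterError("Invalid FILTER header value "
          "'{0}' found in {1}.".format( filter, qin[0] ) )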
Code example #34
                       noprompt=True))

    #  Initialise the parameters to hold any values supplied on the command
    #  line.
    parsys = ParSys(params)

    #  It's a good idea to get parameter values early if possible, in case
    #  the user goes off for a coffee whilst the script is running and does not
    #  see a later parameter prompt or error...
    region = parsys["REGION"].value
    outdata = parsys["OUT"].value
    instrument = starutil.shell_quote(parsys["INSTRUMENT"].value)
    retain = parsys["RETAIN"].value

    #  Report what we will be doing...
    msg_out("Creating a cut-out for {0} data".format(instrument))

    tiledir = os.getenv('JSA_TILE_DIR')
    if tiledir:
        msg_out("Tiles will be read from {0}".format(tiledir))
    else:
        msg_out("Environment variable JSA_TILE_DIR is not set!")
        msg_out("Tiles will be read from the current directory ({0})".format(
            os.getcwd()))

#  Create an empty list to hold the NDFs for the tiles holding the
#  required data.
    tilendf = []
    itilelist = []

    #  Identify the tiles that overlap the specified region, and loop round
Code example #35
File: pol2stack.py  Project: kakirastern/starlink
            ipol2 = True
         elif not ipol2:
            ipol2 = None
            break
      else:
         if ipol2 is None:
            ipol2 = False
         elif ipol2:
            ipol2 = None
            break

   if ipol2 is None:
      raise starutil.InvalidParameterError("Mixture of POL2 and non-POL2 "
                      "I maps supplied - all I maps must be the same.")
   if ipol2:
      msg_out("Input I maps were created from POL2 data")
   else:
      msg_out("Input I maps were created from non-POL2 data")

#  Determine the waveband and get the corresponding FCF values with and
#  without POL2 in the beam.
   try:
      filter = int( float( starutil.get_fits_header( qin[0], "FILTER", True )))
   except NoValueError:
      filter = 850
      msg_out( "No value found for FITS header 'FILTER' in {0} - assuming 850".format(qin[0]))

   if filter == 450:
      fcf_qu = 962.0
      if ipol2:
         fcf_i = 962.0
Code example #36
         if subarray is None:
            text = starutil.invoke( "$KAPPA_DIR/provshow {0}".format(config1) )
            if "s4a" in text or "s4b" in text or "s4c" in text or "s4d" in text:
               subarray = "s4"
            elif "s8a" in text or "s8b" in text or "s8c" in text or "s8d" in text:
               subarray = "s8"
            else:
               subarray = None
      except:
         print( "\n!! It looks like NDF '{0}' either does not exist or is "
                "corrupt.".format(config1) )
         os._exit(1)

   if isndf1:
      if subarray is None:
         msg_out("Cannot determine the SCUBA-2 waveband for NDF '{0}' "
                 "- was it really created by MAKEMAP?".format(config1), starutil.CRITICAL )
         waveband1 = None
      elif subarray[1:2] == "4":
         waveband1 = "450"
      elif subarray[1:2] == "8":
         waveband1 = "850"
      else:
         raise starutil.InvalidParameterError("Unexpected value '{0}' found "
                  "for SUBARRAY FITS Header in {1}.".format(subarray,config1))
   else:
      waveband1 = None

#  Get the second config string.
   config2 = parsys["CONFIG2"].value
   if config2 is None:
      if param is None:
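A hypothetical helper distilling the subarray-to-waveband mapping used above (the SCUBA-2 subarray names s4x/s8x encode the 450um and 850um wavebands):

#  Hypothetical helper for the subarray -> waveband mapping above.
def waveband_from_subarray(subarray):
   if subarray is None:
      return None
   elif subarray[1:2] == "4":
      return "450"
   elif subarray[1:2] == "8":
      return "850"
   else:
      raise starutil.InvalidParameterError("Unexpected value '{0}' found "
               "for SUBARRAY FITS Header.".format(subarray))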
Code example #37
File: sc2compare.py  Project: joaogerd/starlink
         report_lines.extend( f.readlines() )

#  Likewise compare the EXP_TIME extension NDF.
   report2 = os.path.join(NDG.tempdir,"report1")
   invoke( "$KAPPA_DIR/ndfcompare in1={0}.more.smurf.exp_time accdat=1E-4 "
           "in2={1}.more.smurf.exp_time report={2} quiet".format(in1,in2,report2) )

   if not starutil.get_task_par( "similar", "ndfcompare" ):
      similar = False
      report_lines.append("\n\n{0}\n   Comparing EXP_TIME arrays....\n".format("-"*80))
      with open(report2) as f:
         report_lines.extend( f.readlines() )

#  Display the final result.
   if similar:
      msg_out( "No differences found between {0} and {1}".format(in1,in2))
   else:
      msg_out( "Significant differences found between {0} and {1}".format(in1,in2))

#  If required write the report describing the differences to a text file.
      if report:
         with open(report,"w") as f:
            f.writelines( report_lines )
         msg_out( "   (report written to file {0}).".format(report))

#  Write the output parameter.
   starutil.put_task_par( "similar", "sc2compare", similar, "_LOGICAL" )

#  Remove temporary files.
   cleanup()
Code example #38
File: jsajoin.py  Project: astrobuff/starlink
         else:
            tile_dict[ jsatile ] = tile

#  Create a list holding the paths to the tile NDFs that intersect
#  the required region.
      ntile = 0
      used_tile_list = []
      for jsatile in jsatile_list:
         key = str(jsatile)
         if key in tile_dict and tile_dict[ key ]:
            used_tile_list.append( tile_dict[ key ] )
            ntile += 1

#  Create an NDG holding the group of tile NDFs.
      if ntile > 0:
         msg_out( "{0} of the supplied tiles intersect the requested region.".format(ntile) )
         used_tiles = NDG( used_tile_list )
      else:
         raise starutil.InvalidParameterError( "None of the supplied JSA tiles "
                                               "intersect the requested region" )

#  If we are using all tiles, just use the supplied group of tiles. Use
#  the middle supplied tile as the reference.
   else:
      used_tiles = tiles
      jsatile = int( len(tiles)/2 )
      jsatile = starutil.get_fits_header( tiles[ jsatile ], "TILENUM" )

#  Paste these tile NDFs into a single image. This image still uses the
#  JSA all-sky pixel grid. If we have only a single tile, then just use
#  it as it is.
Code example #39
#  Do not use more com files for each sub-array than are needed.
        remlist = []
        for subarr in ("s8a", "s8b", "s8c", "s8d", "s4a", "s4b", "s4c", "s4d"):
            nin = 0
            for ndf in indata:
                if subarr in ndf:
                    nin += 1

            ncom = 0
            for ndf in incom:
                if subarr in ndf:
                    ncom += 1
                    if ncom > nin:
                        remlist.append(ndf)

        msg_out("Ignoring {0} surplus files in INCOM".format(len(remlist)))
        for ndf in remlist:
            incom.remove(ndf)

#  See if new artificial I, Q and U maps are to be created.
    newart = parsys["NEWART"].value

    #  If not, set the ART parameters to indicate that the specified NDFs
    #  must already exist.
    if not newart:
        parsys["ARTI"].exists = True
        parsys["ARTQ"].exists = True
        parsys["ARTU"].exists = True
    else:
        parsys["ARTI"].exists = False
        parsys["ARTQ"].exists = False
Code example #40
def match( ref, imasked, fwhm1=4, fwhm2=100 ):

#  To avoid creating hundreds of temp NDFs, re-use the same ones for each
#  FWHM.
   lof = NDG(1)
   hif = NDG(1)
   iscaled = NDG(1)
   residuals = NDG(1)

#  Create a logarithmically spaced list of 5 FWHM values, in pixels,
#  between the supplied upper and lower FWHM limits. Try each smoothing FWHM
#  in turn, finding the one that gives the best match (i.e. lowest RMS
#  residuals) between high-pass filtered ref image and new I map. On each pass,
#  low frequencies are removed from the ref image using the current FWHM,
#  and the filtered ref image is compared to the new I map (allowing for
#  a degradation in FCF).
   minrms = 1.0E30
   result = (0.0,0.0)
   previous_fwhm = -1
   fwhm1_next = -1
   fwhm2_next = 0
   for fwhm in np.logspace( math.log10(fwhm1), math.log10(fwhm2), 5 ):

#  If required, record the current FWHM value as the upper limit for this
#  function on the next level of recursion.
      if fwhm2_next == -1:
         fwhm2_next = fwhm

#  If an error occurs estimating the RMS for a specific FWHM, ignore the
#  FWHM and pass on to the next.
      try:

#  High-pass filter the ref image by smoothing it with a Gaussian of the
#  current FWHM and then subtracting off the smoothed version.
         invoke("$KAPPA_DIR/gausmooth in={0} out={1} fwhm={2}".
                format( ref, lof, fwhm ))
         invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".
                format( ref, lof, hif ))

#  We will now use kappa:normalize to do a least squares fit between the
#  pixel values in the filtered ref image and the corresponding pixel values
#  in the new I map. This gives us the FCF degradation factor for the I
#  map (the gradient of the fit), and scales the I map so that it has the same
#  normalisation as the ref map. The scaling information is in the high
#  data values (the source regions), and the fitting process will be
#  confused if we include lots of background noise regions, so we use the
#  masked I map instead of the full I map. We also tell kappa:normalise
#  to use only pixels that have a ref value above 2 times the noise value
#  in ref map (to exclude any noise pixels that have been included in the
#  masked I map). So first find the maximum value in the filtered ref map
#  (the upper data limit for kappa:normalize).
         invoke( "$KAPPA_DIR/stats ndf={0}".format(hif) )
         highlimit = float( get_task_par( "MAXIMUM", "stats" ) )

#  Get the noise level in the filtered ref map. This gives us the lower
#  data limit for kappa:normalize. The filtered ref map has no low
#  frequencies and so will be basically flat, so we can just use the standard
#  deviation of the pixel values as the noise. But we do 3 iterations of
#  sigma clipping to exclude the bright source regions.
         invoke( "$KAPPA_DIR/stats ndf={0} clip=\[3,3,3\]".format(hif) )
         noise = float( get_task_par( "SIGMA", "stats" ) )

#  Now use kappa:normalise to do the fit, using only ref values between
#  lowlimit and highlimit. The slope gives the FCF degradation factor,
#  and the offset indicates the difference in bowling between the filtered
#  ref map and the I map (we do not use the offset).
         invoke( "$KAPPA_DIR/normalize in1={0} in2={1} out={2} device=! "
                 "datarange=\[{3},{4}\]".format(imasked,hif,iscaled,2*noise,
                                                highlimit))
         degfac = float( get_task_par( "SLOPE", "normalize" ) )

#  Now we have a version of the I map that is scaled so that it looks
#  like the filtered ref map. Get the residuals between the filtered ref
#  map and the scaled I map. Turn these residuals into SNR values by dividing
#  them by the noise level in the filtered ref map, and then get the RMS
#  of the residuals. We convert the residuals to SNR values because, if the
#  ref map and I map were identical, heavier filtering would reduce the
#  noise, and thus the RMS of the residuals. We want to minimise the RMS
#  of the residuals, and so without conversion to SNR, the minimum would
#  always be found at the heaviest possible filtering.
         invoke( "$KAPPA_DIR/maths exp=\"'(ia-ib)/pa'\" ia={0} ib={1} pa={2} out={3}".
                 format(hif,iscaled,noise,residuals))

#  Get the RMS of the residuals.
         invoke( "$KAPPA_DIR/stats ndf={0}".format(residuals) )
         mean = float( get_task_par( "MEAN", "stats" ) )
         sigma = float( get_task_par( "SIGMA", "stats" ) )
         rms = math.sqrt( mean*mean + sigma*sigma )

#  If this is the lowest RMS found so far, remember it - together with
#  the FWHM and degradation factor.
         if rms < minrms:
            minrms = rms
            result = (degfac,fwhm)
            fwhm1_next = previous_fwhm
            fwhm2_next = -1

#  If an error occurs estimating the RMS for a specific FWHM, ignore the
#  FWHM and pass on to the next.
      except starutil.AtaskError:
         rms = None

#  Record the current FWHM value for use on the next pass.
      previous_fwhm = fwhm

#  Progress report....
      if rms is not None:
         msg_out("   Smoothing with FWHM = {0} pixels gives RMS = {1}".format(fwhm,rms))

#  If the range of FWHM values used by this invocation is greater than 1,
#  invoke this function recursively to find the best FWHM within a smaller
#  range centred on the best FWHM.
   if minrms < 1.0E30 and (fwhm2 - fwhm1) > 1:
      if fwhm1_next <= 0:
         fwhm1_next = fwhm1
      if fwhm2_next <= 0:
         fwhm2_next = fwhm2
      result = match( ref, imasked, fwhm1_next, fwhm2_next )


   return result
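match() refines its search recursively: each pass evaluates the RMS at five logarithmically spaced FWHM values and then recurses on the interval bracketing the best one, until the interval is narrower than one pixel. A generic stand-alone sketch of that strategy (the "cost" callable and all names are illustrative, and the limits are assumed positive):

#  Generic sketch of the recursive grid refinement used by match().
import math
import numpy as np

def refine(cost, lo, hi, npoint=5):
   grid = np.logspace(math.log10(lo), math.log10(hi), npoint)
   costs = [cost(g) for g in grid]
   ibest = int(np.argmin(costs))
   if hi - lo <= 1:
      return grid[ibest]
   lo_next = grid[max(ibest - 1, 0)]
   hi_next = grid[min(ibest + 1, npoint - 1)]
   return refine(cost, lo_next, hi_next, npoint)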
Code example #41
File: pol2_ipdata.py  Project: astrobuff/starlink
                        ipprms_pol_screen[row_val,col_val] = ipprms.x[0]
                        ipprms_Co[row_val,col_val] = ipprms.x[1]
                        ipprms_dc_Q[row_val,col_val] = ipprms.x[2]
                        ipprms_dc_U[row_val,col_val] = ipprms.x[3]
                        chi2Vals[row_val,col_val] = ipprms.fun
                    else:
                        returnCode[row_val,col_val] = False

            # Write NDFs.
            out_p0 = write_ip_NDF(ip_prms['Pf_'+a[-1]],bad_pixel_ref)
            out_p1 = write_ip_NDF(ipprms_pol_screen,bad_pixel_ref)
            out_c0 = write_ip_NDF(ipprms_Co,bad_pixel_ref)
            out_angc = write_ip_NDF(ip_prms['Theta_ip_'+a[-1]],bad_pixel_ref)

            # Fill any bad pixels with a smooth function to match surrounding pixels
            msg_out( "Filling in bad pixel values for {0} bolometer IP parameters...".format(a))
            out_p0_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p0,out_p0_filled) )
            out_p1_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p1,out_p1_filled) )
            out_c0_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_c0,out_c0_filled) )
            out_angc_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_angc,out_angc_filled) )

            # Copy individual NDFs to single output.
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p0,outdata+'_preclean.'+str.lower(a)+'p0'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p1,outdata+'_preclean.'+str.lower(a)+'p1'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_c0,outdata+'_preclean.'+str.lower(a)+'c0'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_angc,outdata+'_preclean.'+str.lower(a)+'angc'))
コード例 #42
0
ファイル: smurfutil.py プロジェクト: kakirastern/starlink
def remove_corr(ins, masks):
    """

   Masks the supplied set of Q or U images and then looks for and removes
   correlated components in the background regions.

   Invocation:
      result = remove_corr( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         correlated background components are to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "ins". It should have the same size as "ins".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

    #  How many NDFs are we processing?
    nndf = len(ins)

    #  Blank out sources by copying the bad pixels from "masks" into "ins". We
    #  refer to "q" below, but the same applies whether processing Q or U.
    msg_out("   masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    #  Find the most correlated pair of images. We use the basic correlation
    #  coefficient calculated by kappa:scatter for this.
    msg_out("   Finding most correlated pair of images...")
    cmax = 0
    for i in range(0, nndf - 1):
        for j in range(i + 1, nndf):
            invoke("$KAPPA_DIR/scatter in1={0} in2={1} device=!".format(
                qm[i], qm[j]))
            c = starutil.get_task_par("corr", "scatter")
            if abs(c) > abs(cmax):
                cmax = c
                cati = i
                catj = j

    if abs(cmax) < 0.3:
        msg_out("   No correlated images found!")
        return ins

    msg_out("   Correlation for best pair of images = {0}".format(cmax))

    #  Find images that are reasonably correlated to the pair found above,
    #  and coadd them to form a model for the correlated background
    #  component. Note, the holes left by the masking are filled in by the
    #  coaddition using background data from other images.
    msg_out("   Forming model...")

    #  Form the average of the two most correlated images, first normalising
    #  them to a common scale so that they both have equal weight.
    norm = "{0}/norm".format(NDG.tempdir)
    if not normer(qm[cati], qm[catj], 0.3, norm):
        norm = qm[cati]

    mslist = NDG([qm[catj], norm])
    ave = "{0}/ave".format(NDG.tempdir)
    invoke(
        "$CCDPACK_DIR/makemos in={0} method=mean genvar=no usevar=no out={1}".
        format(mslist, ave))

    #  Loop round each image finding the correlation factor of the image and
    #  the above average image.
    temp = "{0}/temp".format(NDG.tempdir)
    nlist = []
    ii = 0
    for i in range(0, nndf):
        c = blanker(qm[i], ave, temp)

        #  If the correlation is high enough, normalize the image to the average
        #  image and then include the normalised image in the list of images to be
        #  coadded to form the final model.
        if abs(c) > 0.3:
            tndf = "{0}/t{1}".format(NDG.tempdir, ii)
            ii += 1
            invoke(
                "$KAPPA_DIR/normalize in1={1} in2={2} out={0} device=!".format(
                    tndf, temp, ave))
            nlist.append(tndf)

    if ii == 0:
        msg_out("   No secondary correlated images found!")
        return ins

    msg_out(
        "   Including {0} secondary correlated images in the model.".format(
            ii))

    #  Coadd the images created above to form the model of the correlated
    #  background component. Fill any remaining bad pixels with artificial data.
    model = "{0}/model".format(NDG.tempdir)
    included = NDG(nlist)
    invoke(
        "$CCDPACK_DIR/makemos in={0} method=mean usevar=no genvar=no out={1}".
        format(included, temp))
    invoke("$KAPPA_DIR/fillbad in={1} variance=no out={0} size=10 niter=10".
           format(model, temp))

    #  Now estimate how much of the model is present in each image and remove it.
    msg_out("   Removing model...")
    temp2 = "{0}/temp2".format(NDG.tempdir)
    qnew = NDG(ins)
    nbetter = 0
    for i in range(0, nndf):

        #  Try to normalise the model to the current image. This fails if the
        #  correlation between them is too low.
        if normer(model, qm[i], 0.3, temp):

            #  Remove the scaled model from the image.
            invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(
                ins[i], temp, temp2))

            #  We now check that removing the correlated background component has in
            #  fact made the image flatter (poor fits etc can mean that images that
            #  are poorly correlated to the model have a large amount of model
            #  removed and so make the image less flat). Find the standard deviation
            #  of the data in the original image and in the corrected image.
            invoke("$KAPPA_DIR/stats {0} quiet".format(ins[i]))
            oldsig = get_task_par("sigma", "stats")

            invoke("$KAPPA_DIR/stats {0} quiet".format(temp2))
            newsig = get_task_par("sigma", "stats")

            #  If the correction has made the image flatter, copy it to the returned NDG.
            if newsig < oldsig:
                nbetter += 1
                invoke("$KAPPA_DIR/ndfcopy in={1} out={0}".format(
                    qnew[i], temp2))
            else:
                invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(
                    ins[i], qnew[i]))

        #  If the input image is poorly correlated to the model, return the
        #  input image unchanged.
        else:
            invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i], qnew[i]))

    msg_out("   {0} out of {1} images have been improved.".format(
        nbetter, nndf))

    #  Return the corrected images.
    return qnew
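A typical call might look like the following sketch; the group names are hypothetical, with "qimages" holding the Q maps and "qmasks" a same-sized group in which source pixels are bad.

# Hypothetical usage: remove correlated backgrounds from a group of Q maps.
qclean = remove_corr( qimages, qmasks )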
コード例 #43
0
ファイル: pol2_ipdata.py プロジェクト: astrobuff/starlink
def get_filtered_skydip_data(qarray,uarray,clip,a):
    """

    This function takes Q and U array data (output from calcqu), applies
    ffclean to remove spikes, and stores the results in numpy arrays.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref ) = ...
            get_filtered_skydip_data(qarray,uarray,clip,a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
           a = A string indicating the array (e.g. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term (1-exp(-tau*air_mass)).
            Here tau is calculated using the WVM data as input.
        bad_pixel_ref = An NDF flagging pixels that are bad in either the Q or U data.

    """

    #  Remove spikes from the Q images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "qff", which
    #  inherits its size from the existing group "qarray".
    msg_out( "Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".format(qarray,qff,clip) )

    #  Remove spikes from the U images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "uff", which
    #  inherits its size from the existing group "uarray".
    msg_out( "Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]"
            .format(uarray,uff,clip) )

    elevation = []
    opacity_term = []
    for stare in range(len(qff)):
        # Stack Q data in a numpy array.
        # Get elevation information.
        elevation.append(numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART".format( qff[ stare ] ) ) ) ) )
        # Get Tau (Opacity) information
        tau_temp = numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".format( qff[ stare ] ) ) ) )
        # Convert to obs band.
        if '4' in a:
            tau_temp = 19.04*(tau_temp-0.018) # Eq. from Dempsey et al.
        elif '8' in a:
            tau_temp = 5.36*(tau_temp-0.006) # Eq. from Dempsey et al.
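        # Airmass is approximately 1/sin(elevation), so the opacity brightness
        # term below is 1-exp(-tau*airmass), as described in the docstring.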
        opacity_term.append(1-numpy.exp(-1*tau_temp/numpy.sin(numpy.radians(elevation[-1]))))
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(qff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        qdata_temp = numpy.reshape( Ndf( qff[ stare ] ).data, (ny,nx))
        qdata_temp[numpy.abs(qdata_temp)>1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total,qdata_temp))
        qvar_temp = numpy.reshape( Ndf( qff[ stare ] ).var, (ny,nx))
        qvar_temp[numpy.abs(qvar_temp)>1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total,qvar_temp))
        # Stack U data in numpy array
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(uff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        udata_temp = numpy.reshape( Ndf( uff[ stare ] ).data, (ny,nx))
        udata_temp[numpy.abs(udata_temp)>1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total,udata_temp))
        uvar_temp = numpy.reshape( Ndf( uff[ stare ] ).var, (ny,nx))
        uvar_temp[numpy.abs(uvar_temp)>1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total,uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(qff,uff,bad_pixel_ref))
    return( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref )
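A call to this function might look like the sketch below; the container names "qcont" and "ucont" (holding the Q and U NDFs written by SMURF:CALCQU) and the 3.0 sigma clip are assumptions for illustration.

# Hypothetical usage for the S8A subarray.
(qdata, qvar, udata, uvar, elevation, opacity,
 bad_pixel_ref) = get_filtered_skydip_data( qcont, ucont, 3.0, "S8A" )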
コード例 #44
0
#  Get the name of a temporary NDF that can be used to store the first
#  iteration map. This NDF is put in the NDG temp directory. If we are
#  only doing one iteration, use the supplied output NDF name.
   if niter == 1:
      newmap = outdata
   else:
      newmap = NDG(1)
   prevmap = None

#  Start a list of these maps in case we are creating an output itermap cube.
   maps = []
   maps.append(newmap)

#  Now construct the text of the makemap command and invoke it.
   msg_out( "Iteration 1...")
   cmd = "$SMURF_DIR/makemap in={0} out={1} method=iter config='^{2}'".format(indata,newmap,conf0)
   if pixsize:
      cmd += " pixsize={0}".format(pixsize)
   if ref:
      cmd += " ref={0}".format(ref)
   if mask2:
      cmd += " mask2={0}".format(mask2)
   if mask3:
      cmd += " mask3={0}".format(mask3)
   if extra:
      cmd += " "+extra
   invoke(cmd)
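#  For illustration (names and values hypothetical): with pixsize=4 and a
#  reference map but no masks or extra options, the assembled command would
#  resemble "$SMURF_DIR/makemap in=mydata out=map1 method=iter
#  config='^conf0' pixsize=4 ref=refmap".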

#  The NDFs holding the cleaned time-series data will have been created by
#  makemap in the current working directory. Move them to the NDG temporary
コード例 #45
0
ファイル: configmeld.py プロジェクト: dt888/starlink
         if subarray is None:
            text = starutil.invoke( "$KAPPA_DIR/provshow {0}".format(config1) )
            if "s4a" in text or "s4b" in text or "s4c" in text or "s4d" in text:
               subarray = "s4"
            elif "s8a" in text or "s8b" in text or "s8c" in text or "s8d" in text:
               subarray = "s8"
            else:
               subarray = None
      except:
         print( "\n!! It looks like NDF '{0}' either does not exist or is "
                "corrupt.".format(config1) )
         os._exit(1)

   if isndf1:
      if subarray is None:
         msg_out("Cannot determine the SCUBA-2 waveband for NDF '{0}' "
                 "- was it really created by MAKEMAP?".format(config1), starutil.CRITICAL )
         waveband1 = None
      elif subarray[1:2] == "4":
         waveband1 = "450"
      elif subarray[1:2] == "8":
         waveband1 = "850"
      else:
         raise starutil.InvalidParameterError("Unexpected value '{0}' found "
                  "for SUBARRAY FITS Header in {1}.".format(subarray,config1))
   else:
      waveband1 = None
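#  (Sketch, not in the original: the waveband selection above amounts to a
#  two-entry lookup, e.g. waveband1 = {"4": "450", "8": "850"}.get(subarray[1:2]),
#  with the None and unexpected-value cases handled as above.)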

#  Get the second config string.
   config2 = parsys["CONFIG2"].value
   if config2 is None:
      if param is None:
コード例 #46
0
ファイル: skyloop.py プロジェクト: joequant/starlink
#  Initialise the parameters to hold any values supplied on the command
#  line. This automatically adds definitions for the additional parameters
#  "MSG_FILTER", "ILEVEL", "GLEVEL" and "LOGFILE".
   parsys = starutil.ParSys( params )

#  It's a good idea to get parameter values early if possible, in case
#  the user goes off for a coffee whilst the script is running and does not
#  see a later parameter prompt or error.
   restart = parsys["RESTART"].value
   if restart is None:
      retain = parsys["RETAIN"].value
   else:
      retain = True
      NDG.tempdir = restart
      NDG.overwrite = True
      msg_out( "Re-starting using data in {0}".format(restart) )

   indata = parsys["IN"].value
   outdata = parsys["OUT"].value
   niter = parsys["NITER"].value
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   itermap = parsys["ITERMAP"].value

#  See if we are using pre-cleaned data, in which case there is no need
#  to export the cleaned data on the first iteration.
   if invoke( "$KAPPA_DIR/configecho name=doclean config={0} "
コード例 #47
0
ファイル: tounimap.py プロジェクト: kakirastern/starlink
    indata = parsys["IN"].value
    retain = parsys["RETAIN"].value
    outbase = parsys["OUT"].value
    fakemap = parsys["FAKEMAP"].value

    #  Erase any NDFs holding cleaned data, extinction or pointing data from
    #  previous runs.
    for path in glob.glob("*_con_res_cln.sdf"):
        myremove(path)
        base = path[:-16]
        myremove("{0}_lat.sdf".format(base))
        myremove("{0}_lon.sdf".format(base))
        myremove("{0}_con_ext.sdf".format(base))

    #  Use sc2concat to concatenate and flatfield the data.
    msg_out("Concatenating and flatfielding...")
    concbase = NDG.tempfile("")
    invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format(
        indata, concbase))
    concdata = NDG("{0}_*".format(concbase))

    #  Use makemap to generate quality, extinction and pointing info.
    confname = NDG.tempfile()
    fd = open(confname, "w")
    fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
    fd.write("numiter=1\n")
    fd.write("exportclean=1\n")
    fd.write("exportndf=ext\n")
    fd.write("exportlonlat=1\n")
    fd.write("dcfitbox=0\n")
    fd.write("noisecliphigh=0\n")
コード例 #48
0
#  Initialise the parameters to hold any values supplied on the command
#  line. This automatically adds definitions for the additional parameters
#  "MSG_FILTER", "ILEVEL", "GLEVEL" and "LOGFILE".
   parsys = starutil.ParSys( params )

#  It's a good idea to get parameter values early if possible, in case
#  the user goes off for a coffee whilst the script is running and does not
#  see a later parameter prompt or error.
   restart = parsys["RESTART"].value
   if restart is None:
      retain = parsys["RETAIN"].value
   else:
      retain = True
      NDG.tempdir = restart
      NDG.overwrite = True
      msg_out( "Re-starting using data in {0}".format(restart) )

   indata = parsys["IN"].value
   outdata = parsys["OUT"].value
   niter = parsys["NITER"].value
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   extra1 = parsys["EXTRA1"].value
   itermap = parsys["ITERMAP"].value

#  See if we are using pre-cleaned data, in which case there is no need
#  to export the cleaned data on the first iteration. Note we need to