Example #1
def write_ip_NDF(data, bad_pixel_ref):
    """

    This function writes the array of IP parameter data out to an NDF file.

    Invocation:
        result = write_ip_NDF(data,bad_pixel_ref)

    Arguments:
        data = The array of IP parameter data.
        bad_pixel_ref = An NDF with bad pixel values to copy over.

    Returned Value:
        Writes the NDF and returns the NDG object that identifies it.
    """

    ndf_name_orig = NDG(1)
    indf = ndf.open(ndf_name_orig[0], 'WRITE', 'NEW')
    indf.new('_DOUBLE', 2, numpy.array([1, 1]), numpy.array([32, 40]))
    ndfmap = indf.map('DATA', '_DOUBLE', 'WRITE')
    ndfmap.numpytondf(data)
    indf.annul()

    # Copy bad pixels
    ndf_name = NDG(1)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        ndf_name_orig, bad_pixel_ref, ndf_name))
    return ndf_name
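A minimal usage sketch (hypothetical names; assumes the Starlink starutil environment providing NDG, ndf and invoke, plus an existing bad-pixel reference NDF):

    import numpy

    # Illustrative 32x40 array matching the pixel bounds hard-wired in
    # write_ip_NDF; real IP parameter values would come from the IP fit.
    ip_data = numpy.zeros([32, 40])
    out_ndf = write_ip_NDF(ip_data, bad_pixel_ref)  # bad_pixel_ref: existing NDF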
Example #2
def normer( model, test, cmin, newmodel ):
   """

   Normalise "model" to "test" returning result in "newmodel", so long as
   the "correlation factor" (determined by function blanker) of test and
   model is at least "cmin". Returns a boolean indicating whether the cmin
   value was reached.


   Invocation:
      result = normer( model, test, cmin, newmodel )

   Arguments:
      model = string
         The name of an existing NDF.
      test = string
         The name of an existing NDF.
      cmin = float
         The lowest acceptable absolute correlation factor.
      newmodel = string
         The name of an NDF to be created. The new NDF is only created if
         the cmin value is reached.

   Returned Value:
      A boolean indicating if the cmin value was reached.

   """

   btest = "{0}/btest".format(NDG.tempdir)
   if abs( blanker( test, model, btest ) ) > cmin:
      invoke( "$KAPPA_DIR/normalize in1={0} in2={2} out={1} device=!".format(model,newmodel,btest))
      return True
   else:
      return False
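A hedged usage sketch (assumes model and test are strings naming existing NDFs, and that the blanker, invoke and msg_out helpers are defined alongside this function):

    newmodel = "{0}/newmodel".format(NDG.tempdir)
    if normer(model, test, 0.3, newmodel):
        msg_out("Model normalised; result in {0}".format(newmodel))
    else:
        msg_out("Correlation below 0.3 - no normalised model created")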
Example #3
def normer(model, test, cmin, newmodel):
    """

   Normalise "model" to "test" returning result in "newmodel", so long as
   the "correlation factor" (determined by function blanker) of test and
   model is at least "cmin". Returns a boolean indicating whether the cmin
   value was reached.


   Invocation:
      result = normer( model, test, cmin, newmodel )

   Arguments:
      model = string
         The name of an existing NDF.
      test = string
         The name of an existing NDF.
      cmin = float
         The lowest acceptable absolute correlation factor.
      newmodel = string
         The name of an NDF to be created. The new NDF is only created if
         the cmin value is reached.

   Returned Value:
      A boolean indicating if the cmin value was reached.

   """

    btest = "{0}/btest".format(NDG.tempdir)
    if abs(blanker(test, model, btest)) > cmin:
        invoke("$KAPPA_DIR/normalize in1={0} in2={2} out={1} device=!".format(
            model, newmodel, btest))
        return True
    else:
        return False
Example #4
def write_ip_NDF(data,bad_pixel_ref):
    """

    This function writes the array of IP parameter data out to an NDF file.

    Invocation:
        result = write_ip_NDF(data,bad_pixel_ref)

    Arguments:
        data = The array of IP parameter data.
        bad_pixel_ref = An NDF with bad pixel values to copy over.

    Returned Value:
        Writes the NDF and returns the NDG object that identifies it.
    """

    ndf_name_orig = NDG(1)
    indf = ndf.open( ndf_name_orig[0], 'WRITE', 'NEW' )
    indf.new('_DOUBLE', 2, numpy.array([1,1]),numpy.array([32,40]))
    ndfmap = indf.map( 'DATA', '_DOUBLE', 'WRITE' )
    ndfmap.numpytondf( data )
    indf.annul()

    # Copy bad pixels
    ndf_name = NDG(1)
    invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ndf_name_orig,bad_pixel_ref,ndf_name) )
    return ndf_name
Example #5
def run_calcqu(input_data,config,harmonic):
    #  The following call to SMURF:CALCQU creates two HDS container files -
    #  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    #  these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out( "Calculating Q and U values for each bolometer...")
    invoke("$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
           "harmonic={4} fix".format(input_data,starutil.shell_quote(config),
                                     qcont,ucont,harmonic) )
    return (qcont,ucont)
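A usage sketch (illustrative parameter names; assumes a starutil ParSys supplying raw POL-2 time-series data and a configuration, as in the surrounding scripts):

    indata = parsys["IN"].value       # raw time-series data
    config = parsys["CONFIG"].value   # makemap-style configuration
    qcont, ucont = run_calcqu(indata, config, 4)  # e.g. fit the 4th harmonic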
Example #6
def run_calcqu(input_data, config, harmonic):
    #  The following call to SMURF:CALCQU creates two HDS container files -
    #  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
    #  these container files in the NDG temporary directory.
    qcont = NDG(1)
    qcont.comment = "qcont"
    ucont = NDG(1)
    ucont.comment = "ucont"

    msg_out("Calculating Q and U values for each bolometer...")
    invoke(
        "$SMURF_DIR/calcqu in={0} config=\"{1}\" lsqfit=no outq={2} outu={3} "
        "harmonic={4} fix".format(input_data, starutil.shell_quote(config),
                                  qcont, ucont, harmonic))
    return (qcont, ucont)
Example #7
def force_flat(ins, masks):
    """

   Forces the background regions to be flat in a set of Q or U images.

   Invocation:
      result = force_flat( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         any low frequency background structure is to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "ins". It should have the same size as "ins".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

    #  How many NDFs are we processing?
    nndf = len(ins)

    #  Blank out sources by copying the bad pixels from "masks" into "ins".
    msg_out("   masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    #  Smooth the blanked NDFs using a 3 pixel Gaussian. Set wlim so that
    #  small holes are filled in by the smoothing process.
    msg_out("   smoothing...")
    qs = NDG(ins)
    invoke("$KAPPA_DIR/gausmooth in={0} out={1} fwhm=3 wlim=0.5".format(
        qm, qs))

    #  Fill remaining big holes using artificial data.
    msg_out("   filling...")
    qf = NDG(ins)
    invoke("$KAPPA_DIR/fillbad in={0} out={1} niter=10 size=10 variance=no".
           format(qs, qf))

    #  Subtract the filled low frequency data from the original to create the
    #  returned images.
    msg_out("   removing low frequency background structure...")
    result = NDG(ins)
    invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins, qf, result))

    return result
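A sketch of a typical call (assuming, as elsewhere in these scripts, that qff is an NDG of Q images and qmask a matching NDG in which source pixels are bad; both names are illustrative):

    qflat = force_flat(qff, qmask)  # NDG of background-flattened Q images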
Example #8
def force_flat( ins, masks ):
   """

   Forces the background regions to be flat in a set of Q or U images.

   Invocation:
      result = force_flat( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         any low frequency background structure is to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "ins". It should have the same size as "ins".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

#  How many NDFs are we processing?
   nndf = len( ins )

#  Blank out sources by copying the bad pixels from "masks" into "ins".
   msg_out( "   masking...")
   qm = NDG( ins )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins,masks,qm) )

#  Smooth the blanked NDFs using a 3 pixel Gaussian. Set wlim so that
#  small holes are filled in by the smoothing process.
   msg_out( "   smoothing...")
   qs = NDG( ins )
   invoke( "$KAPPA_DIR/gausmooth in={0} out={1} fwhm=3 wlim=0.5".format(qm,qs) )

#  Fill remaining big holes using artificial data.
   msg_out( "   filling...")
   qf = NDG( ins )
   invoke( "$KAPPA_DIR/fillbad in={0} out={1} niter=10 size=10 variance=no".format(qs,qf) )

#  Subtract the filled low frequency data from the original to create the
#  returned images.
   msg_out( "   removing low frequency background structure...")
   result = NDG( ins )
   invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins,qf,result) )

   return result
Example #9
   retain = parsys["RETAIN"].value

#  The following call to SMURF:CALCQU creates two HDS container files -
#  one holding a set of Q NDFs and the other holding a set of U NDFs. Create
#  these container files in the NDG temporary directory.
   qcont = NDG(1)
   qcont.comment = "qcont"
   ucont = NDG(1)
   ucont.comment = "ucont"

#  Create a set of Q images and a set of U images. These are put into the HDS
#  container files "q_TMP.sdf" and "u_TMP.sdf". Each image contains Q or U
#  values derived from a short section of raw data during which each bolometer
#  moves less than half a pixel.
   msg_out( "Calculating Q and U values for each bolometer...")
   invoke("$SMURF_DIR/calcqu in={0} config={1} outq={2} outu={3} fix".
          format(indata,config,qcont,ucont) )

#  Remove spikes from the Q and U images. The cleaned NDFs are written to
#  temporary NDFs specified by two new NDG objects "qff" and "uff", which
#  inherit their size from the existing groups "qcont" and "ucont".
   msg_out( "Removing spikes from bolometer Q and U values...")
   qff = NDG(qcont)
   qff.comment = "qff"
   uff = NDG(ucont)
   uff.comment = "uff"
   invoke( "$KAPPA_DIR/ffclean in={0} out={1} box=3 clip=\[2,2,2\]"
           .format(qcont,qff) )
   invoke( "$KAPPA_DIR/ffclean in={0} out={1} box=3 clip=\[2,2,2\]"
           .format(ucont,uff) )

#  The next steps are performed independently for each subarray.
Example #10
        parsys["INSTRUMENT"].default = deflt
        parsys["INSTRUMENT"].noprompt = True

    #  Get the JCMT instrument. Quote the string so that it can be used as
    #  a command line argument when running an atask from the shell.
    instrument = starutil.shell_quote(parsys["INSTRUMENT"].value)
    msg_out("Updating tiles for {0} data".format(instrument))

    #  See if temp files are to be retained.
    retain = parsys["RETAIN"].value

    #  Set up the dynamic default for parameter "JSA". This is True if the
    #  dump of the WCS FrameSet in the first supplied NDF contains the string
    #  "HPX".
    prj = invoke(
        "$KAPPA_DIR/wcsattrib ndf={0} mode=get name=projection".format(
            indata[0]))
    parsys["JSA"].default = True if prj.strip() == "HEALPix" else False

    #  See if input NDFs are on the JSA all-sky pixel grid.
    jsa = parsys["JSA"].value
    if not jsa:
        msg_out("The supplied NDFs will first be resampled onto the JSA "
                "all-sky pixel grid")

    #  Report the tile directory.
    tiledir = os.getenv('JSA_TILE_DIR')
    if tiledir:
        msg_out("Tiles will be written to {0}".format(tiledir))
    else:
        msg_out("Environment variable JSA_TILE_DIR is not set!")
Example #11
      fred = loadndg( "IN", True )
      if indata != fred:
         raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                          "refers to different time-series data".format(restart) )
      msg_out( "Re-using data in {0}".format(restart) )

#  Initialise the starlink random number seed to a known value so that
#  results are repeatable.
   os.environ["STAR_SEED"] = "65"

#  Flat field the supplied template data
   ff = loadndg( "FF" )
   if not ff:
      ff = NDG(indata)
      msg_out( "Flatfielding template data...")
      invoke("$SMURF_DIR/flatfield in={0} out={1}".format(indata,ff) )
      ff = ff.filter()
      savendg( "FF", ff  )
   else:
      msg_out( "Re-using old flatfielded template data...")

#  If required, create new artificial I, Q and U maps.
   if newart:
      msg_out( "Creating new artificial I, Q and U maps...")

#  Get the parameters defining the artificial data
      ipeak = parsys["IPEAK"].value
      ifwhm = parsys["IFWHM"].value
      pol = parsys["POL"].value

#  Determine the spatial extent of the data on the sky.
Example #12
   if deflt is not None:
      parsys["INSTRUMENT"].default = deflt
      parsys["INSTRUMENT"].noprompt = True

#  Get the JCMT instrument. Quote the string so that it can be used as
#  a command line argument when running an atask from the shell.
   instrument = starutil.shell_quote( parsys["INSTRUMENT"].value )
   msg_out( "Updating tiles for {0} data".format(instrument) )

#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  Set up the dynamic default for parameter "JSA". This is True if the
#  dump of the WCS FrameSet in the first supplied NDF contains the string
#  "HPX".
   prj = invoke("$KAPPA_DIR/wcsattrib ndf={0} mode=get name=projection".format(indata[0]) )
   parsys["JSA"].default = True if prj.strip() == "HEALPix" else False

#  See if input NDFs are on the JSA all-sky pixel grid.
   jsa = parsys["JSA"].value
   if not jsa:
      msg_out( "The supplied NDFs will first be resampled onto the JSA "
               "all-sky pixel grid" )

#  Report the tile directory.
   tiledir = os.getenv( 'JSA_TILE_DIR' )
   if tiledir:
      msg_out( "Tiles will be written to {0}".format(tiledir) )
   else:
      msg_out( "Environment variable JSA_TILE_DIR is not set!" )
      msg_out( "Tiles will be written to the current directory ({0})".format(os.getcwd()) )
Example #13
def remove_corr(ins, masks):
    """

   Masks the supplied set of Q or U images and then looks for and removes
   correlated components in the background regions.

   Invocation:
      result = remove_corr( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         correlated background components are to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "in". It should have the same size as "in".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

    #  How many NDFs are we processing?
    nndf = len(ins)

    #  Blank out sources by copying the bad pixels from "masks" into "ins". We refer
    #  to "q" below, but the same applies whether processing Q or U.
    msg_out("   masking...")
    qm = NDG(ins)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins, masks, qm))

    #  Find the most correlated pair of images. We use the basic correlation
    #  coefficient calculated by kappa:scatter for this.
    msg_out("   Finding most correlated pair of images...")
    cmax = 0
    for i in range(0, nndf - 1):
        for j in range(i + 1, nndf):
            invoke("$KAPPA_DIR/scatter in1={0} in2={1} device=!".format(
                qm[i], qm[j]))
            c = starutil.get_task_par("corr", "scatter")
            if abs(c) > abs(cmax):
                cmax = c
                cati = i
                catj = j

    if abs(cmax) < 0.3:
        msg_out("   No correlated images found!")
        return ins

    msg_out("   Correlation for best pair of images = {0}".format(cmax))

    #  Find images that are reasonably correlated to the pair found above,
    #  and coadd them to form a model for the correlated background
    #  component. Note, the holes left by the masking are filled in by the
    #  coaddition using background data from other images.
    msg_out("   Forming model...")

    #  Form the average of the two most correlated images, first normalising
    #  them to a common scale so that they both have equal weight.
    norm = "{0}/norm".format(NDG.tempdir)
    if not normer(qm[cati], qm[catj], 0.3, norm):
        norm = qm[cati]

    mslist = NDG([qm[catj], norm])
    ave = "{0}/ave".format(NDG.tempdir)
    invoke(
        "$CCDPACK_DIR/makemos in={0} method=mean genvar=no usevar=no out={1}".
        format(mslist, ave))

    #  Loop round each image finding the correlation factor of the image and
    #  the above average image.
    temp = "{0}/temp".format(NDG.tempdir)
    nlist = []
    ii = 0
    for i in range(0, nndf):
        c = blanker(qm[i], ave, temp)

        #  If the correlation is high enough, normalize the image to the average
        #  image and then include the normalised image in the list of images to be
        #  coadded to form the final model.
        if abs(c) > 0.3:
            tndf = "{0}/t{1}".format(NDG.tempdir, ii)
            ii += 1
            invoke(
                "$KAPPA_DIR/normalize in1={1} in2={2} out={0} device=!".format(
                    tndf, temp, ave))
            nlist.append(tndf)

    if ii == 0:
        msg_out("   No secondary correlated images found!")
        return ins

    msg_out(
        "   Including {0} secondary correlated images in the model.".format(
            ii))

    #  Coadd the images created above to form the model of the correlated
    #  background component. Fill any remaining bad pixels with artificial data.
    model = "{0}/model".format(NDG.tempdir)
    included = NDG(nlist)
    invoke(
        "$CCDPACK_DIR/makemos in={0} method=mean usevar=no genvar=no out={1}".
        format(included, temp))
    invoke("$KAPPA_DIR/fillbad in={1} variance=no out={0} size=10 niter=10".
           format(model, temp))

    #  Now estimate how much of the model is present in each image and remove it.
    msg_out("   Removing model...")
    temp2 = "{0}/temp2".format(NDG.tempdir)
    qnew = NDG(ins)
    nbetter = 0
    for i in range(0, nndf):

        #  Try to normalise the model to the current image. This fails if the
        #  correlation between them is too low.
        if normer(model, qm[i], 0.3, temp):

            #  Remove the scaled model from the image.
            invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(
                ins[i], temp, temp2))

            #  We now check that removing the correlated background component has in
            #  fact made the image flatter (poor fits etc can mean that images that
            #  are poorly correlated to the model have a large amount of model
            #  removed and so make the image less flat). Find the standard deviation
            #  of the data in the original image and in the corrected image.
            invoke("$KAPPA_DIR/stats {0} quiet".format(ins[i]))
            oldsig = get_task_par("sigma", "stats")

            invoke("$KAPPA_DIR/stats {0} quiet".format(temp2))
            newsig = get_task_par("sigma", "stats")

            #  If the correction has made the image flatter, copy it to the returned NDG.
            if newsig < oldsig:
                nbetter += 1
                invoke("$KAPPA_DIR/ndfcopy in={1} out={0}".format(
                    qnew[i], temp2))
            else:
                invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(
                    ins[i], qnew[i]))

        #  If the input image is poorly correlated to the model, return the input
        #  image unchanged.
        else:
            invoke("$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i], qnew[i]))

    msg_out("   {0} out of {1} images have been improved.".format(
        nbetter, nndf))

    #  Return the corrected images.
    return qnew
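A hedged usage sketch (same illustrative NDG names as for force_flat above; remove_corr returns the input group unchanged if no sufficiently correlated images are found):

    qcln = remove_corr(qff, qmask)  # qff, qmask: hypothetical NDG groups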
Example #14
#  It's a good idea to get parameter values early if possible, in case
#  the user goes off for a coffee whilst the script is running and does not
#  see a later parameter prompt or error.
   indata = parsys["IN"].value
   retain = parsys["RETAIN"].value

#  Erase any NDFs holding cleaned data or pointing data from previous runs.
   for path in glob.glob("s*_con_res_cln.sdf"):
      myremove(path)
      base = path[:-16]
      myremove("{0}_lat.sdf".format(base))
      myremove("{0}_lon.sdf".format(base))

#  Use sc2concat to concatenate and flatfield the data.
   invoke("$SMURF_DIR/sc2concat in={0} out='./*_umap'".format(indata))

#  Use makemap to generate quality and pointing info.
   concdata = NDG("*_umap")
   confname = NDG.tempfile()
   fd = open(confname,"w")
   fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
   fd.write("numiter=1\n")
   fd.write("exportclean=1\n")
   fd.write("exportlonlat=1\n")
   fd.write("dcfitbox=0\n")
   fd.write("noisecliphigh=0\n")
   fd.write("order=0\n")
   fd.write("downsampscale=0\n")
   fd.close()
Example #15
   indata = parsys["IN"].value
   outdata = parsys["OUT"].value
   niter = parsys["NITER"].value
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   retain = parsys["RETAIN"].value
   itermap = parsys["ITERMAP"].value

#  If requested, use numiter from the config file.
   if niter == 0:
      niter = int( invoke( "$KAPPA_DIR/configecho name=numiter config={0} "
                           "defaults=$SMURF_DIR/smurf_makemap.def "
                           "select=\"\'450=0,850=0\'\"".format(config)))

#  If iterating to convergence, get the maximum allowed normalised map
#  change between iterations, and set the number of iterations positive.
   if niter < 0:
      niter = -niter
      maptol = float( invoke( "$KAPPA_DIR/configecho name=maptol config={0} "
                           "defaults=$SMURF_DIR/smurf_makemap.def "
                           "select=\"\'450=0,850=0\'\"".format(config)))
   else:
      maptol = 0

   converged = False

#  Determine the value of the (AST,COM,FLT).ZERO_NITER, ZERO_NOTLAST and
Example #16
def remove_corr( ins, masks ):
   """

   Masks the supplied set of Q or U images and then looks for and removes
   correlated components in the background regions.

   Invocation:
      result = remove_corr( ins, masks )

   Arguments:
      ins = NDG
         An NDG object specifying a group of Q or U images from which
         correlated background components are to be removed.
      masks = NDG
         An NDG object specifying a corresponding group of Q or U images
         in which source pixels are bad. These are only used to mask the
         images specified by "in". It should have the same size as "in".

   Returned Value:
      A new NDG object containing the group of corrected Q or U images.

   """

#  How many NDFs are we processing?
   nndf = len( ins )

#  Blank out sources by copying the bad pixels from "masks" into "ins". We refer
#  to "q" below, but the same applies whether processing Q or U.
   msg_out( "   masking...")
   qm = NDG( ins )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(ins,masks,qm) )

#  Find the most correlated pair of images. We use the basic correlation
#  coefficient calculated by kappa:scatter for this.
   msg_out( "   Finding most correlated pair of images...")
   cmax = 0
   for i in range(0,nndf-1):
      for j in range(i + 1,nndf):
         invoke( "$KAPPA_DIR/scatter in1={0} in2={1} device=!".format(qm[i],qm[j]) )
         c = starutil.get_task_par( "corr", "scatter" )
         if abs(c) > abs(cmax):
            cmax = c
            cati = i
            catj = j

   if abs(cmax) < 0.3:
      msg_out("   No correlated images found!")
      return ins

   msg_out( "   Correlation for best pair of images = {0}".format( cmax ) )

#  Find images that are reasonably correlated to the pair found above,
#  and coadd them to form a model for the correlated background
#  component. Note, the holes left by the masking are filled in by the
#  coaddition using background data from other images.
   msg_out( "   Forming model...")

#  Form the average of the two most correlated images, first normalising
#  them to a common scale so that they both have equal weight.
   norm = "{0}/norm".format(NDG.tempdir)
   if not normer( qm[cati], qm[catj], 0.3, norm ):
      norm = qm[cati]

   mslist = NDG( [ qm[catj], norm ] )
   ave = "{0}/ave".format(NDG.tempdir)
   invoke( "$CCDPACK_DIR/makemos in={0} method=mean genvar=no usevar=no out={1}".format(mslist,ave) )

#  Loop round each image finding the correlation factor of the image and
#  the above average image.
   temp = "{0}/temp".format(NDG.tempdir)
   nlist = []
   ii = 0
   for i in range(0,nndf):
      c = blanker( qm[i], ave, temp )

#  If the correlation is high enough, normalize the image to the average
#  image and then include the normalised image in the list of images to be
#  coadded to form the final model.
      if abs(c) > 0.3:
         tndf = "{0}/t{1}".format(NDG.tempdir,ii)
         ii += 1
         invoke( "$KAPPA_DIR/normalize in1={1} in2={2} out={0} device=!".format(tndf,temp,ave))
         nlist.append( tndf )

   if ii == 0:
      msg_out("   No secondary correlated images found!")
      return ins

   msg_out("   Including {0} secondary correlated images in the model.".format(ii) )

#  Coadd the images created above to form the model of the correlated
#  background component. Fill any remaining bad pixels with artificial data.
   model = "{0}/model".format(NDG.tempdir)
   included = NDG( nlist )
   invoke( "$CCDPACK_DIR/makemos in={0} method=mean usevar=no genvar=no out={1}".format( included, temp ) )
   invoke( "$KAPPA_DIR/fillbad in={1} variance=no out={0} size=10 niter=10".format(model,temp) )

#  Now estimate how much of the model is present in each image and remove it.
   msg_out("   Removing model...")
   temp2 = "{0}/temp2".format(NDG.tempdir)
   qnew = NDG(ins)
   nbetter = 0
   for i in range(0,nndf):

#  Try to normalise the model to the current image. This fails if the
#  correlation between them is too low.
      if normer( model, qm[i], 0.3, temp ):

#  Remove the scaled model from the image.
         invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(ins[i],temp,temp2) )

#  We now check that removing the correlated background component has in
#  fact made the image flatter (poor fits etc can mean that images that
#  are poorly correlated to the model have a large amount of model
#  removed and so make the image less flat). Find the standard deviation
#  of the data in the original image and in the corrected image.
         invoke( "$KAPPA_DIR/stats {0} quiet".format(ins[i]) )
         oldsig = get_task_par( "sigma", "stats" )

         invoke( "$KAPPA_DIR/stats {0} quiet".format(temp2) )
         newsig = get_task_par( "sigma", "stats" )

#  If the correction has made the image flatter, copy it to the returned NDG.
         if newsig < oldsig:
            nbetter += 1
            invoke( "$KAPPA_DIR/ndfcopy in={1} out={0}".format(qnew[i],temp2) )
         else:
            invoke( "$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i],qnew[i]) )

#  If the input image is poorly correlated to the model, return the input
#  image unchanged.
      else:
         invoke( "$KAPPA_DIR/ndfcopy in={0} out={1}".format(ins[i],qnew[i]) )

   msg_out( "   {0} out of {1} images have been improved.".format(nbetter,nndf) )

#  Return the corrected images.
   return qnew
Example #17
def match( ref, imasked, fwhm1=4, fwhm2=100 ):

#  To avoid creating hundreds of temp NDFs, re-use the same ones for each
#  FWHM.
   lof = NDG(1)
   hif = NDG(1)
   iscaled = NDG(1)
   residuals = NDG(1)

#  Create a logarithmically spaced list of 5 FWHM values, in pixels,
#  between the supplied upper and lower FWHM limits. Try each smoothing FWHM
#  in turn, finding the one that gives the best match (i.e. lowest RMS
#  residuals) between high-pass filtered ref image and new I map. On each pass,
#  low frequencies are removed from the ref image using the current FWHM,
#  and the filtered ref image is compared to the new I map (allowing for
#  a degradation in FCF).
   minrms = 1.0E30
   result = (0.0,0.0)
   previous_fwhm = -1
   fwhm1_next = -1
   fwhm2_next = 0
   for fwhm in np.logspace( math.log10(fwhm1), math.log10(fwhm2), 5 ):

#  If required, record the current FWHM value as the upper limit for this
#  function on the next level of recursion.
      if fwhm2_next == -1:
         fwhm2_next = fwhm

#  If an error occurs estimating the RMS for a specific FWHM, ignore the
#  FWHM and pass on to the next.
      try:

#  High-pass filter the ref image by smoothing it with a Gaussian of the
#  current FWHM and then subtracting off the smoothed version.
         invoke("$KAPPA_DIR/gausmooth in={0} out={1} fwhm={2}".
                format( ref, lof, fwhm ))
         invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".
                format( ref, lof, hif ))

#  We will now use kappa:normalize to do a least squares fit between the
#  pixel values in the filtered ref image and the corresponding pixel values
#  in the new I map. This gives us the FCF degradation factor for the I
#  map (the gradient of the fit), and scales the I map so that it has the same
#  normalisation as the ref map. The scaling information is in the high
#  data values (the source regions), and the fitting process will be
#  confused if we include lots of background noise regions, so we use the
#  masked I map instead of the full I map. We also tell kappa:normalize
#  to use only pixels that have a ref value above 2 times the noise value
#  in ref map (to exclude any noise pixels that have been included in the
#  masked I map). So first find the maximum value in the filtered ref map
#  (the upper data limit for kappa:normalize).
         invoke( "$KAPPA_DIR/stats ndf={0}".format(hif) )
         highlimit = float( get_task_par( "MAXIMUM", "stats" ) )

#  Get the noise level in the filtered ref map. This gives us the lower
#  data limit for kappa:normalize. The filtered ref map has no low
#  frequencies and so will be basically flat, so we can just use the standard
#  deviation of the pixel values as the noise. But we do 3 iterations of
#  sigma clipping to exclude the bright source regions.
         invoke( "$KAPPA_DIR/stats ndf={0} clip=\[3,3,3\]".format(hif) )
         noise = float( get_task_par( "SIGMA", "stats" ) )

#  Now use kappa:normalize to do the fit, using only ref values between
#  2*noise and highlimit. The slope gives the FCF degradation factor,
#  and the offset indicates the difference in bowling between the filtered
#  ref map and the I map (we do not use the offset).
         invoke( "$KAPPA_DIR/normalize in1={0} in2={1} out={2} device=! "
                 "datarange=\[{3},{4}\]".format(imasked,hif,iscaled,2*noise,
                                                highlimit))
         degfac = float( get_task_par( "SLOPE", "normalize" ) )

#  Now we have a version of the I map that is scaled so that it looks
#  like the filtered ref map. Get the residuals between the filtered ref
#  map and the scaled I map. Turn these residuals into SNR values by dividing
#  them by the noise level in the filtered ref map, and then get the RMS
#  of the residuals. We convert the residuals to SNR values because, if the
#  ref map and I map were identical, heavier filtering would reduce the
#  noise, and thus the RMS of the residuals. We want to minimise the RMS
#  of the residuals, and so without conversion to SNR, the minimum would
#  always be found at the heaviest possible filtering.
         invoke( "$KAPPA_DIR/maths exp=\"'(ia-ib)/pa'\" ia={0} ib={1} pa={2} out={3}".
                 format(hif,iscaled,noise,residuals))

#  Get the RMS of the residuals.
         invoke( "$KAPPA_DIR/stats ndf={0}".format(residuals) )
         mean = float( get_task_par( "MEAN", "stats" ) )
         sigma = float( get_task_par( "SIGMA", "stats" ) )
         rms = math.sqrt( mean*mean + sigma*sigma )

#  If this is the lowest RMS found so far, remember it - together with
#  the FWHM and degradation factor.
         if rms < minrms:
            minrms = rms
            result = (degfac,fwhm)
            fwhm1_next = previous_fwhm
            fwhm2_next = -1

#  If an error occurs estimating the RMS for a specific FWHM, ignore the
#  FWHM and pass on to the next.
      except starutil.AtaskError:
         rms = None

#  Record the current FWHM value for use on the next pass.
      previous_fwhm = fwhm

#  Progress report (skipped if this FWHM failed)....
      if rms is not None:
         msg_out("   Smoothing with FWHM = {0} pixels gives RMS = {1}".format(fwhm,rms))

#  If the range of FWHM values used by this invocation is greater than 1,
#  invoke this function recursively to find the best FWHM within a smaller
#  range centred on the best FWHM.
   if minrms < 1.0E30 and (fwhm2 - fwhm1) > 1:
      if fwhm1_next <= 0:
         fwhm1_next = fwhm1
      if fwhm2_next <= 0:
         fwhm2_next = fwhm2
      result = match( ref, imasked, fwhm1_next, fwhm2_next )


   return result
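A sketch of calling match (assumes ref and imasked are strings naming existing NDFs; the default search covers smoothing FWHMs from 4 to 100 pixels):

    degfac, fwhm = match(ref, imasked)
    msg_out("FCF degradation {0} at best FWHM {1} pixels".format(degfac, fwhm))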
Example #18
        if system == "ICRS":
            parsys["CENTRE1"].prompt = "RA at centre of required circle"
            parsys["CENTRE2"].prompt = "Dec at centre of required circle"
        else:
            parsys[
                "CENTRE1"].prompt = "Galactic longitude at centre of required circle"
            parsys[
                "CENTRE2"].prompt = "Galactic latitude at centre of required circle"

        centre1 = parsys["CENTRE1"].value
        if centre1 is not None:
            centre2 = parsys["CENTRE2"].value
            radius = parsys["RADIUS"].value

            frame = NDG.tempfile()
            invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(
                system, frame))

            invoke("$ATOOLS_DIR/astunformat {0} 1 {1}".format(frame, centre1))
            cen1 = starutil.get_task_par("DVAL", "astunformat")
            invoke("$ATOOLS_DIR/astunformat {0} 2 {1}".format(frame, centre2))
            cen2 = starutil.get_task_par("DVAL", "astunformat")

            region = NDG.tempfile()
            invoke(
                "$ATOOLS_DIR/astcircle {0} 1 \[{1},{2}\] {3} ! ! {4}".format(
                    frame, cen1, cen2, math.radians(radius / 60.0), region))

    #  If a Region was supplied, note we do not yet have the coordinates of
    #  the centre of the required region, and note if the Region is defined by
    #  an NDF.
    else:
Example #19
        elif cval == "DAS":
            instrument = "DAS"

    #  If so, set the default for the INSTRUMENT parameter and prevent the
    #  user being prompted for a value.
    if instrument is not None:
        parsys["INSTRUMENT"].default = instrument
        parsys["INSTRUMENT"].noprompt = True

    #  Get the chosen instrument.
    instrument = parsys["INSTRUMENT"].value
    instrument = starutil.shell_quote(instrument)

    #  Get a list of the tiles that overlap the supplied NDF.
    invoke("$SMURF_DIR/jsatilelist in={0} instrument={1} quiet".format(
        inndf, instrument))
    tiles = starutil.get_task_par("TILES", "jsatilelist")

    #  JSADICER requires the input array to be gridded on the JSA all-sky
    #  pixel grid. This is normally an HPX projection, but if the supplied
    #  NDF straddles a discontinuity in the HPX projection then we need to
    #  use a different flavour of HPX (either an HPX projection centred on
    #  RA=12h or an XPH (polar HEALPix) projection centred on the north or
    #  south pole). The above call to jsatilelist will have determined the
    #  appropriate projection to use, so get it.
    proj = starutil.get_task_par("PROJ", "jsatilelist")

    #  Create a file holding the FITS-WCS header for the first tile, using
    #  the type of projection determined above.
    head = "{0}/header".format(NDG.tempdir)
    invoke("$SMURF_DIR/jsatileinfo itile={0} instrument={1} header={2} "
Example #20
    #  Get the radius of the map.
    radius = 0.5 * math.sqrt(map_hght * map_hght + map_wdth * map_wdth)

    #  Create a Frame describing the coordinate system.
    if tracksys == "GAL":
        sys = "galactic"
    elif tracksys == "J2000":
        sys = "fk5"
    else:
        raise starutil.InvalidParameterError(
            "The TRACKSYS header in {0} is {1} "
            "- should be GAL or J2000".format(indata, tracksys))

    frame = NDG.tempfile()
    invoke("$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(sys, frame))

    #  Create a Circle describing the map.
    if region is None:
        region = NDG.tempfile()
        display = True
    else:
        display = False

    invoke(
        "$ATOOLS_DIR/astcircle frame={0} form=1 centre=\[{1},{2}\] point={3} "
        "unc=! options=! result={4}".format(frame, basec1, basec2, radius,
                                            region))

    if display:
        f = open(region, "r")
Example #21
      ref = "!"

#  If no Q and U values were supplied, create a set of Q and U time
#  streams from the supplied analysed intensity time streams. Put them in
#  the QUDIR directory, or the temp directory if QUDIR is null.
   if inqu is None:
      north = parsys["NORTH"].value
      qudir =  parsys["QUDIR"].value
      if not qudir:
         qudir = NDG.tempdir
      elif not os.path.exists(qudir):
         os.makedirs(qudir)

      msg_out( "Calculating Q, U and I time streams for each bolometer...")
      invoke("$SMURF_DIR/calcqu in={0} lsqfit=yes config=def outq={1}/\*_QT "
             "outu={1}/\*_UT outi={1}/\*_IT fix=yes north={2}".
             format( indata, qudir, north ) )

#  Get groups listing the time series files created by calcqu.
      qts = NDG( "{0}/*_QT".format( qudir ) )
      uts = NDG( "{0}/*_UT".format( qudir ) )
      its = NDG( "{0}/*_IT".format( qudir ) )

#  If pre-calculated Q and U values were supplied, identify the Q, U and I
#  files.
   else:
      msg_out( "Using pre-calculating Q, U and I values...")

      qndfs = []
      undfs = []
      indfs = []
Example #22
      iref = "!"
   qref = parsys["QREF"].value
   uref = parsys["UREF"].value

#  If no Q and U values were supplied, create a set of Q and U time
#  streams from the supplied analysed intensity time streams. Put them in
#  the QUDIR directory, or the temp directory if QUDIR is null.
   if inqu is None:
      qudir =  parsys["QUDIR"].value
      if not qudir:
         qudir = NDG.tempdir
      elif not os.path.exists(qudir):
         os.makedirs(qudir)

      msg_out( "Calculating Q and U time streams for each bolometer...")
      invoke("$SMURF_DIR/calcqu in={0} lsqfit=yes config=def outq={1}/\*_QT "
             "outu={1}/\*_UT fix=yes".format( indata, qudir ) )

#  Get groups listing the time series files created by calcqu.
      qts = NDG( "{0}/*_QT".format( qudir ) )
      uts = NDG( "{0}/*_UT".format( qudir ) )

#  If pre-calculated Q and U values were supplied, identify the Q and U
#  files.
   else:
      msg_out( "Using pre-calculating Q and U values...")

      qndfs = []
      undfs = []
      for ndf in inqu:
         invoke("$KAPPA_DIR/ndftrace ndf={0} quiet".format(ndf) )
         label = starutil.get_task_par( "LABEL", "ndftrace" )
Example #23
                        ipprms_dc_Q[row_val,col_val] = ipprms.x[2]
                        ipprms_dc_U[row_val,col_val] = ipprms.x[3]
                        chi2Vals[row_val,col_val] = ipprms.fun
                    else:
                        returnCode[row_val,col_val] = False

            # Write NDFs.
            out_p0 = write_ip_NDF(ip_prms['Pf_'+a[-1]],bad_pixel_ref)
            out_p1 = write_ip_NDF(ipprms_pol_screen,bad_pixel_ref)
            out_c0 = write_ip_NDF(ipprms_Co,bad_pixel_ref)
            out_angc = write_ip_NDF(ip_prms['Theta_ip_'+a[-1]],bad_pixel_ref)

            # Fill any bad pixels with smooth function to match surrounding pixels
            msg_out( "Filling in bad pixel values for {0} bolometer IP parameters...".format(a))
            out_p0_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p0,out_p0_filled) )
            out_p1_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_p1,out_p1_filled) )
            out_c0_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_c0,out_c0_filled) )
            out_angc_filled = NDG(1)
            invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15".format(out_angc,out_angc_filled) )

            # Copy individual NDFs to single output.
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p0,outdata+'_preclean.'+str.lower(a)+'p0'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p1,outdata+'_preclean.'+str.lower(a)+'p1'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_c0,outdata+'_preclean.'+str.lower(a)+'c0'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_angc,outdata+'_preclean.'+str.lower(a)+'angc'))

            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p0_filled,outdata+'.'+str.lower(a)+'p0'))
            invoke( "$KAPPA_DIR/ndfcopy {0} {1}".format(out_p1_filled,outdata+'.'+str.lower(a)+'p1'))
Example #24
def blanker( test, model, newtest ):
   """

   Blank out pixels in "test" that are not well correlated with "model",
   returning result in newtest.

   Invocation:
      result =  blanker( test, model, newtest )

   Arguments:
      test = string
         The name of an existing NDF.
      model = string
         The name of an existing NDF.
      newtest = string
         The name of an NDF to be created.

   Returned Value:
      A value between +1 and -1 indicating the degree of correlation
      between the model and test.

   """

#  We want statistics of pixels that are present in both test and model,
#  so first form a mask by adding them together, and then copy bad pixels
#  from this mask into test and model.
   mask = "{0}/mask".format(NDG.tempdir)
   tmask = "{0}/tmask".format(NDG.tempdir)
   mmask = "{0}/mmask".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/add in1={0} in2={1} out={2}".format(test,model,mask) )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(test,mask,tmask) )
   invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(model,mask,mmask) )

#  Get the mean and standard deviation of the remaining pixels in the
#  test NDF.
   invoke( "$KAPPA_DIR/stats {0} clip=\[3,3,3\] quiet".format(tmask) )
   tmean = get_task_par( "mean", "stats" )
   tsigma = get_task_par( "sigma", "stats" )

#  Also get the number of good pixels in the mask.
   numgood1 = float( get_task_par( "numgood", "stats" ) )

#  Get the mean and standard deviation of the remaining pixels in the
#  model NDF.
   invoke( "$KAPPA_DIR/stats {0} clip=\[3,3,3\] quiet".format(mmask) )
   mmean = get_task_par( "mean", "stats" )
   msigma = get_task_par( "sigma", "stats" )

#  Normalize them both to have a mean of zero and a standard deviation of
#  unity.
   tnorm = "{0}/tnorm".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/maths exp='(ia-pa)/pb' ia={2} pa={0} pb={1} "
           "out={3}".format(tmean,tsigma,tmask,tnorm))

   mnorm = "{0}/mnorm".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/maths exp='(ia-pa)/pb' ia={2} pa={0} pb={1} "
           "out={3}".format(mmean,msigma,mmask,mnorm))

#  Find the difference between them.
   diff = "{0}/diff".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(mnorm,tnorm,diff) )

#  Remove pixels that differ by more than 0.5 standard deviations.
   mtmask = "{0}/mtmask".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/thresh in={0} thrlo=-0.5 newlo=bad thrhi=0.5 "
           "newhi=bad out={1}".format(diff,mtmask) )

#  See how many pixels remain (i.e. pixels that are very similar in the
#  test and model NDFs).
   invoke( "$KAPPA_DIR/stats {0} quiet".format(mtmask) )
   numgood2 = float( get_task_par( "numgood", "stats" ) )

#  It may be that the two NDFs are anti-correlated. To test for this we
#  negate the model and do the above test again.
   mnormn = "{0}/mnormn".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/cmult in={0} scalar=-1 out={1}".format(mnorm,mnormn) )

   diffn = "{0}/diffn".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(mnormn,tnorm,diffn ))

   mtmaskn = "{0}/mtmaskn".format(NDG.tempdir)
   invoke( "$KAPPA_DIR/thresh in={0} thrlo=-0.5 newlo=bad thrhi=0.5 "
           "newhi=bad out={1}".format(diffn,mtmaskn) )

   invoke( "$KAPPA_DIR/stats {0} quiet".format(mtmaskn) )
   numgood2n = float( get_task_par( "numgood", "stats" ) )

#  If we get more similar pixels by negating the model, the NDFs are
#  anti-correlated.
   if numgood2n > numgood2:

#  Take a copy of the supplied test NDF, masking out pixels that are not
#  anti-similar to the corresponding model pixels.
      invoke( "$KAPPA_DIR/copybad in={0} ref={2} out={1}".format(test,newtest,mtmaskn) )

#  The returned correlation factor is the ratio of the number of
#  anti-similar pixels to the total number of pixels which the two NDFs
#  have in common. But if there is not much difference between the number
#  of similar and anti-similar pixels, we assume there is no correlation.
      if numgood2n > 1.4*numgood2:
         res = -(numgood2n/numgood1)
      else:
         res = 0.0

#  If we get more similar pixels without negating the model, the NDFs are
#  correlated. Do the equivalent to the above.
   else:
      invoke( "$KAPPA_DIR/copybad in={0} ref={2} out={1}".format(test,newtest,mtmask) )
      if numgood2 > 1.4*numgood2n:
         res = numgood2/numgood1
      else:
         res = 0.0

#  If there are very few good pixels in common return zero correlation.
   if numgood1 < 150:
      res = 0.0

#  Return the correlation factor.
   return res
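A hedged usage sketch (hypothetical NDF names; blanker both writes the masked copy to newtest and returns the correlation factor):

    newtest = "{0}/newtest".format(NDG.tempdir)
    c = blanker(test, model, newtest)
    msg_out("Correlation between test and model: {0}".format(c))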
Example #25
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   extra1 = parsys["EXTRA1"].value
   itermap = parsys["ITERMAP"].value

#  See if we are using pre-cleaned data, in which case there is no need
#  to export the cleaned data on the first iteration. Note we need to
#  convert the string returned by "invoke" to an int explicitly, otherwise
#  the equality is never satisfied and we end up assuming that the raw
#  data has been precleaned, even if it hasn't been precleaned.
   if int( invoke( "$KAPPA_DIR/configecho name=doclean config={0} "
              "defaults=$SMURF_DIR/smurf_makemap.def "
              "select=\"\'450=0,850=1\'\" defval=1".format(config))) == 1:
      precleaned = False
   else:
      precleaned = True

#  If requested, use numiter from the config file. Arbitrarily choose 850 um
#  values for the waveband-specific parameters, but these are not actually used.
   if niter == 0:
      niter = int( invoke( "$KAPPA_DIR/configecho name=numiter config={0} "
                           "defaults=$SMURF_DIR/smurf_makemap.def "
                           "select=\"\'450=0,850=1\'\" defval=5".format(config)))

#  If iterating to convergence, get the maximum allowed normalised map
#  change between iterations, and set the number of iterations positive.
   if niter < 0:
Example #26
def blanker(test, model, newtest):
    """

   Blank out pixels in "test" that are not well correlated with "model",
   returning result in newtest.

   Invocation:
      result =  blanker( test, model, newtest )

   Arguments:
      test = string
         The name of an existing NDF.
      model = string
         The name of an existing NDF.
      newtest = string
         The name of an NDF to be created.

   Returned Value:
      A value between +1 and -1 indicating the degree of correlation
      between the model and test.

   """

    #  We want statistics of pixels that are present in both test and model,
    #  so first form a mask by adding them together, and then copy bad pixels
    #  from this mask into test and model.
    mask = "{0}/mask".format(NDG.tempdir)
    tmask = "{0}/tmask".format(NDG.tempdir)
    mmask = "{0}/mmask".format(NDG.tempdir)
    invoke("$KAPPA_DIR/add in1={0} in2={1} out={2}".format(test, model, mask))
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        test, mask, tmask))
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        model, mask, mmask))

    #  Get the mean and standard deviation of the remaining pixels in the
    #  test NDF.
    invoke("$KAPPA_DIR/stats {0} clip=\[3,3,3\] quiet".format(tmask))
    tmean = get_task_par("mean", "stats")
    tsigma = get_task_par("sigma", "stats")

    #  Also get the number of good pixels in the mask.
    numgood1 = float(get_task_par("numgood", "stats"))

    #  Get the mean and standard deviation of the remaining pixels in the
    #  model NDF.
    invoke("$KAPPA_DIR/stats {0} clip=\[3,3,3\] quiet".format(mmask))
    mmean = get_task_par("mean", "stats")
    msigma = get_task_par("sigma", "stats")

    #  Normalize them both to have a mean of zero and a standard deviation of
    #  unity.
    tnorm = "{0}/tnorm".format(NDG.tempdir)
    invoke("$KAPPA_DIR/maths exp=\"'(ia-pa)/pb'\" ia={2} pa={0} pb={1} "
           "out={3}".format(tmean, tsigma, tmask, tnorm))

    mnorm = "{0}/mnorm".format(NDG.tempdir)
    invoke("$KAPPA_DIR/maths exp=\"'(ia-pa)/pb'\" ia={2} pa={0} pb={1} "
           "out={3}".format(mmean, msigma, mmask, mnorm))

    #  Find the difference between them.
    diff = "{0}/diff".format(NDG.tempdir)
    invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(mnorm, tnorm, diff))

    #  Remove pixels that differ by more than 0.5 standard deviations.
    mtmask = "{0}/mtmask".format(NDG.tempdir)
    invoke("$KAPPA_DIR/thresh in={0} thrlo=-0.5 newlo=bad thrhi=0.5 "
           "newhi=bad out={1}".format(diff, mtmask))

    #  See how many pixels remain (i.e. pixels that are very similar in the
    #  test and model NDFs).
    invoke("$KAPPA_DIR/stats {0} quiet".format(mtmask))
    numgood2 = float(get_task_par("numgood", "stats"))

    #  It may be that the two NDFs are anti-correlated. To test for this we
    #  negate the model and do the above test again.
    mnormn = "{0}/mnormn".format(NDG.tempdir)
    invoke("$KAPPA_DIR/cmult in={0} scalar=-1 out={1}".format(mnorm, mnormn))

    diffn = "{0}/diffn".format(NDG.tempdir)
    invoke("$KAPPA_DIR/sub in1={0} in2={1} out={2}".format(
        mnormn, tnorm, diffn))

    mtmaskn = "{0}/mtmaskn".format(NDG.tempdir)
    invoke("$KAPPA_DIR/thresh in={0} thrlo=-0.5 newlo=bad thrhi=0.5 "
           "newhi=bad out={1}".format(diffn, mtmaskn))

    invoke("$KAPPA_DIR/stats {0} quiet".format(mtmaskn))
    numgood2n = float(get_task_par("numgood", "stats"))

    #  If we get more similar pixels by negating the model, the NDFs are
    #  anti-correlated.
    if numgood2n > numgood2:

        #  Take a copy of the supplied test NDF, masking out pixels that are not
        #  anti-similar to the corresponding model pixels.
        invoke("$KAPPA_DIR/copybad in={0} ref={2} out={1}".format(
            test, newtest, mtmaskn))

        #  The returned correlation factor is the ratio of the number of
        #  anti-similar pixels to the total number of pixels which the two NDFs
        #  have in common. But if there is not much difference between the number
        #  of similar and anti-similar pixels, we assume there is no correlation.
        if numgood2n > 1.4 * numgood2:
            res = -(numgood2n / numgood1)
        else:
            res = 0.0

    #  If we get more similar pixels without negating the model, the NDFs are
    #  correlated. Do the equivalent to the above.
    else:
        invoke("$KAPPA_DIR/copybad in={0} ref={2} out={1}".format(
            test, newtest, mtmask))
        if numgood2 > 1.4 * numgood2n:
            res = numgood2 / numgood1
        else:
            res = 0.0

    #  If there are very few good pixels in common, return zero correlation.
    if numgood1 < 150:
        res = 0.0

    #  Return the correlation factor.
    return res
Example #27
    in2 = parsys["IN2"].value

    #  See if temp files are to be retained.
    retain = parsys["RETAIN"].value

    #  Get the name of any report file to create.
    report = parsys["REPORT"].value

    #  Create an empty list to hold the lines of the report.
    report_lines = []

    #  Use kappa:ndfcompare to compare the main NDFs holding the map data
    #  array. Include a check that the root ancestors of the two maps are the
    #  same. Always create a report file so we can echo it to the screen.
    report0 = os.path.join(NDG.tempdir, "report0")
    invoke("$KAPPA_DIR/ndfcompare in1={0} in2={1} report={2} skiptests=! "
           "accdat=0.3v accvar=1E-3 quiet".format(in1, in2, report0))

    #  See if any differences were found. If so, append the lines of the
    #  report to the report_lines list.
    similar = starutil.get_task_par("similar", "ndfcompare")
    if not similar:
        with open(report0) as f:
            report_lines.extend(f.readlines())

    #  Now compare the WEIGHTS extension NDF (no need for the root ancestor
    #  check since it has already been done).
    report1 = os.path.join(NDG.tempdir, "report1")
    invoke("$KAPPA_DIR/ndfcompare in1={0}.more.smurf.weights accdat=1E-3 "
           "in2={1}.more.smurf.weights report={2} quiet".format(
               in1, in2, report1))
Example #28
   basec2 = math.radians( basec2 )

#  Get the radius of the map.
   radius  = 0.5*math.sqrt( map_hght*map_hght + map_wdth*map_wdth )

#  Create a Frame describing the coordinate system.
   if tracksys == "GAL":
      sys = "galactic";
   elif tracksys == "J2000":
      sys = "fk5"
   else:
      raise starutil.InvalidParameterError("The TRACKSYS header in {0} is {1} "
                           "- should be GAL or J2000".format(indata,tracksys) )

   frame = NDG.tempfile()
   invoke( "$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(sys,frame) )

#  Create a Circle describing the map.
   if region is None:
      region = NDG.tempfile()
      display = True
   else:
      display = False

   invoke( "$ATOOLS_DIR/astcircle frame={0} form=1 centre=\[{1},{2}\] point={3} "
           "unc=! options=! result={4}".format(frame,basec1,basec2,radius,region) )

   if display:
      f = open( region, "r" )
      print( f.read() )
      f.close()
Example #29
#  Do tests for 5 different peak values
   for ipeak in range(0, 1):
      starutil.msg_out( ">>> Doing sep={0} and peak={1}....".format(clump_separation,peak_value))

#  Get the dimensions of a square image that would be expected to
#  contain the target number of clumps at the current separation.
      npix = int( clump_separation*math.sqrt( nclump_target ) )

#  Create a temporary file containing circular clumps of constant size
#  and shape (except for the effects of noise).
      model = NDG(1)
      out = NDG(1)
      outcat = NDG.tempfile(".fit")
      invoke( "$CUPID_DIR/makeclumps angle=\[0,0\] beamfwhm=0 deconv=no "
              "fwhm1=\[{0},0\] fwhm2=\[{0},0\] lbnd=\[1,1\] ubnd=\[{1},{1}\] "
              "model={2} nclump={3} out={4} outcat={5} pardist=normal "
              "peak = \[{6},0\] rms={7} trunc=0.1".
               format(clump_fwhm,npix,model,nclump_target,out,outcat,
                      peak_value,noise) )

#  Run fellwalker on the data.
      mask = NDG(1)
      outcat_fw = NDG.tempfile(".fit")
      invoke( "$CUPID_DIR/findclumps config=def deconv=no in={0} "
              "method=fellwalker out={1} outcat={2} rms={3}".
               format(out,mask,outcat_fw,noise) )

# Get the number of clumps found by FellWalker.
      nfw = starutil.get_task_par( "nclumps", "findclumps" )
      if nfw > 0:

#  See how many of the clump peaks found by FellWalker match real clumps to
Example #30
      ref = "!"

#  If no Q and U values were supplied, create a set of Q and U time
#  streams from the supplied analysed intensity time streams. Put them in
#  the QUDIR directory, or the temp directory if QUDIR is null.
   if inqu is None:
      north = parsys["NORTH"].value
      qudir =  parsys["QUDIR"].value
      if not qudir:
         qudir = NDG.tempdir
      elif not os.path.exists(qudir):
         os.makedirs(qudir)

      msg_out( "Calculating Q, U and I time streams for each bolometer...")
      invoke("$SMURF_DIR/calcqu in={0} lsqfit=yes config=def outq={1}/\*_QT "
             "outu={1}/\*_UT outi={1}/\*_IT fix=yes north={2}".
             format( indata, qudir, north ) )

#  Get groups listing the time series files created by calcqu.
      qts = NDG( "{0}/*_QT".format( qudir ) )
      uts = NDG( "{0}/*_UT".format( qudir ) )
      its = NDG( "{0}/*_IT".format( qudir ) )

#  If pre-calculated Q and U values were supplied, identify the Q, U and I
#  files.
   else:
      msg_out( "Using pre-calculating Q, U and I values...")

      qndfs = []
      undfs = []
      indfs = []
Example #31
   niter = parsys["NITER"].value
   pixsize = parsys["PIXSIZE"].value
   config = parsys["CONFIG"].value
   ref = parsys["REF"].value
   mask2 = parsys["MASK2"].value
   mask3 = parsys["MASK3"].value
   extra = parsys["EXTRA"].value
   itermap = parsys["ITERMAP"].value

#  See if we are using pre-cleaned data, in which case there is no need
#  to export the cleaned data on the first iteration. Note, we need to
#  convert the string returned by "invoke" to an int explicitly; otherwise
#  the equality test always fails and we would end up assuming the raw
#  data had been pre-cleaned even when it had not.
   if int( invoke( "$KAPPA_DIR/configecho name=doclean config={0} "
              "defaults=$SMURF_DIR/smurf_makemap.def "
              "select=\"\'450=0,850=1\'\" defval=1".format(config))) == 1:
      precleaned = False
   else:
      precleaned = True
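
#  The same configecho pattern is used again below. As an illustrative
#  sketch (not part of the original script), it could be factored out as:
#
#   def config_int( name, config, defval ):
#      return int( invoke( "$KAPPA_DIR/configecho name={0} config={1} "
#                          "defaults=$SMURF_DIR/smurf_makemap.def "
#                          "select=\"\'450=0,850=1\'\" defval={2}".
#                          format( name, config, defval ) ) )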

#  If requested, use numiter from the config file. Arbitrarily choose 850 um
#  values for the waveband-specific parameters, but these are not actually used.
   if niter == 0:
      niter = int( invoke( "$KAPPA_DIR/configecho name=numiter config={0} "
                           "defaults=$SMURF_DIR/smurf_makemap.def "
                           "select=\"\'450=0,850=1\'\" defval=5".format(config)))

#  If iterating to convergence, get the maximum allowed normalised map
#  change between iterations, and set the number of iterations positive.
   if niter < 0:
Ejemplo n.º 32
0
                        returnCode[row_val, col_val] = False

            # Write NDFs.
            out_p0 = write_ip_NDF(ip_prms['Pf_' + a[-1]], bad_pixel_ref)
            out_p1 = write_ip_NDF(ipprms_pol_screen, bad_pixel_ref)
            out_c0 = write_ip_NDF(ipprms_Co, bad_pixel_ref)
            out_angc = write_ip_NDF(ip_prms['Theta_ip_' + a[-1]],
                                    bad_pixel_ref)

            # Fill any bad pixels with a smooth function that matches the
            # surrounding pixels.
            msg_out(
                "Filling in bad pixel values for {0} bolometer IP parameters..."
                .format(a))
            out_p0_filled = NDG(1)
            out_p1_filled = NDG(1)
            out_c0_filled = NDG(1)
            out_angc_filled = NDG(1)
            for raw, filled in ((out_p0, out_p0_filled),
                                (out_p1, out_p1_filled),
                                (out_c0, out_c0_filled),
                                (out_angc, out_angc_filled)):
                invoke(
                    "$KAPPA_DIR/fillbad in={0} out={1} variance=true niter=10 size=15"
                    .format(raw, filled))

            # Copy individual NDFs to single output.
Ejemplo n.º 33
0
   tiledir = os.getenv( 'JSA_TILE_DIR' )
   if tiledir:
      msg_out( "Tiles will be read from {0}".format(tiledir) )
   else:
      msg_out( "Environment variable JSA_TILE_DIR is not set!" )
      msg_out( "Tiles will be read from the current directory ({0})".format(os.getcwd()) )

#  Create an empty list to hold the NDFs for the tiles holding the
#  required data.
   tilendf = []
   itilelist = []

#  Identify the tiles that overlap the specified region, and loop round
#  them.
   invoke("$SMURF_DIR/tilelist region={0} instrument={1}".format(region,instrument) )
   for itile in starutil.get_task_par( "tiles", "tilelist" ):

#  Get information about the tile, including the 2D spatial pixel index
#  bounds of its overlap with the required Region.
      invoke("$SMURF_DIR/tileinfo itile={0} instrument={1} "
             "target={2}".format(itile,instrument,region) )

#  Skip this tile if it does not exist (i.e. is empty).
      if starutil.get_task_par( "exists", "tileinfo" ):

#  Get the 2D spatial pixel index bounds of the part of the master tile that
#  overlaps the required region.
         tlbnd = starutil.get_task_par( "tlbnd", "tileinfo" )
         tubnd = starutil.get_task_par( "tubnd", "tileinfo" )
Ejemplo n.º 34
0
def get_filtered_skydip_data(qarray, uarray, clip, a):
    """

    This function takes Q and U array data (output from calcqu), applies
    ffclean to remove spikes, and returns the results as numpy arrays.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref ) = ...
            get_filtered_skydip_data(qarray,uarray,clip,a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
           a = A string indicating the array (e.g. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term (1-exp(-tau*air_mass)).
            Here tau is calculated using the WVM data as input.

    """

    #  Remove spikes from the Q images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "qff", which
    #  inherits its size from the existing group "qarray".
    msg_out("Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(qarray, qff, clip))

    #  Remove spikes from the U images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "uff", which
    #  inherits its size from the existing group "uarray".
    msg_out("Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke("$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".
           format(uarray, uff, clip))

    elevation = []
    opacity_term = []
    for stare in range(len(qff)):
        # Stack Q data in numpy array
        # Get elevation information
        elevation.append(
            numpy.array(
                float(
                    invoke(
                        "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART"
                        .format(qff[stare])))))
        # Get Tau (Opacity) information
        tau_temp = numpy.array(
            float(
                invoke(
                    "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".
                    format(qff[stare]))))
        # Convert the 225 GHz WVM opacity to the observing band.
        if '4' in a:
            tau_temp = 19.04 * (tau_temp - 0.018)  # Relation from Dempsey et al.
        elif '8' in a:
            tau_temp = 5.36 * (tau_temp - 0.006)  # Relation from Dempsey et al.
        opacity_term.append(1 -
                            numpy.exp(-1 * tau_temp /
                                      numpy.sin(numpy.radians(elevation[-1]))))
        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(qff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")
        qdata_temp = numpy.reshape(Ndf(qff[stare]).data, (ny, nx))
        qdata_temp[numpy.abs(qdata_temp) > 1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total, qdata_temp))
        qvar_temp = numpy.reshape(Ndf(qff[stare]).var, (ny, nx))
        # Flag Starlink bad values in the variance array as NaN.
        qvar_temp[numpy.abs(qvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total, qvar_temp))
        # Stack U data in numpy array
        invoke("$KAPPA_DIR/ndftrace {0} quiet".format(uff[stare]))
        nx = get_task_par("dims(1)", "ndftrace")
        ny = get_task_par("dims(2)", "ndftrace")
        udata_temp = numpy.reshape(Ndf(uff[stare]).data, (ny, nx))
        udata_temp[numpy.abs(udata_temp) > 1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total, udata_temp))
        uvar_temp = numpy.reshape(Ndf(uff[stare]).var, (ny, nx))
        # Flag Starlink bad values in the variance array as NaN.
        uvar_temp[numpy.abs(uvar_temp) > 1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total, uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke("$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(
        qff, uff, bad_pixel_ref))
    return (qdata_total, qvar_total, udata_total, uvar_total, elevation,
            opacity_term, bad_pixel_ref)
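
# A minimal usage sketch (illustrative, not part of the original module):
# given NDG groups "qarray" and "uarray" returned by calcqu for subarray
# S8A, the cleaned and stacked results could be obtained with:
#
#     (qdata, qvar, udata, uvar, elevation,
#      opacity, badref) = get_filtered_skydip_data(qarray, uarray, 3.0, "S8A")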
Ejemplo n.º 35
0
      filter = 850
      msg_out( "No value found for FITS header 'FILTER' in {0} - assuming 850".format(qin[0]))

   if filter == 450:
      fcf1 = 962.0
      fcf2 = 491.0
   elif filter == 850:
      fcf1 = 725.0
      fcf2 = 537.0
   else:
      raise starutil.InvalidParameterError("Invalid FILTER header value "
             "'{0} found in {1}.".format( filter, qin[0] ) )

#  Remove any spectral axes
   qtrim = NDG(qin)
   invoke( "$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(qin,qtrim) )
   utrim = NDG(uin)
   invoke( "$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(uin,utrim) )
   itrim = NDG(iin)
   invoke( "$KAPPA_DIR/ndfcopy in={0} out={1} trim=yes".format(iin,itrim) )

#  Rotate them to use the same polarimetric reference direction.
   qrot = NDG(qtrim)
   urot = NDG(utrim)
   invoke( "$POLPACK_DIR/polrotref qin={0} uin={1} like={2} qout={3} uout={4} ".
           format(qtrim,utrim,qtrim[0],qrot,urot) )

#  Mosaic them into a single set of Q, U and I images, aligning them
#  with the first I image.
   qmos = NDG( 1 )
   invoke( "$KAPPA_DIR/wcsmosaic in={0} out={1} ref={2} method=bilin accept".format(qrot,qmos,itrim[0]) )
Ejemplo n.º 36
0
    print "prepared with the command: oracdr_scuba2_<band> <observation-date>"
    print "where band is: 450 or 850"
    print "and observation-date has the form: YYYYMMDD"
    sys.exit(0)

# print "band={0}".format(band)

# Get WNFACT value and nFrames from data file
wnfact = float(starutil.get_fits_header(indata, "WNFACT"))
# print "wnfact={0}".format(wnfact)
nFrames = int(starutil.get_fits_header(indata, "MIRSTOP")) + 1
# print "nFrames={0}".format(nFrames)

# Gather statistics on the central region of the input spectrum
# We are interested in the z position of the maximum pixel value (peak)
instats = invoke("$KAPPA_DIR/stats ndf={0} quiet".format(indata))
maxpos = starutil.get_task_par("MAXPOS", "stats")
maxposz = maxpos[2]
# print "maxposz={0}".format(maxposz)

# Calculate the band pass frames centered on the peak
if band == "SCUBA2_850":
    wnlbound = 11.2
    wnubound = 12.2
else:
    if band == "SCUBA2_450":
        wnlbound = 22.1
        wnubound = 23.3
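# (These bounds bracket the band centres: 850 um corresponds to a
# wavenumber of about 11.8 cm^-1, and 450 um to about 22.2 cm^-1.)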
# print "wnlbound={0}".format(wnlbound)
# print "wnubound={0}".format(wnubound)
bandwidth = wnubound - wnlbound
Ejemplo n.º 37
0
   if inqui:
      qin = inqui.filter("'\.Q$'" )
      uin = inqui.filter("'\.U$'" )
      iin = inqui.filter("'\.I$'" )

#  If not supplied, try again using INQ, INU and INI (i.e. scan & spin
#  data).
   else:
      qin = parsys["INQ"].value
      uin = parsys["INU"].value
      iin = parsys["INI"].value

#  Check they are all in units of pW.
      for quilist in (qin,uin,iin):
         for sdf in quilist:
            invoke("$KAPPA_DIR/ndftrace ndf={0} quiet".format(sdf) )
            units = starutil.get_task_par( "UNITS", "ndftrace" ).replace(" ", "")
            if units != "pW":
               raise starutil.InvalidParameterError("All supplied I, Q and U "
                    "maps must be in units of 'pW', but '{0}' has units '{1}'.".
                    format(sdf,units))

#  Now get the PI value to use.
   pimap = parsys["PI"].value

#  Now get the QUI value to use.
   qui = parsys["QUI"].value

#  Get the output catalogue now to avoid a long wait before the user gets
#  prompted for it.
   outcat = parsys["CAT"].value
Ejemplo n.º 38
0
def get_filtered_skydip_data(qarray,uarray,clip,a):
    """

    This function takes Q and U array data (output from calcqu), applies
    ffclean to remove spikes, and returns the results as numpy arrays.
    It borrows (copies) heavily from pol2cat.py (2015A).

    Invocation:
        ( qdata_total,qvar_total,udata_total,uvar_total,elevation,opacity_term,bad_pixel_ref ) = ...
            get_filtered_skydip_data(qarray,uarray,clip,a)

    Arguments:
        qarray = An NDF of Q array data (output from calcqu).
        uarray = An NDF of U array data (output from calcqu).
        clip = The sigma cut for ffclean.
           a = A string indicating the array (e.g. 'S8A').

    Returned Value:
        qdata_total = A numpy array with the cleaned qarray data.
        qvar_total = A numpy array with the qarray variance data.
        udata_total = A numpy array with the cleaned uarray data.
        uvar_total = A numpy array with the uarray variance data.
        elevation = A numpy array with the elevation data.
        opacity_term = A numpy array with the opacity brightness term (1-exp(-tau*air_mass)).
            Here tau is calculated using the WVM data as input.

    """

    #  Remove spikes from the Q images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "qff", which
    #  inherits its size from the existing group "qarray".
    msg_out( "Removing spikes from {0} bolometer Q values...".format(a))
    qff = NDG(qarray)
    qff.comment = "qff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]".format(qarray,qff,clip) )

    #  Remove spikes from the U images for the current subarray. The cleaned NDFs
    #  are written to temporary NDFs specified by the new NDG object "uff", which
    #  inherits its size from the existing group "uarray".
    msg_out( "Removing spikes from {0} bolometer U values...".format(a))
    uff = NDG(uarray)
    uff.comment = "uff"
    invoke( "$KAPPA_DIR/ffclean in={0} out={1} genvar=yes box=3 clip=\[{2}\]"
            .format(uarray,uff,clip) )

    elevation = []
    opacity_term = []
    for stare in range(len(qff)):
        # Stack Q data in numpy array
        # Get elevation information
        elevation.append(numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=ELSTART".format( qff[ stare ] ) ) ) ) )
        # Get Tau (Opacity) information
        tau_temp = numpy.array( float( invoke( "$KAPPA_DIR/fitsmod ndf={0} edit=print keyword=WVMTAUST".format( qff[ stare ] ) ) ) )
        # Convert the 225 GHz WVM opacity to the observing band.
        if '4' in a:
            tau_temp = 19.04*(tau_temp-0.018) # Relation from Dempsey et al.
        elif '8' in a:
            tau_temp = 5.36*(tau_temp-0.006) # Relation from Dempsey et al.
        opacity_term.append(1-numpy.exp(-1*tau_temp/numpy.sin(numpy.radians(elevation[-1]))))
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(qff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        qdata_temp = numpy.reshape( Ndf( qff[ stare ] ).data, (ny,nx))
        qdata_temp[numpy.abs(qdata_temp)>1e300] = numpy.nan
        if stare == 0:
            qdata_total = qdata_temp
        else:
            qdata_total = numpy.dstack((qdata_total,qdata_temp))
        qvar_temp = numpy.reshape( Ndf( qff[ stare ] ).var, (ny,nx))
        # Flag Starlink bad values in the variance array as NaN.
        qvar_temp[numpy.abs(qvar_temp)>1e300] = numpy.nan
        if stare == 0:
            qvar_total = qvar_temp
        else:
            qvar_total = numpy.dstack((qvar_total,qvar_temp))
        # Stack U data in numpy array
        invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(uff[ stare ]))
        nx = get_task_par( "dims(1)", "ndftrace" )
        ny = get_task_par( "dims(2)", "ndftrace" )
        udata_temp = numpy.reshape( Ndf( uff[ stare ] ).data, (ny,nx))
        udata_temp[numpy.abs(udata_temp)>1e300] = numpy.nan
        if stare == 0:
            udata_total = udata_temp
        else:
            udata_total = numpy.dstack((udata_total,udata_temp))
        uvar_temp = numpy.reshape( Ndf( uff[ stare ] ).var, (ny,nx))
        # Flag Starlink bad values in the variance array as NaN.
        uvar_temp[numpy.abs(uvar_temp)>1e300] = numpy.nan
        if stare == 0:
            uvar_total = uvar_temp
        else:
            uvar_total = numpy.dstack((uvar_total,uvar_temp))

    # Create bad pixel reference.
    bad_pixel_ref = NDG(1)
    invoke( "$KAPPA_DIR/copybad in={0} ref={1} out={2}".format(qff,uff,bad_pixel_ref))
    return ( qdata_total, qvar_total, udata_total, uvar_total, elevation, opacity_term, bad_pixel_ref )
Ejemplo n.º 39
0
   in2 = parsys["IN2"].value

#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  Get the name of any report file to create.
   report = parsys["REPORT"].value

#  Create an empty list to hold the lines of the report.
   report_lines = []

#  Use kappa:ndfcompare to compare the main NDFs holding the map data
#  array. Include a check that the root ancestors of the two maps are the
#  same. Always create a report file so we can echo it to the screen.
   report0 = os.path.join(NDG.tempdir,"report0")
   invoke( "$KAPPA_DIR/ndfcompare in1={0} in2={1} report={2} skiptests=! "
           "accdat=0.1v accvar=1E-4 quiet".format(in1,in2,report0) )

#  See if any differences were found. If so, append the lines of the
#  report to the report_lines list.
   similar = starutil.get_task_par( "similar", "ndfcompare" )
   if not similar:
      with open(report0) as f:
         report_lines.extend( f.readlines() )

#  Now compare the WEIGHTS extension NDF (no need for the root-ancestor
#  check since it has already been done).
   report1 = os.path.join(NDG.tempdir,"report1")
   invoke( "$KAPPA_DIR/ndfcompare in1={0}.more.smurf.weights accdat=1E-4 "
           "in2={1}.more.smurf.weights report={2} quiet".format(in1,in2,report1) )

#  See if any differences were found. If so, append the report to any
Ejemplo n.º 40
0
#  and determine the waveband. If neither header is found, look for
#  "s4a", etc, in the NDF's provenance info.
   if isndf1:
      try:
         subarray = starutil.get_fits_header( config1, "SUBARRAY" )
         if subarray is None:
            filter = starutil.get_fits_header( config1, "FILTER" )
            if filter is not None:
               filter = filter.strip()
               if filter == "450":
                  subarray = "s4"
               elif filter == "850":
                  subarray = "s8"

         if subarray is None:
            text = starutil.invoke( "$KAPPA_DIR/provshow {0}".format(config1) )
            if "s4a" in text or "s4b" in text or "s4c" in text or "s4d" in text:
               subarray = "s4"
            elif "s8a" in text or "s8b" in text or "s8c" in text or "s8d" in text:
               subarray = "s8"
            else:
               subarray = None
      except:
         print( "\n!! It looks like NDF '{0}' either does not exist or is "
                "corrupt.".format(config1) )
         os._exit(1)

   if isndf1:
      if subarray is None:
         msg_out("Cannot determine the SCUBA-2 waveband for NDF '{0}' "
                 "- was it really created by MAKEMAP?".format(config1), starutil.CRITICAL )
Ejemplo n.º 41
0
        fred = NDG.load("IN", True)
        if indata != fred:
            raise UsageError(
                "\n\nThe directory specified by parameter RESTART ({0}) "
                "refers to different time-series data".format(restart))
        msg_out("Re-using data in {0}".format(restart))

#  Initialise the starlink random number seed to a known value so that
#  results are repeatable.
    os.environ["STAR_SEED"] = "65"

    #  Has the input data been flatfielded? Test the first supplied input NDF.
    ff = None
    try:
        if "smf_flatfield" in invoke(
                "$HDSTRACE_DIR/hdstrace {0}.more.smurf.smurfhist".format(
                    indata[0])):
            msg_out("Input data has already been flatfielded.")
            ff = indata
    except starutil.StarUtilError:
        pass

#  Flat field the supplied template data
    if not ff:
        ff = NDG.load("FF")
        if ff:
            msg_out("Re-using old flatfielded template data...")

    if not ff:
        ffdir = NDG.subdir()
        msg_out("Flatfielding template data...")
Ejemplo n.º 42
0
      elif cval == "DAS":
         instrument = "DAS"

#  If so, set the default for the INSTRUMENT parameter and prevent the
#  user being prompted for a value.
   if instrument is not None:
      parsys["INSTRUMENT"].default = instrument
      parsys["INSTRUMENT"].noprompt = True

#  Get the chosen instrument.
   instrument = parsys["INSTRUMENT"].value
   instrument = starutil.shell_quote( instrument )

#  Get a list of the tiles that overlap the supplied NDF.
   invoke( "$SMURF_DIR/jsatilelist in={0} instrument={1} quiet".format(inndf,instrument) )
   tiles = starutil.get_task_par( "TILES", "jsatilelist" )

#  JSADICER requires the input array to be gridded on the JSA all-sky
#  pixel grid. This is normally an HPX projection, but if the supplied
#  NDF straddles a discontinuity in the HPX projection then we need to
#  use a different flavour of HPX (either an HPX projection centred on
#  RA=12h or an XPH (polar HEALPix) projection centred on the north or
#  south pole). The above call to jsatilelist will have determined the
#  appropriate projection to use, so get it.
   proj = starutil.get_task_par( "PROJ", "jsatilelist" )
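#  (The returned value is a short code naming one of the HPX or XPH
#  flavours described above; "HPX", "HPX12", "XPHN" and "XPHS" are the
#  expected codes, though that enumeration is an assumption here, not
#  something this script asserts.)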

#  Create a file holding the FITS-WCS header for the first tile, using
#  the type of projection determined above.
   head = "{0}/header".format(NDG.tempdir)
   invoke( "$SMURF_DIR/jsatileinfo itile={0} instrument={1} header={2} "
Ejemplo n.º 43
0
#  See if temp files are to be retained.
   retain = parsys["RETAIN"].value

#  See if statistical debiasing is to be performed.
   debias = parsys["DEBIAS"].value

#  Get groups containing all the Q, U and I images.
   qin = inqui.filter("'\.Q$'" )
   uin = inqui.filter("'\.U$'" )
   iin = inqui.filter("'\.I$'" )

#  Rotate them to use the same polarimetric reference direction.
   qrot = NDG(qin)
   urot = NDG(uin)
   invoke( "$POLPACK_DIR/polrotref qin={0} uin={1} like={2} qout={3} uout={4} ".
           format(qin,uin,qin[0],qrot,urot) )

#  Mosaic them into a single set of Q, U and I images.
   qmos = NDG( 1 )
   invoke( "$KAPPA_DIR/wcsmosaic in={0} out={1} method=bilin accept".format(qrot,qmos) )
   umos = NDG( 1 )
   invoke( "$KAPPA_DIR/wcsmosaic in={0} out={1} method=bilin accept".format(urot,umos) )
   imos = NDG( 1 )
   invoke( "$KAPPA_DIR/wcsmosaic in={0} out={1} method=bilin accept".format(iin,imos) )

#  If required, save the Q, U and I images.
   if qui is not None:
      invoke( "$KAPPA_DIR/ndfcopy {0} out={1}.Q".format(qmos,qui) )
      invoke( "$KAPPA_DIR/ndfcopy {0} out={1}.U".format(umos,qui) )
      invoke( "$KAPPA_DIR/ndfcopy {0} out={1}.I".format(imos,qui) )
Ejemplo n.º 44
0
def pca( indata, ncomp ):
   """

   Identifies and returns the strongest PCA components in a 3D NDF.

   Invocation:
      result = pca( indata, ncomp )

   Arguments:
      indata = NDG
         An NDG object specifying a single 3D NDF. Each plane in the cube
         is a separate image, and the images are compared using PCA.
      ncomp = int
         The number of PCA components to include in the returned NDF.

   Returned Value:
      A new NDG object containing a single 3D NDF containing just the
      strongest "ncomp" PCA components found in the input NDF.

   """

   msg_out( "   finding strongest {0} components using Principal Component Analysis...".format(ncomp) )

#  Get the shape of the input NDF.
   invoke( "$KAPPA_DIR/ndftrace {0} quiet".format(indata) )
   nx = get_task_par( "dims(1)", "ndftrace" )
   ny = get_task_par( "dims(2)", "ndftrace" )
   nz = get_task_par( "dims(3)", "ndftrace" )

#  Fill any bad pixels.
   tmp = NDG(1)
   invoke( "$KAPPA_DIR/fillbad in={0} out={1} variance=no niter=10 size=10".format(indata,tmp) )

#  Read the planes from the supplied NDF. Note, numpy axis ordering is the
#  reverse of starlink axis ordering. We want a numpy array consisting of
#  "nz" elements, each being a vectorised form of a plane from the 3D NDF.
   ndfdata = numpy.reshape( Ndf( tmp[0] ).data, (nz,nx*ny) )

#  Normalize each plane to a mean of zero and standard deviation of 1.0
   means = []
   sigmas = []
   newdata = []
   for iplane in range(0,nz):
      plane = ndfdata[ iplane ]
      mn = plane.mean()
      sg = math.sqrt( plane.var() )
      means.append( mn )
      sigmas.append( sg )

      if sg > 0.0:
         newdata.append( (plane-mn)/sg )

   newdata = numpy.array( newdata )

#  Transpose as required by MDP.
   pcadata = numpy.transpose( newdata )

#  Find the required number of PCA components (these are the strongest
#  components).
   pca = mdp.nodes.PCANode( output_dim=ncomp )
   comp = pca.execute( pcadata )

#  Re-project the components back into the space of the input 3D NDF.
   ip = numpy.dot( comp, pca.get_recmatrix() )

#  Transpose the array so that each row is an image.
   ipt = numpy.transpose(ip)

#  Normalise them back to the original scales.
   jplane = 0
   newdata = []
   for iplane in range(0,nz):
      if sigmas[ iplane ] > 0.0:
         newplane = sigmas[ iplane ] * ipt[ jplane ] + means[ iplane ]
         jplane += 1
      else:
         newplane = ndfdata[ iplane ]
      newdata.append( newplane )
   newdata = numpy.array( newdata )

#  Dump the re-projected images out to a 3D NDF.
   result = NDG(1)
   indf = ndf.open( result[0], 'WRITE', 'NEW' )
   indf.new('_DOUBLE', 3, numpy.array([1,1,1]),numpy.array([nx,ny,nz]))
   ndfmap = indf.map( 'DATA', '_DOUBLE', 'WRITE' )
   ndfmap.numpytondf( newdata )
   indf.annul()

#  Uncomment to dump the components.
#   msg_out( "Dumping PCA comps to {0}-comps".format(result[0]) )
#   compt = numpy.transpose(comp)
#   indf = ndf.open( "{0}-comps".format(result[0]), 'WRITE', 'NEW' )
#   indf.new('_DOUBLE', 3, numpy.array([1,1,1]),numpy.array([nx,ny,ncomp]))
#   ndfmap = indf.map( 'DATA', '_DOUBLE', 'WRITE' )
#   ndfmap.numpytondf( compt )
#   indf.annul()

   return result
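
#  A minimal usage sketch (illustrative, not part of the original module):
#  given an NDG object "cube" holding a single 3D NDF of stacked images,
#  the strongest four components could be extracted with:
#
#     cleaned = pca( cube, 4 )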
Ejemplo n.º 45
0
    print "where band is: 450 or 850"
    print "and observation-date has the form: YYYYMMDD"
    sys.exit(0)


# print "band={0}".format(band)

# Get WNFACT value and nFrames from data file
wnfact = float(starutil.get_fits_header(indata, "WNFACT"))
# print "wnfact={0}".format(wnfact)
nFrames = int(starutil.get_fits_header(indata, "MIRSTOP")) + 1
# print "nFrames={0}".format(nFrames)

# Gather statistics on the central region of the input spectrum
# We are interested in the z position of the maximum pixel value (peak)
instats = invoke("$KAPPA_DIR/stats ndf={0} quiet".format(indata))
maxpos = starutil.get_task_par("MAXPOS", "stats")
maxposz = maxpos[2]
# print "maxposz={0}".format(maxposz)

# Calculate the band pass frames centered on the peak
if band == "SCUBA2_850":
    wnlbound = 11.2
    wnubound = 12.2
else:
    if band == "SCUBA2_450":
        wnlbound = 22.1
        wnubound = 23.3
# print "wnlbound={0}".format(wnlbound)
# print "wnubound={0}".format(wnubound)
bandwidth = wnubound - wnlbound
Ejemplo n.º 46
0
      fred = NDG.load( "IN", True )
      if indata != fred:
         raise UsageError("\n\nThe directory specified by parameter RESTART ({0}) "
                          "refers to different time-series data".format(restart) )
      msg_out( "Re-using data in {0}".format(restart) )

#  Initialise the starlink random number seed to a known value so that
#  results are repeatable.
   os.environ["STAR_SEED"] = "65"

#  Flat field the supplied template data
   ff = NDG.load( "FF" )
   if not ff:
      ffdir = NDG.subdir()
      msg_out( "Flatfielding template data...")
      invoke("$SMURF_DIR/flatfield in={0} out=\"{1}/*\"".format(indata,ffdir) )
      ff = NDG("{0}/\*".format(ffdir))
      ff.save( "FF" )
   else:
      msg_out( "Re-using old flatfielded template data...")

#  Output files. Base the modification on "ff" rather than "indata",
#  since "indata" may include non-science files (flatfields, darks etc)
#  for which no corresponding output file should be created.
   gexp = parsys["OUT"].value
   outdata = NDG( ff, gexp )

#  If required, create new artificial I, Q and U maps.
   if newart:
      msg_out( "Creating new artificial I, Q and U maps...")
Ejemplo n.º 47
0
#  To be a group expression, it must contain at least one of the
#  following characters: ^,= (NDFs are not allowed any of these).
   gexp_chars = set( '^=,' )
   if any( (c in gexp_chars) for c in config1 ):
      isndf1 = False
   else:
      isndf1 = True
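#  (For example, "^files.lis" and "map1,map2" both contain one of these
#  characters and so are treated as group expressions, whereas a plain
#  NDF name such as "map850" does not.)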

#  If it is an NDF, attempt to get the SUBARRAY fits header, and
#  determine the waveband. If no SUBARRAY header is found, look for
#  "s4a", etc, in the NDF's provenance info.
   if isndf1:
      try:
         subarray = starutil.get_fits_header( config1, "SUBARRAY" )
         if subarray is None:
            text = starutil.invoke( "$KAPPA_DIR/provshow {0}".format(config1) )
            if "s4a" in text or "s4b" in text or "s4c" in text or "s4d" in text:
               subarray = "s4"
            elif "s8a" in text or "s8b" in text or "s8c" in text or "s8d" in text:
               subarray = "s8"
            else:
               subarray = None
      except:
         print( "\n!! It looks like NDF '{0}' either does not exist or is "
                "corrupt.".format(config1) )
         os._exit(1)

   if isndf1:
      if subarray is None:
         msg_out("Cannot determine the SCUBA-2 waveband for NDF '{0}' "
                 "- was it really created by MAKEMAP?".format(config1), starutil.CRITICAL )
Ejemplo n.º 48
0
    tiledir = os.getenv('JSA_TILE_DIR')
    if tiledir:
        msg_out("Tiles will be read from {0}".format(tiledir))
    else:
        msg_out("Environment variable JSA_TILE_DIR is not set!")
        msg_out("Tiles will be read from the current directory ({0})".format(
            os.getcwd()))

#  Create an empty list to hold the NDFs for the tiles holding the
#  required data.
    tilendf = []
    itilelist = []

    #  Identify the tiles that overlap the specified region, and loop round
    #  them.
    invoke("$SMURF_DIR/tilelist region={0} instrument={1}".format(
        region, instrument))
    for itile in starutil.get_task_par("tiles", "tilelist"):

        #  Get information about the tile, including the 2D spatial pixel index
        #  bounds of its overlap with the required Region.
        invoke("$SMURF_DIR/tileinfo itile={0} instrument={1} "
               "target={2}".format(itile, instrument, region))

        #  Skip this tile if it does not exist (i.e. is empty).
        if starutil.get_task_par("exists", "tileinfo"):

            #  Get the 2D spatial pixel index bounds of the part of the master tile that
            #  overlaps the required region.
            tlbnd = starutil.get_task_par("tlbnd", "tileinfo")
            tubnd = starutil.get_task_par("tubnd", "tileinfo")
Ejemplo n.º 49
0
   if region is None:
      system = parsys["SYSTEM"].value
      if system == "ICRS" :
         parsys["CENTRE1"].prompt = "RA at centre of required circle"
         parsys["CENTRE2"].prompt = "Dec at centre of required circle"
      else:
         parsys["CENTRE1"].prompt = "Galactic longitude at centre of required circle"
         parsys["CENTRE2"].prompt = "Galactic latitude at centre of required circle"

      centre1 = parsys["CENTRE1"].value
      if centre1 is not None:
         centre2 = parsys["CENTRE2"].value
         radius = parsys["RADIUS"].value

         frame = NDG.tempfile()
         invoke( "$ATOOLS_DIR/astskyframe \"'system={0}'\" {1}".format(system,frame) )

         invoke( "$ATOOLS_DIR/astunformat {0} 1 {1}".format(frame,centre1) )
         cen1 = starutil.get_task_par( "DVAL", "astunformat" )
         invoke( "$ATOOLS_DIR/astunformat {0} 2 {1}".format(frame,centre2) )
         cen2 = starutil.get_task_par( "DVAL", "astunformat" )

         region = NDG.tempfile()
         invoke( "$ATOOLS_DIR/astcircle {0} 1 \[{1},{2}\] {3} ! ! {4}".
                 format(frame,cen1,cen2,math.radians(radius/60.0),region) )
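#  (RADIUS is evidently supplied in arcminutes: dividing by 60 gives
#  degrees, which math.radians converts to the radians required by
#  astcircle.)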

#  If a Region was supplied, note that we do not yet have the coordinates
#  of the centre of the required region, and note whether the Region is
#  defined by an NDF.
   else:
      try:
Ejemplo n.º 50
0
    outbase = parsys["OUT"].value
    fakemap = parsys["FAKEMAP"].value

    #  Erase any NDFs holding cleaned data, extinction or pointing data from
    #  previous runs.
    for path in glob.glob("*_con_res_cln.sdf"):
        myremove(path)
        base = path[:-16]
        myremove("{0}_lat.sdf".format(base))
        myremove("{0}_lon.sdf".format(base))
        myremove("{0}_con_ext.sdf".format(base))

#  Use sc2concat to concatenate and flatfield the data.
    msg_out("Concatenating and flatfielding...")
    concbase = NDG.tempfile("")
    invoke("$SMURF_DIR/sc2concat in={0} outbase={1} maxlen=360".format(
        indata, concbase))
    concdata = NDG("{0}_*".format(concbase))

    #  Use makemap to generate quality, extinction and pointing info.
    confname = NDG.tempfile()
    fd = open(confname, "w")
    fd.write("^$STARLINK_DIR/share/smurf/dimmconfig.lis\n")
    fd.write("numiter=1\n")
    fd.write("exportclean=1\n")
    fd.write("exportndf=ext\n")
    fd.write("exportlonlat=1\n")
    fd.write("dcfitbox=0\n")
    fd.write("noisecliphigh=0\n")
    fd.write("order=0\n")
    fd.write("downsampscale=0\n")
    if fakemap is not None: