Exemplo n.º 1
0
def average_fiberflat(fiberflats):
    """Average several fiberflats into a single FiberFlat.

    Args:
        fiberflats : list of `desispec.FiberFlat` objects, all defined on
            the same wavelength grid.

    Returns:
        desispec.FiberFlat object with the averaged flat field, combined
        inverse variance, and combined mask.  Header, fibers and
        spectrograph are taken from the first input.

    Raises:
        ValueError: if the input list is empty or the wavelength grids
            do not match.
    """

    log = get_logger()
    log.info("starting")

    if not fiberflats:
        message = "input fiberflat list is empty"
        log.critical(message)
        raise ValueError(message)
    if len(fiberflats) == 1:
        log.warning("only one fiberflat to average??")
        return fiberflats[0]

    # all inputs must share the wavelength grid of the first one
    for fflat in fiberflats[1:]:
        if not np.allclose(fiberflats[0].wave, fflat.wave):
            message = "fiberflats do not have the same wavelength arrays"
            log.critical(message)
            raise ValueError(message)
    wave = fiberflats[0].wave

    fiberflat = None
    ivar = None
    if len(fiberflats) > 2:
        # with 3 or more inputs, a masked median is robust to outliers
        log.info("{} fiberflat to average, use masked median".format(
            len(fiberflats)))
        tmp_fflat = []
        tmp_ivar = []
        tmp_mask = []
        for tmp in fiberflats:
            tmp_fflat.append(tmp.fiberflat)
            tmp_ivar.append(tmp.ivar)
            tmp_mask.append(tmp.mask)
        fiberflat = masked_median(np.array(tmp_fflat), np.array(tmp_mask))
        ivar = np.sum(np.array(tmp_ivar), axis=0)
        ivar *= 2. / np.pi  # penalty for using a median instead of a mean
    else:
        # with only 2 inputs a median is meaningless: ivar-weighted mean
        log.info("{} fiberflat to average, use weighted mean".format(
            len(fiberflats)))
        sw = None
        swf = None
        for tmp in fiberflats:
            # masked pixels get zero weight
            w = (tmp.ivar) * (tmp.mask == 0)
            if sw is None:
                sw = w
                swf = w * tmp.fiberflat
            else:
                sw += w
                swf += w * tmp.fiberflat
        # the (sw == 0) term avoids division by zero where all inputs
        # are masked; the ratio is then 0/1 = 0 there
        fiberflat = swf / (sw + (sw == 0))
        ivar = sw

    # combined mask: a bit stays set only if set in every input, i.e.
    # mask=0 on any pixel where at least one fiberflat has mask=0
    mask = None
    for tmp in fiberflats:
        if mask is None:
            # copy: the loop below mutates mask in place, and without the
            # copy it would corrupt the caller's first fiberflat
            mask = tmp.mask.copy()
        else:
            ii = (mask > 0) & (tmp.mask > 0)
            mask[ii] |= tmp.mask[ii]
            mask[tmp.mask == 0] = 0

    return FiberFlat(wave,
                     fiberflat,
                     ivar,
                     mask,
                     header=fiberflats[0].header,
                     fibers=fiberflats[0].fibers,
                     spectrograph=fiberflats[0].spectrograph)
Exemplo n.º 2
0
def main(args=None):
    """Average several preprocessed images into a single output image.

    Reads the preprocessed images listed in ``args.infile``, rejects
    outlier pixels by comparing each image to a masked median image,
    averages the surviving pixels (ivar-weighted or unweighted mean),
    combines the masks, and writes the result to ``args.outfile``.

    Args:
        args: argparse namespace from :func:`parse`; if None, parsed
            from the command line.
    """

    log = get_logger()

    if args is None:
        args = parse()

    first_preproc = None
    images = []
    ivars = []
    masks = []

    log.info("read inputs ...")
    for filename in args.infile:
        log.info(" read {}".format(filename))
        tmp = read_image(filename)
        if first_preproc is None:
            first_preproc = tmp
        images.append(tmp.pix.ravel())
        # make sure we don't include data with ivar=0
        mask = tmp.mask + (tmp.ivar == 0).astype(int)
        masks.append(mask.ravel())
        ivars.append((tmp.ivar * (tmp.mask == 0)).ravel())

    images = np.array(images)
    masks = np.array(masks)
    ivars = np.array(ivars)

    log.info("compute masked median image ...")
    medimage = masked_median(images, masks)

    log.info("use masked median image to discard outlier ...")
    # chi2-like test per pixel against the median image; the error budget
    # is a fractional model error (fracerr) plus the pixel variance.
    # The cut must be at nsig**2 for a squared quantity (was nsig**3).
    good = ((images - medimage)**2 * (ivars > 0) /
            ((medimage > 0) * (medimage * args.fracerr)**2 + 1. /
             (ivars + (ivars == 0)))) < args.nsig**2
    ivars *= good.astype(float)

    if args.weighted:
        log.info("compute weighted mean ...")
        sw = np.sum(ivars, axis=0)
        swf = np.sum(ivars * images, axis=0)
        # (sw == 0) term avoids division by zero where everything is masked
        meanimage = swf / (sw + (sw == 0))
        meanivar = sw
    else:
        log.info("compute unweighted mean ...")
        s1 = np.sum((ivars > 0).astype(int), axis=0)
        sf = np.sum((ivars > 0) * images, axis=0)
        meanimage = sf / (s1 + (s1 == 0))
        # variance of the unweighted mean: per-pixel sum over inputs of
        # the individual variances, divided by N**2.  The axis=0 was
        # missing, which summed over all pixels into a single scalar.
        meanvar = np.sum(
            (ivars > 0) / (ivars + (ivars == 0)),
            axis=0) / (s1 + (s1 == 0))**2
        meanivar = (meanvar > 0) / (meanvar + (meanvar == 0))
        log.info("write nimages.fits ...")
        fitsio.write("nimages.fits",
                     s1.reshape(first_preproc.pix.shape),
                     clobber=True)

    log.info("compute mask ...")
    # a mask bit survives only if it is set in every input
    meanmask = masks[0]
    for mask in masks[1:]:
        meanmask &= mask

    log.info("write image ...")
    preproc = first_preproc
    shape = preproc.pix.shape
    preproc.pix = meanimage.reshape(shape)
    preproc.ivar = meanivar.reshape(shape)
    preproc.mask = meanmask.reshape(shape)

    write_image(args.outfile, preproc)
    log.info("wrote {}".format(args.outfile))
Exemplo n.º 3
0
def average_fiberflat(fiberflats):
    """Average several fiberflats into one FiberFlat.

    Args:
        fiberflats : list of `desispec.FiberFlat` objects defined on the
            same wavelength grid.

    Returns:
        desispec.FiberFlat object (averaged flat, combined ivar and mask;
        header/fibers/spectrograph from the first input).

    Raises:
        ValueError: empty input list or mismatched wavelength grids.
    """

    log=get_logger()
    log.info("starting")

    if len(fiberflats) == 0 :
        message = "input fiberflat list is empty"
        log.critical(message)
        raise ValueError(message)
    if len(fiberflats) == 1 :
        log.warning("only one fiberflat to average??")
        return fiberflats[0]

    # check wavelength range: all inputs must match the first one
    for fflat in fiberflats[1:] :
        if not np.allclose(fiberflats[0].wave, fflat.wave):
            message = "fiberflats do not have the same wavelength arrays"
            log.critical(message)
            raise ValueError(message)
    wave = fiberflats[0].wave

    fiberflat = None
    ivar      = None
    if len(fiberflats) > 2 :
        # 3+ inputs: masked median is robust to outliers
        log.info("{} fiberflat to average, use masked median".format(len(fiberflats)))
        tmp_fflat = []
        tmp_ivar  = []
        tmp_mask  = []
        for tmp in fiberflats :
            tmp_fflat.append(tmp.fiberflat)
            tmp_ivar.append(tmp.ivar)
            tmp_mask.append(tmp.mask)
        fiberflat = masked_median(np.array(tmp_fflat),np.array(tmp_mask))
        ivar      = np.sum(np.array(tmp_ivar),axis=0)
        ivar     *= 2./np.pi # penalty for using a median instead of a mean
    else :
        # exactly 2 inputs: ivar-weighted mean (median is meaningless)
        log.info("{} fiberflat to average, use weighted mean".format(len(fiberflats)))
        sw=None
        swf=None
        for tmp in fiberflats :
            w   = (tmp.ivar)*(tmp.mask==0) # masked pixels get zero weight
            if sw is None :
                sw   = w
                swf  = w*tmp.fiberflat
            else :
                sw  += w
                swf += w*tmp.fiberflat
        # (sw==0) term avoids division by zero where all inputs are masked
        fiberflat = swf/(sw+(sw==0))
        ivar      = sw

    # combined mask: a bit stays set only if set in every input, i.e.
    # mask=0 on pixels where at least one fiberflat has mask=0
    mask=None
    for tmp in fiberflats :
        if mask is None :
            # copy: the in-place updates below must not corrupt the
            # caller's first fiberflat
            mask = tmp.mask.copy()
        else :
            ii=(mask>0)&(tmp.mask>0)
            mask[ii] |= tmp.mask[ii]
            mask[tmp.mask==0] = 0

    return FiberFlat(wave,fiberflat,ivar,mask,
                     header=fiberflats[0].header,
                     fibers=fiberflats[0].fibers,
                     spectrograph=fiberflats[0].spectrograph)
Exemplo n.º 4
0
def compute_dark_file(rawfiles,
                      outfile,
                      camera,
                      bias=None,
                      nocosmic=False,
                      scale=False,
                      exptime=None):
    """
    Compute classic dark model from input dark images

    Args:
        rawfiles (list of str): list of input raw data files (desi-*.fits.fz)
        outfile (str): output file with dark model to write
        camera (str): camera to process, e.g. b0, r1, z9

    Options:
        bias (str or list): bias file to use, or list of bias files
        nocosmic (bool): use medians instead of cosmic identification
        scale (bool): apply scale correction for EM0 teststand data
        exptime (float): write EXPTIME header keyword; all inputs must match

    Note: if bias is None, no bias correction is applied.  If it is a single
    file, then use that bias for all darks.  If it is a list, it must have
    len(rawfiles) and gives the per-file bias to use.

    Note: this computes a classic dark model without any non-linear terms.
    see bin/compute_dark_nonlinear for current DESI dark model.

    TODO: separate algorithm from I/O
    """
    log = get_logger()
    log.info("read images ...")

    shape = None
    images = []
    first_image_header = None
    # with nocosmic the masked median handles outliers, so no masks needed
    if nocosmic:
        masks = None
    else:
        masks = []

    for ifile, filename in enumerate(rawfiles):
        log.info(f'Reading (unknown) camera {camera}')

        # collect exposure times
        fitsfile = pyfits.open(filename)
        primary_header = fitsfile[0].header
        if not "EXPTIME" in primary_header:
            primary_header = fitsfile[1].header
        if "EXPREQ" in primary_header:
            thisexptime = primary_header["EXPREQ"]
            log.warning(
                "Using EXPREQ and not EXPTIME, because a more accurate quantity on teststand"
            )
        else:
            thisexptime = primary_header["EXPTIME"]

        flavor = primary_header['FLAVOR'].upper()
        if flavor != 'DARK':
            message = f'Input (unknown) flavor {flavor} != DARK'
            log.error(message)
            raise ValueError(message)

        if exptime is not None:
            # rounded comparison tolerates sub-second jitter in EXPTIME
            if round(exptime) != round(thisexptime):
                message = f'Input (unknown) exptime {thisexptime} != requested exptime {exptime}'
                log.error(message)
                raise ValueError(message)

        if first_image_header is None:
            first_image_header = fitsfile[camera].header

        fitsfile.close()

        # resolve the per-file bias: a single str applies to all files,
        # a sequence gives one bias per raw file
        if bias is not None:
            if isinstance(bias, str):
                thisbias = bias
            # np.ndarray is the array *type*; np.array is a function and
            # isinstance() would raise TypeError on it
            elif isinstance(bias, (list, tuple, np.ndarray)):
                thisbias = bias[ifile]
            else:
                message = 'bias should be None, str, list, or tuple, not {}'.format(
                    type(bias))
                log.error(message)
                raise RuntimeError(message)
        else:
            thisbias = False

        # read raw data and preprocess them
        img = io.read_raw(filename,
                          camera,
                          bias=thisbias,
                          nocosmic=nocosmic,
                          mask=False,
                          dark=False,
                          pixflat=False)

        # propagate gains to first_image_header
        if 'GAINA' in img.meta and 'GAINA' not in first_image_header:
            first_image_header['GAINA'] = img.meta['GAINA']
            first_image_header['GAINB'] = img.meta['GAINB']
            first_image_header['GAINC'] = img.meta['GAINC']
            first_image_header['GAIND'] = img.meta['GAIND']

        if shape is None:
            shape = img.pix.shape
        log.info("adding dark %s divided by exposure time %f s" %
                 (filename, thisexptime))
        # normalize each dark to electrons per second
        images.append(img.pix.ravel() / thisexptime)
        if masks is not None:
            masks.append(img.mask.ravel())

    images = np.array(images)
    if masks is not None:
        masks = np.array(masks)
        smask = np.sum(masks, axis=0)
    else:
        smask = np.zeros(images[0].shape)

    log.info("compute median image ...")
    medimage = masked_median(images, masks)

    if scale:
        log.info("compute a scale per image ...")
        sm2 = np.sum((smask == 0) * medimage**2)
        # kept for the commented-out median-ratio alternative below
        ok = (medimage > 0.6 * np.median(medimage)) * (smask == 0)
        # iterate over the image arrays, not the raw filenames: the scale
        # is a projection of each image onto the median image
        for i, image in enumerate(images):
            s = np.sum((smask == 0) * medimage * image) / sm2
            #s=np.median(image[ok]/medimage[ok])
            log.info("image %d scale = %f" % (i, s))
            images[i] /= s
        log.info("recompute median image after scaling ...")
        medimage = masked_median(images, masks)

    if True:
        log.info("compute mask ...")
        # sigma-clip at nsig using 1.4826*MAD as a robust sigma estimate
        ares = np.abs(images - medimage)
        nsig = 4.
        mask = (ares < nsig * 1.4826 * np.median(ares, axis=0))
        # average (not median)
        log.info("compute average ...")
        meanimage = np.sum(images * mask, axis=0) / np.sum(mask, axis=0)
        meanimage = meanimage.reshape(shape)
    else:
        meanimage = medimage.reshape(shape)

    log.info("write result in %s ..." % outfile)
    hdulist = pyfits.HDUList([pyfits.PrimaryHDU(meanimage.astype('float32'))])

    # copy some keywords
    for key in [
            "TELESCOP",
            "INSTRUME",
            "SPECGRPH",
            "SPECID",
            "DETECTOR",
            "CAMERA",
            "CCDNAME",
            "CCDPREP",
            "CCDSIZE",
            "CCDTEMP",
            "CPUTEMP",
            "CASETEMP",
            "CCDTMING",
            "CCDCFG",
            "SETTINGS",
            "VESSEL",
            "FEEVER",
            "FEEBOX",
            "PRESECA",
            "PRRSECA",
            "DATASECA",
            "TRIMSECA",
            "BIASSECA",
            "ORSECA",
            "CCDSECA",
            "DETSECA",
            "AMPSECA",
            "PRESECB",
            "PRRSECB",
            "DATASECB",
            "TRIMSECB",
            "BIASSECB",
            "ORSECB",
            "CCDSECB",
            "DETSECB",
            "AMPSECB",
            "PRESECC",
            "PRRSECC",
            "DATASECC",
            "TRIMSECC",
            "BIASSECC",
            "ORSECC",
            "CCDSECC",
            "DETSECC",
            "AMPSECC",
            "PRESECD",
            "PRRSECD",
            "DATASECD",
            "TRIMSECD",
            "BIASSECD",
            "ORSECD",
            "CCDSECD",
            "DETSECD",
            "AMPSECD",
            "DAC0",
            "DAC1",
            "DAC2",
            "DAC3",
            "DAC4",
            "DAC5",
            "DAC6",
            "DAC7",
            "DAC8",
            "DAC9",
            "DAC10",
            "DAC11",
            "DAC12",
            "DAC13",
            "DAC14",
            "DAC15",
            "DAC16",
            "DAC17",
            "CLOCK0",
            "CLOCK1",
            "CLOCK2",
            "CLOCK3",
            "CLOCK4",
            "CLOCK5",
            "CLOCK6",
            "CLOCK7",
            "CLOCK8",
            "CLOCK9",
            "CLOCK10",
            "CLOCK11",
            "CLOCK12",
            "CLOCK13",
            "CLOCK14",
            "CLOCK15",
            "CLOCK16",
            "CLOCK17",
            "CLOCK18",
            "OFFSET0",
            "OFFSET1",
            "OFFSET2",
            "OFFSET3",
            "OFFSET4",
            "OFFSET5",
            "OFFSET6",
            "OFFSET7",
            "DELAYS",
            "CDSPARMS",
            "PGAGAIN",
            "OCSVER",
            "DOSVER",
            "CONSTVER",
            "GAINA",
            "GAINB",
            "GAINC",
            "GAIND",
    ]:
        if key in first_image_header:
            hdulist[0].header[key] = (first_image_header[key],
                                      first_image_header.comments[key])

    if exptime is not None:
        hdulist[0].header['EXPTIME'] = exptime

    hdulist[0].header["BUNIT"] = "electron/s"
    hdulist[0].header["EXTNAME"] = "DARK"

    # record provenance: one INPUTnnn keyword per raw file
    for i, filename in enumerate(rawfiles):
        hdulist[0].header["INPUT%03d" % i] = os.path.basename(filename)

    hdulist.writeto(outfile, overwrite=True)
    log.info(f"Wrote {outfile}")

    log.info("done")