Example 1
def test_instantiate(deimos_flat_files, kast_blue_bias_files):
    one_file = deimos_flat_files[0]
    spectrograph = load_spectrograph('keck_deimos')
    # DEIMOS
    det = 3
    rawImage = rawimage.RawImage(one_file, spectrograph, det)
    # Test
    assert rawImage.datasec_img.shape == (1, 4096, 2128), 'Wrong shape'

    # Kast blue
    det2 = 1
    one_file = kast_blue_bias_files[0]
    spectrograph2 = load_spectrograph('shane_kast_blue')
    rawImage2 = rawimage.RawImage(one_file, spectrograph2, det2)
    assert rawImage2.image.shape == (1, 350, 2112), 'Wrong shape'
Example 2
def test_instantiate_from_one(shane_kast_blue_sci_files):
    """
    Run on a single science frame
    """
    #
    det = 1
    # Load calibrations
    pixelflat = load_kast_blue_masters(pixflat=True)[0]
    bpm = kast_blue.empty_bpm(shane_kast_blue_sci_files[0], det)
    # Process steps
    bias = None
    par = kast_par['scienceframe']['process']
    process_steps = procimg.init_process_steps(bias, par)
    process_steps += ['trim', 'apply_gain', 'orient']
    process_steps += ['flatten']
    process_steps += ['extras']
    process_steps += ['crmask']
    # Load
    rawImage = rawimage.RawImage(shane_kast_blue_sci_files[0], kast_blue, det)
    processRawImage = processrawimage.ProcessRawImage(rawImage,
                                                      kast_par['scienceframe']['process'])
    pypeItImage = processRawImage.process(process_steps, pixel_flat=pixelflat)
    # Do it
    sciImg = scienceimage.ScienceImage(kast_blue, det, kast_par['scienceframe']['process'],
                                       pypeItImage.image, pypeItImage.ivar, bpm)
Example 3
def test_instantiate(deimos_flat_files, kast_blue_bias_files):
    one_file = deimos_flat_files[0]
    spectrograph = load_spectrograph('keck_deimos')
    # DEIMOS
    det = 3
    rawImage = rawimage.RawImage(one_file, spectrograph, det)
    # Test
    assert isinstance(rawImage.image, np.ndarray)
    assert rawImage.datasec_img.shape == (4096, 2128)

    # Kast blue
    det2 = 1
    one_file = kast_blue_bias_files[0]
    spectrograph2 = load_spectrograph('shane_kast_blue')
    rawImage2 = rawimage.RawImage(one_file, spectrograph2, det2)
    assert isinstance(rawImage2.image, np.ndarray)
Example 4
def test_continuum_subtraction(kast_blue_arc_file):
    one_file = kast_blue_arc_file[0]
    spectrograph = load_spectrograph('shane_kast_blue')
    # Kast
    det = 1
    rawImage = rawimage.RawImage(one_file, spectrograph, det)
    defpar = spectrograph.default_pypeit_par()['calibrations']['arcframe']['process']
    defpar['use_continuum'] = True
    rawImage.par = defpar
    # Subtract continuum
    rawImage.subtract_continuum(force=True)
    # Test
    assert rawImage.steps['subtract_continuum']
Example 5
def process_raw_for_jfh(filename,
                        spectrograph,
                        det=1,
                        proc_par=None,
                        process_steps=None,
                        bias=None):
    """
    Process an input raw frame for JFH

    Args:
        filename (str):
            File to process
        spectrograph (:class:`pypeit.spectrographs.spectrograph.Spectrograph`):
            Spectrograph used to take the data.
        proc_par (:class:`pypeit.par.pypeitpar.ProcessImagesPar`):
            Parameters that dictate the processing of the images.  See
            :class:`pypeit.par.pypeitpar.ProcessImagesPar` for the
            defaults.
        det (:obj:`int`, optional):
            The 1-indexed detector number to process.
        process_steps (list, optional):
            Processing steps.
        bias (str or np.ndarray or None):
            Bias image or command

    Returns:
        :class:`pypeit.images.pypeitimage.PypeItImage`:

    """
    # Setup
    if proc_par is None:
        par = spectrograph.default_pypeit_par()
        msgs.warn("Using the Processing parameters from scienceframe")
        proc_par = par['scienceframe']['process']
    if process_steps is None:
        process_steps = procimg.init_process_steps(bias, proc_par)
        process_steps += ['trim']
        process_steps += ['orient']
        process_steps += ['apply_gain']

    # Generate the rawImage
    rawImage = rawimage.RawImage(filename, spectrograph, det)

    # Now Process
    processRawImage = ProcessRawImage(rawImage, proc_par)
    pypeitImage = processRawImage.process(process_steps, bias=bias)

    # Return
    return pypeitImage
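
A hedged call sketch for the function above. The file path is a placeholder and the import path for load_spectrograph is an assumption about the package layout; everything else relies on the defaults handled inside the function (science-frame parameters, trim/orient/gain steps, no bias).

from pypeit.spectrographs.util import load_spectrograph  # assumed import path

spec = load_spectrograph('shane_kast_blue')
# 'raw_frame.fits' stands in for a real raw science frame on disk
pypeitImage = process_raw_for_jfh('raw_frame.fits', spec, det=1)
print(pypeitImage.image.shape)
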
Example 6
def test_instantiate_from_one(shane_kast_blue_sci_files):
    """
    Run on a single science frame
    """
    #
    det = 1
    # Load calibrations
    pixelflat = load_kast_blue_masters(pixflat=True)[0]
    bpm = kast_blue.empty_bpm(shane_kast_blue_sci_files[0], det)
    # Process steps -- Set in PypeItPar
    frame_par = kast_par['scienceframe']
    frame_par['process']['use_illumflat'] = False
    frame_par['process']['use_biasimage'] = False
    # Load
    rawImage = rawimage.RawImage(shane_kast_blue_sci_files[0], kast_blue, det)
    flatImages = flatfield.FlatImages(pixelflat_norm=pixelflat)
    pypeItImage = rawImage.process(frame_par['process'], flatimages=flatImages)
Example 7
def test_overscan_subtract(deimos_flat_files):
    one_file = deimos_flat_files[0]
    spectrograph = load_spectrograph('keck_deimos')
    # DEIMOS
    det = 3
    rawImage = rawimage.RawImage(one_file, spectrograph, det)
    rawImage.par = spectrograph.default_pypeit_par()['scienceframe']['process']
    # Overscan subtract
    pre_sub = rawImage.image.copy()
    rawImage.subtract_overscan()
    oscan = np.median(pre_sub - rawImage.image)
    assert np.isclose(oscan, 1001.2, rtol=0.01)
    # Trim
    rawImage.trim()
    # Test
    assert rawImage.steps['subtract_overscan']
    assert rawImage.steps['trim']
    assert rawImage.image.shape == (1, 4096, 2048)
Example 8
def test_overscan_subtract(deimos_flat_files):
    one_file = deimos_flat_files[0]
    spectrograph = load_spectrograph('keck_deimos')
    # DEIMOS
    det = 3
    rawImage = rawimage.RawImage(one_file, spectrograph, det)
    # Processing parameters (assumed: the spectrograph's default science-frame settings)
    par = spectrograph.default_pypeit_par()['scienceframe']['process']
    deimos_flat = processrawimage.ProcessRawImage(rawImage, par)
    # Overscan subtract
    pre_sub = deimos_flat.image.copy()
    _ = deimos_flat.subtract_overscan()
    oscan = np.median(pre_sub - deimos_flat.image)
    assert np.isclose(oscan, 1001.2, rtol=0.01)
    # Trim
    _ = deimos_flat.trim()
    # Test
    assert deimos_flat.steps['subtract_overscan']
    assert deimos_flat.steps['trim']
    assert deimos_flat.image.shape == (4096, 2048)
Example 9
    def process_one(self,
                    filename,
                    process_steps,
                    bias,
                    pixel_flat=None,
                    illum_flat=None,
                    bpm=None):
        """
        Process a single image

        Args:
            filename (str):
                File to process
            process_steps (list):
                List of processing steps
            bias (np.ndarray or None):
                Bias image
            pixel_flat (np.ndarray, optional):
                Flat image
            illum_flat (np.ndarray, optional):
                Illumination image
            bpm (np.ndarray, optional):
                Bad pixel mask

        Returns:
            :class:`pypeit.images.pypeitimage.PypeItImage`:

        """
        # Load raw image
        rawImage = rawimage.RawImage(filename, self.spectrograph, self.det)
        # Process
        processrawImage = processrawimage.ProcessRawImage(rawImage,
                                                          self.par,
                                                          bpm=bpm)
        processedImage = processrawImage.process(process_steps,
                                                 bias=bias,
                                                 pixel_flat=pixel_flat,
                                                 illum_flat=illum_flat)
        # Return
        return processedImage
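
The method above only touches self.spectrograph, self.det, and self.par, so it can be exercised without its parent class. A hedged sketch: process_one is assumed to be reachable as a plain function (e.g., pulled off its class), the file name is a placeholder, and the load_spectrograph import path is an assumption.

import types
from pypeit.spectrographs.util import load_spectrograph  # assumed import path

spec = load_spectrograph('shane_kast_blue')
par = spec.default_pypeit_par()['scienceframe']['process']
# Minimal stand-in for the parent object: only the three attributes the method reads
holder = types.SimpleNamespace(spectrograph=spec, det=1, par=par)
holder.process_one = types.MethodType(process_one, holder)

process_steps = ['trim', 'orient', 'apply_gain']
pypeitImage = holder.process_one('raw_frame.fits', process_steps, bias=None)  # placeholder file
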
Example 10
    def run(self, bias=None, flatimages=None, ignore_saturation=False, sigma_clip=True,
            bpm=None, sigrej=None, maxiters=5, slits=None, dark=None, combine_method='weightmean'):
        """
        Generate a PypeItImage from a list of images

        This may also generate the ivar, crmask, rn2img and mask

        Args:
            bias (:class:`pypeit.images.buildimage.BiasImage`, optional): Bias image
            flatimages (:class:`pypeit.flatfield.FlatImages`, optional):  For flat fielding
            dark (:class:`pypeit.images.buildimage.DarkImage`, optional): Dark image
            slits (:class:`pypeit.slittrace.SlitTraceSet`, optional): Slit object
            sigma_clip (bool, optional):
                Perform sigma clipping
            sigrej (int or float, optional): Rejection threshold for sigma clipping.
                 Code defaults to determining this automatically based on the number of images provided.
            maxiters (int, optional):
                Number of iterations for the clipping
            bpm (`numpy.ndarray`_, optional):
                Bad pixel mask.  Held in ImageMask
            ignore_saturation (:obj:`bool`, optional):
                If True, turn off the saturation flag in the individual images
                before stacking.  This avoids having such values set to 0,
                which for certain images (e.g., flat calibrations) can have
                unintended consequences.
            combine_method (str):
                Method to combine images.  Allowed options are 'weightmean'
                and 'median'.

        Returns:
            :class:`pypeit.images.pypeitimage.PypeItImage`:

        """
        # Loop on the files
        nimages = len(self.files)
        lampstat = []
        for kk, ifile in enumerate(self.files):
            # Load raw image
            rawImage = rawimage.RawImage(ifile, self.spectrograph, self.det)
            # Process
            pypeitImage = rawImage.process(self.par, bias=bias, bpm=bpm, dark=dark,
                                           flatimages=flatimages, slits=slits)
            # Are we all done?
            if nimages == 1:
                return pypeitImage
            elif kk == 0:
                # Get ready
                shape = (nimages, pypeitImage.image.shape[0], pypeitImage.image.shape[1])
                img_stack = np.zeros(shape)
                ivar_stack = np.zeros(shape)
                rn2img_stack = np.zeros(shape)
                crmask_stack = np.zeros(shape, dtype=bool)
                # Mask
                bitmask = imagebitmask.ImageBitMask()
                mask_stack = np.zeros(shape, bitmask.minimum_dtype(asuint=True))
            # Grab the lamp status
            lampstat += [self.spectrograph.get_lamps_status(pypeitImage.rawheadlist)]
            # Process
            img_stack[kk,:,:] = pypeitImage.image
            # Construct raw variance image and turn into inverse variance
            if pypeitImage.ivar is not None:
                ivar_stack[kk, :, :] = pypeitImage.ivar
            else:
                ivar_stack[kk, :, :] = 1.
            # Mask cosmic rays
            if pypeitImage.crmask is not None:
                crmask_stack[kk, :, :] = pypeitImage.crmask
            # Read noise squared image
            if pypeitImage.rn2img is not None:
                rn2img_stack[kk, :, :] = pypeitImage.rn2img
            # Final mask for this image
            # TODO This seems kludgy to me. Why not just pass ignore_saturation to process_one and ignore the saturation
            # when the mask is actually built, rather than untoggling the bit here
            if ignore_saturation:  # Important for calibrations as we don't want replacement by 0
                indx = pypeitImage.bitmask.flagged(pypeitImage.fullmask, flag=['SATURATION'])
                pypeitImage.fullmask[indx] = pypeitImage.bitmask.turn_off(
                    pypeitImage.fullmask[indx], 'SATURATION')
            mask_stack[kk, :, :] = pypeitImage.fullmask

        # Check that the lamps being combined are all the same:
        if not lampstat[1:] == lampstat[:-1]:
            msgs.warn("The following files contain different lamp status")
            # Get the longest strings
            maxlen = max([len("Filename")]+[len(os.path.split(x)[1]) for x in self.files])
            maxlmp = max([len("Lamp status")]+[len(x) for x in lampstat])
            strout = "{0:" + str(maxlen) + "}  {1:s}"
            # Print the messages
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)
            print(msgs.indent() + strout.format("Filename", "Lamp status"))
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)
            for ff, file in enumerate(self.files):
                print(msgs.indent() + strout.format(os.path.split(file)[1], " ".join(lampstat[ff].split("_"))))
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)

        # Coadd them
        weights = np.ones(nimages)/float(nimages)
        img_list = [img_stack]
        var_stack = utils.inverse(ivar_stack)
        var_list = [var_stack, rn2img_stack]
        if combine_method == 'weightmean':
            img_list_out, var_list_out, gpm, nused = combine.weighted_combine(
                weights, img_list, var_list, (mask_stack == 0),
                sigma_clip=sigma_clip, sigma_clip_stack=img_stack, sigrej=sigrej, maxiters=maxiters)
        elif combine_method == 'median':
            img_list_out = [np.median(img_stack, axis=0)]
            var_list_out = [np.median(var_stack, axis=0)]
            var_list_out += [np.median(rn2img_stack, axis=0)]
            gpm = np.ones_like(img_list_out[0], dtype='bool')
        else:
            msgs.error("Bad choice for combine.  Allowed options are 'median', 'weightmean'.")

        # Build the last one
        final_pypeitImage = pypeitimage.PypeItImage(img_list_out[0],
                                                    ivar=utils.inverse(var_list_out[0]),
                                                    bpm=pypeitImage.bpm,
                                                    rn2img=var_list_out[1],
                                                    crmask=np.logical_not(gpm),
                                                    detector=pypeitImage.detector,
                                                    PYP_SPEC=pypeitImage.PYP_SPEC)
        # Internals
        final_pypeitImage.rawheadlist = pypeitImage.rawheadlist
        final_pypeitImage.process_steps = pypeitImage.process_steps

        nonlinear_counts = self.spectrograph.nonlinear_counts(pypeitImage.detector,
                                                              apply_gain=self.par['apply_gain'])
        final_pypeitImage.build_mask(saturation=nonlinear_counts)
        # Return
        return final_pypeitImage
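
For intuition, here is a self-contained NumPy sketch of the uniform-weight mean that the 'weightmean' branch delegates to combine.weighted_combine, without the sigma clipping: a weighted mean of the image stack plus the standard sum(w_i^2 V_i) / (sum w_i)^2 propagation of the variance stack. This is illustrative only, with synthetic arrays, and is not the library routine itself.

import numpy as np

rng = np.random.default_rng(0)
nimages, ny, nx = 3, 4, 5
img_stack = rng.normal(100., 5., size=(nimages, ny, nx))   # synthetic processed frames
var_stack = np.full((nimages, ny, nx), 25.)                 # synthetic per-frame variance
gpm_stack = np.ones((nimages, ny, nx), dtype=bool)          # every pixel good in this toy case

weights = np.ones(nimages) / nimages
w = weights[:, None, None] * gpm_stack                      # masked pixels get zero weight
wsum = np.sum(w, axis=0)                                    # nonzero here because nothing is masked
comb_img = np.sum(w * img_stack, axis=0) / wsum             # weighted mean image
comb_var = np.sum(w**2 * var_stack, axis=0) / wsum**2       # variance of the weighted mean
print(comb_img.shape, float(comb_var.mean()))
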
Example 11
    def run(self, bias=None, flatimages=None, ignore_saturation=False, sigma_clip=True,
            bpm=None, sigrej=None, maxiters=5, slits=None, dark=None, combine_method='mean',
            mosaic=False):
        r"""
        Process and combine all images.

        All processing is performed by the
        :class:`~pypeit.images.rawimage.RawImage` class; see 
        :func:`~pypeit.images.rawimage.RawImage.process`.

        If there is only one file (see :attr:`files`), this simply processes the
        file and returns the result.
        
        If there are multiple files, all the files are processed and the
        processed images are combined based on the ``combine_method``, where the
        options are:

            - 'mean': If ``sigma_clip`` is True, this is a sigma-clipped mean;
              otherwise, this is a simple average.  The combination is done
              using :func:`~pypeit.core.combine.weighted_combine`.

            - 'median': This is a simple masked median (using
              `numpy.ma.median`_).

        The errors in the image are also propagated through the stacking
        procedure; however, this isn't a simple propagation of the inverse
        variance arrays.  The image processing produces arrays with individual
        components used to construct the variance model for an individual frame.
        See :ref:`image_proc` and :func:`~pypeit.procimg.variance_model` for a
        description of these arrays.  Briefly, the relevant arrays are the
        readnoise variance (:math:`V_{\rm rn}`), the "processing" variance
        (:math:`V_{\rm proc}`), and the image scaling (i.e., the flat-field
        correction) (:math:`s`).  The variance calculation for the stacked image
        directly propagates the error in these.  For example, the propagated
        processing variance (modulo the masking) is:

        .. math::

            V_{\rm proc,stack} = \frac{\sum_i s_i^2 V_{{\rm proc},i}}{s_{\rm stack}^2}

        where :math:`s_{\rm stack}` is the combined image scaling array,
        combined in the same way as the image data are combined.  This ensures
        that the reconstruction of the uncertainty in the combined image
        calculated using :func:`~pypeit.procimg.variance_model` accurately
        includes, e.g., the processing uncertainty.

        The uncertainty in the combined image, however, recalculates the
        variance model, using the combined image (which should have less noise)
        to set the Poisson statistics.  The same parameters used when processing
        the individual frames are applied to the combined frame; see
        :func:`~pypeit.images.rawimage.RawImage.build_ivar`.  This calculation
        is then the equivalent of when the observed counts are replaced by the
        model object and sky counts during sky subtraction and spectral
        extraction.

        Bitmasks from individual frames in the stack are *not* propagated to the
        combined image, except to indicate when a pixel was masked for all
        images in the stack (cf., ``ignore_saturation``).  Additionally, the
        instrument-specific bad-pixel mask, see the
        :func:`~pypeit.spectrographs.spectrograph.Spectrograph.bpm` method for
        each instrument subclass, saturated-pixel mask, and other default mask
        bits (e.g., NaN and non-positive inverse variance values) are all
        propagated to the combined-image mask; see
        :func:`~pypeit.images.pypeitimage.PypeItImage.build_mask`.
        
        .. warning::

            All image processing of the data in :attr:`files` *must* result
            in images of the same shape.

        Args:
            bias (:class:`~pypeit.images.buildimage.BiasImage`, optional):
                Bias image for bias subtraction; passed directly to
                :func:`~pypeit.images.rawimage.RawImage.process` for all images.
            flatimages (:class:`~pypeit.flatfield.FlatImages`, optional):
                Flat-field images for flat fielding; passed directly to
                :func:`~pypeit.images.rawimage.RawImage.process` for all images.
            ignore_saturation (:obj:`bool`, optional):
                If True, turn off the saturation flag in the individual images
                before stacking.  This avoids having such values set to 0, which
                for certain images (e.g. flat calibrations) can have unintended
                consequences.
            sigma_clip (:obj:`bool`, optional):
                When ``combine_method='mean'``, sigma-clip the data;
                see :func:`~pypeit.core.combine.weighted_combine`.
            bpm (`numpy.ndarray`_, optional):
                Bad pixel mask; passed directly to
                :func:`~pypeit.images.rawimage.RawImage.process` for all images.
            sigrej (:obj:`float`, optional):
                When ``combine_method='mean'``, this sets the sigma-rejection
                thresholds used when sigma-clipping the image combination.
                Ignored if ``sigma_clip`` is False.  If None and ``sigma_clip``
                is True, the thresholds are determined automatically based on
                the number of images provided; see
                :func:`~pypeit.core.combine.weighted_combine`.
            maxiters (:obj:`int`, optional):
                When ``combine_method='mean'`` and sigma-clipping
                (``sigma_clip`` is True), this sets the maximum number of
                rejection iterations.  If None, rejection iterations continue
                until no more data are rejected; see
                :func:`~pypeit.core.combine.weighted_combine`.
            slits (:class:`~pypeit.slittrace.SlitTraceSet`, optional):
                Slit edge trace locations; passed directly to
                :func:`~pypeit.images.rawimage.RawImage.process` for all images.
            dark (:class:`~pypeit.images.buildimage.DarkImage`, optional):
                Dark-current image; passed directly to
                :func:`~pypeit.images.rawimage.RawImage.process` for all images.
            combine_method (:obj:`str`, optional):
                Method used to combine images.  Must be ``'mean'`` or
                ``'median'``; see above.
            mosaic (:obj:`bool`, optional):
                If multiple detectors are being processed, mosaic them into a
                single image.  See
                :func:`~pypeit.images.rawimage.RawImage.process`.

        Returns:
            :class:`~pypeit.images.pypeitimage.PypeItImage`: The combination of
            all the processed images.
        """
        # Check the input (i.e., bomb out *before* it does any processing)
        if self.nfiles == 0:
            msgs.error('Object contains no files to process!')
        if self.nfiles > 1 and combine_method not in ['mean', 'median']:
            msgs.error(f'Unknown image combination method, {combine_method}.  Must be '
                       '"mean" or "median".')

        # If not provided, generate the bpm for this spectrograph and detector.
        # Regardless of the file used, this must result in the same bpm, so we
        # just use the first one.
        # TODO: Why is this done here?  It's the same thing as what's done if
        # bpm is not passed to RawImage.process...
#        if bpm is None:
#            bpm = self.spectrograph.bpm(self.files[0], self.det)

        # Loop on the files
        for kk, ifile in enumerate(self.files):
            # Load raw image
            rawImage = rawimage.RawImage(ifile, self.spectrograph, self.det)
            # Process
            pypeitImage = rawImage.process(self.par, bias=bias, bpm=bpm, dark=dark,
                                           flatimages=flatimages, slits=slits, mosaic=mosaic)

            if self.nfiles == 1:
                # Only 1 file, so we're done
                pypeitImage.files = self.files
                return pypeitImage
            elif kk == 0:
                # Allocate arrays to collect data for each frame
                shape = (self.nfiles,) + pypeitImage.shape
                img_stack = np.zeros(shape, dtype=float)
                scl_stack = np.ones(shape, dtype=float)
                rn2img_stack = np.zeros(shape, dtype=float)
                basev_stack = np.zeros(shape, dtype=float)
                gpm_stack = np.zeros(shape, dtype=bool)
                lampstat = [None]*self.nfiles
                exptime = np.zeros(self.nfiles, dtype=float)

            # Save the lamp status
            lampstat[kk] = self.spectrograph.get_lamps_status(pypeitImage.rawheadlist)
            # Save the exposure time to check if it's consistent for all images.
            exptime[kk] = pypeitImage.exptime
            # Processed image
            img_stack[kk] = pypeitImage.image
            # Get the count scaling
            if pypeitImage.img_scale is not None:
                scl_stack[kk] = pypeitImage.img_scale
            # Read noise squared image
            if pypeitImage.rn2img is not None:
                rn2img_stack[kk] = pypeitImage.rn2img * scl_stack[kk]**2
            # Processing variance image
            if pypeitImage.base_var is not None:
                basev_stack[kk] = pypeitImage.base_var * scl_stack[kk]**2
            # Final mask for this image
            # TODO: This seems kludgy to me. Why not just pass ignore_saturation
            # to process_one and ignore the saturation when the mask is actually
            # built, rather than untoggling the bit here?
            if ignore_saturation:  # Important for calibrations as we don't want replacement by 0
                pypeitImage.update_mask('SATURATION', action='turn_off')
            # Get a simple boolean good-pixel mask for all the unmasked pixels
            gpm_stack[kk] = pypeitImage.select_flag(invert=True)

        # Check that the lamps being combined are all the same:
        if not lampstat[1:] == lampstat[:-1]:
            msgs.warn("The following files contain different lamp status")
            # Get the longest strings
            maxlen = max([len("Filename")]+[len(os.path.split(x)[1]) for x in self.files])
            maxlmp = max([len("Lamp status")]+[len(x) for x in lampstat])
            strout = "{0:" + str(maxlen) + "}  {1:s}"
            # Print the messages
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)
            print(msgs.indent() + strout.format("Filename", "Lamp status"))
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)
            for ff, file in enumerate(self.files):
                print(msgs.indent()
                      + strout.format(os.path.split(file)[1], " ".join(lampstat[ff].split("_"))))
            print(msgs.indent() + '-'*maxlen + "  " + '-'*maxlmp)

        # Do a similar check for exptime
        if np.any(np.absolute(np.diff(exptime)) > 0):
            # TODO: This should likely throw an error instead!
            msgs.warn('Exposure time is not consistent for all images being combined!  '
                      'Using the average.')
            comb_texp = np.mean(exptime)
        else:
            comb_texp = exptime[0]

        # Coadd them
        if combine_method == 'mean':
            weights = np.ones(self.nfiles, dtype=float)/self.nfiles
            img_list_out, var_list_out, gpm, nstack \
                    = combine.weighted_combine(weights,
                                               [img_stack, scl_stack],  # images to stack
                                               [rn2img_stack, basev_stack], # variances to stack
                                               gpm_stack, sigma_clip=sigma_clip,
                                               sigma_clip_stack=img_stack,  # clipping based on img
                                               sigrej=sigrej, maxiters=maxiters)
            comb_img, comb_scl = img_list_out
            comb_rn2, comb_basev = var_list_out
            comb_rn2[gpm] /= comb_scl[gpm]**2
            comb_basev[gpm] /= comb_scl[gpm]**2
        elif combine_method == 'median':
            bpm_stack = np.logical_not(gpm_stack)
            nstack = np.sum(gpm_stack, axis=0)
            gpm = nstack > 0
            comb_img = np.ma.median(np.ma.MaskedArray(img_stack, mask=bpm_stack),axis=0).filled(0.)
            # TODO: I'm not sure if this is right.  Maybe we should just take
            # the masked average scale instead?
            comb_scl = np.ma.median(np.ma.MaskedArray(scl_stack, mask=bpm_stack),axis=0).filled(0.)
            # First calculate the error in the sum.  The variance is set to 0
            # for pixels masked in all images.
            comb_rn2 = np.ma.sum(np.ma.MaskedArray(rn2img_stack, mask=bpm_stack),axis=0).filled(0.)
            comb_basev = np.ma.sum(np.ma.MaskedArray(basev_stack, mask=bpm_stack),axis=0).filled(0.)
            # Convert to standard error in the median (pi/2 factor relates standard variance
            # in mean (sum(variance_i)/n^2) to standard variance in median)
            comb_rn2[gpm] *= np.pi/2/nstack[gpm]**2/comb_scl[gpm]**2
            comb_basev[gpm] *= np.pi/2/nstack[gpm]**2/comb_scl[gpm]**2
        else:
            # NOTE: Given the check at the beginning of the function, the code
            # should *never* make it here.
            msgs.error("Bad choice for combine.  Allowed options are 'median', 'mean'.")

        # Recompute the inverse variance using the combined image
        comb_var = procimg.variance_model(comb_basev,
                                          counts=comb_img if self.par['shot_noise'] else None,
                                          count_scale=comb_scl,
                                          noise_floor=self.par['noise_floor'])

        # Build the combined image
        comb = pypeitimage.PypeItImage(image=comb_img, ivar=utils.inverse(comb_var), nimg=nstack,
                                       amp_img=pypeitImage.amp_img, det_img=pypeitImage.det_img,
                                       rn2img=comb_rn2, base_var=comb_basev, img_scale=comb_scl,
                                       bpm=np.logical_not(gpm).astype(np.uint8),
                                       # NOTE: The detector is needed here so
                                       # that we can get the dark current later.
                                       detector=pypeitImage.detector,
                                       PYP_SPEC=self.spectrograph.name,
                                       units='e-' if self.par['apply_gain'] else 'ADU',
                                       exptime=comb_texp, noise_floor=self.par['noise_floor'],
                                       shot_noise=self.par['shot_noise'])

        # Internals
        # TODO: Do we need these?
        comb.files = self.files
        comb.rawheadlist = pypeitImage.rawheadlist
        comb.process_steps = pypeitImage.process_steps

        # Build the base level mask
        comb.build_mask(saturation='default', mincounts='default')

        # Flag all pixels with no contributions from any of the stacked images.
        comb.update_mask('STCKMASK', indx=np.logical_not(gpm))

        # Return
        return comb
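
A self-contained NumPy illustration of the scale bookkeeping described in the docstring and implemented in the 'mean' branch above: each frame's base variance is multiplied by its scale squared going into the stack, the stacks are combined (here a plain uniform-weight mean, for which the variance of the mean is the mean of the variances divided by the number of frames), and the combined scale squared is divided back out. All arrays are synthetic and the combination is deliberately simplified; see combine.weighted_combine for the real routine.

import numpy as np

rng = np.random.default_rng(1)
nimages, ny, nx = 4, 8, 8
base_var = np.full((nimages, ny, nx), 9.0)                       # per-frame processing variance
img_scale = 1.0 + 0.05 * rng.normal(size=(nimages, ny, nx))      # per-frame flat-field scale

# Into the stack: scale each variance (cf. basev_stack[kk] = base_var * scl_stack[kk]**2)
basev_stack = base_var * img_scale**2

# Combine with a uniform-weight mean, so Var(mean) = mean(Var) / nimages
comb_basev = basev_stack.mean(axis=0) / nimages
comb_scl = img_scale.mean(axis=0)

# Out of the stack: divide by the combined scale squared (cf. comb_basev[gpm] /= comb_scl[gpm]**2)
comb_basev /= comb_scl**2
print(float(comb_basev.mean()))

# The 'median' branch instead sums the scaled variances over the stack and rescales by
# (pi/2) / nstack**2 / comb_scl**2, the large-n variance of a median relative to that of a mean.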