Example #1
    def __init__(self, type, shape=None, **kwargs):

        # Store input
        self.type = type
        self.shape = shape

        # Derive center of model shape
        if self.shape is None:
            self.center = (0, 0)
        else:
            self.center = ((self.shape[0] + 1) / 2, (self.shape[1] + 1) / 2)

        # Initialize model
        if 'gauss' in self.type.lower():
            if 'radius' in kwargs:
                # Copying the radius keyword argument into the proper Gaussian2D keywords
                kwargs['x_stddev'] = kwargs['radius']
                kwargs['y_stddev'] = kwargs['radius']
                del kwargs['radius']
            self.model = Gaussian2D(x_mean=self.center[0],
                                    y_mean=self.center[1],
                                    **kwargs)

        elif 'airy' in self.type.lower():
            self.model = AiryDisk2D(x_0=self.center[0],
                                    y_0=self.center[1],
                                    **kwargs)

        else:
            raise SpecklepyValueError('PSFModel',
                                      argname='type',
                                      argvalue=type,
                                      expected="either 'Gaussian' or 'Airy'")
Example #2
	def model_psf(self, model, radius, psf_resolution, shape=256, **kwargs):
		"""Models the PSF given the desired model function and kwargs.

		Args:
			model (str):
				Must be either 'airydisk' or 'gaussian'.
			radius (int, float, astropy.unit.Quantity):
				Radius of the PSF model, i.e. the radius of the first zero of an AiryDisk model or the standard
				deviation of the Gaussian model. Scalar values will be interpreted in units of arcseconds.
			psf_resolution (int, float, astropy.unit.Quantity):
				Resolution of the model PSF, equivalent to the pixel scale of the array. Scalar values will be
				interpreted in units of arcseconds.
			shape (int or tuple, optional):
				Size of the model PSF along both axes. An int is applied to both axes. Default is 256.
			kwargs are forwarded to the model function.
		"""

		# Check input parameters
		if not isinstance(model, str):
			raise SpecklepyTypeError('model_psf', 'model', type(model), 'str')

		if isinstance(radius, Quantity):
			self.radius = radius
		elif isinstance(radius, (int, float)):
			logger.warning(f"Interpreting scalar type radius as {radius} arcsec")
			self.radius = Quantity(f"{radius} arcsec")
		elif isinstance(radius, str):
			self.radius = Quantity(radius)
		else:
			raise SpecklepyTypeError('model_psf', 'radius', type(radius), 'Quantity')

		if isinstance(psf_resolution, Quantity):
			self.psf_resolution = psf_resolution
		elif isinstance(psf_resolution, (int, float)):
			logger.warning(f"Interpreting scalar type psf_resolution as {psf_resolution} arcsec")
			self.psf_resolution = Quantity(f"{psf_resolution} arcsec")
		elif isinstance(psf_resolution, str):
			self.psf_resolution = Quantity(psf_resolution)
		else:
			raise SpecklepyTypeError('model_psf', 'psf_resolution', type(psf_resolution), 'Quantity')

		if isinstance(shape, int):
			center = (shape / 2, shape / 2)
			shape = (shape, shape)
		elif isinstance(shape, tuple):
			center = (shape[0] / 2, shape[1] / 2)
		else:
			raise SpecklepyTypeError('model_psf', 'shape', type(shape), 'int or tuple')

		if model.lower() == 'airydisk':
			model = models.AiryDisk2D(x_0=center[0], y_0=center[1],
									  radius=float(self.radius / self.psf_resolution), **kwargs)
		elif model.lower() == 'gaussian':
			stddev = float(self.radius / self.psf_resolution)
			model = models.Gaussian2D(x_mean=center[0], y_mean=center[1], x_stddev=stddev, y_stddev=stddev, **kwargs)
		else:
			raise SpecklepyValueError('model_psf', 'model', model, "either 'airydisk' or 'gaussian'")

		y, x = np.mgrid[0:shape[0], 0:shape[1]]
		self.psf = model(x, y)
		self.psf = self.normalize(self.psf)
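A hedged usage sketch; the host object name psf_source is illustrative, and the method stores its result in self.psf:

# Hypothetical call: Gaussian PSF with a 0.1 arcsec standard deviation,
# sampled at 0.02 arcsec per pixel on a 256 x 256 grid.
psf_source.model_psf(model='gaussian', radius='0.1 arcsec',
                     psf_resolution='0.02 arcsec', shape=256)
# => x_stddev = y_stddev = 0.1 / 0.02 = 5 pixels, centered at (128, 128)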
Example #3
def imshow(image,
           title=None,
           norm=None,
           colorbar_label=None,
           saveto=None,
           maximize=False):
    """Shows a 2D image.

    Args:
        image (np.ndarray, ndim=2):
            Image to be plotted.
        title (str, optional):
            Plot title. Default is None.
        norm (str, optional):
            Can be set to 'log', for plotting in logarithmic scale. Default is
            None.
        colorbar_label (str, optional):
            Label of the color bar. Default is None.
        saveto (str, optional):
            Path to save the plot to. Default is None.
        maximize (bool, optional):
            Set true for showing the plot on full screen. Default is False.
    """

    if isinstance(image, np.ndarray):
        if image.ndim != 2:
            raise SpecklepyValueError('imshow()', 'image.ndim', image.ndim,
                                      '2')
        if isinstance(image, u.Quantity):
            unit = image.unit
            colorbar_label = "({})".format(unit)
            image = image.value
    else:
        raise SpecklepyTypeError('imshow()', 'image', type(image),
                                 'np.ndarray')

    if norm == 'log':
        norm = clrs.LogNorm()
    plt.figure()
    plt.imshow(image, norm=norm, origin='lower')
    plt.title(title)
    if maximize:
        maximize_plot()

    # Colorbar
    cbar = plt.colorbar(pad=0.0)
    if colorbar_label is not None:
        cbar.set_label(colorbar_label)

    if saveto is not None:
        plt.savefig(saveto, dpi=300)

    plt.show()
    plt.close()
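An illustrative call, assuming numpy is imported as np and the matplotlib helpers used above are available:

# Hypothetical example: show a random image on a logarithmic scale and save it.
import numpy as np
imshow(np.random.exponential(size=(128, 128)), title='Noise test',
       norm='log', colorbar_label='Counts', saveto='noise.png')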
Example #4
 def set_height(self, val):
     if isinstance(val, str):
         if val == 'text':
             val = 10
         elif val == 'column':
             val = 5
         else:
             raise SpecklepyValueError('Plot.set_height',
                                       argname='val',
                                       argvalue=val,
                                       expected="'text' or 'column'")
     self.figure.set_figheight(val=val)
Example #5
 def set_width(self, val):
     if isinstance(val, str):
         if val == 'text':
             val = 10
         elif val == 'column':
             val = 5
         else:
             raise SpecklepyValueError('Plot.set_width',
                                       argname='val',
                                       argvalue=val,
                                       expected="'text' or 'column'")
     elif not isinstance(val, (int, float)):
         raise SpecklepyTypeError('Plot.set_width',
                                  argname='val',
                                  argtype=type(val),
                                  expected='str or float')
     self.figure.set_figwidth(val=val)
Example #6
    def magnitude_to_flux(self, magnitude):
        """Convert magnitudes to flux values.

        Args:
            magnitude (int, float, np.ndarray, or Quantity):
                Magnitude value

        Returns:
            flux (Quantity):
                Brightness converted into flux units.
        """
        if isinstance(magnitude, (int, float, np.ndarray)):
            return 10**(magnitude / -2.5) * self.band_reference_flux
        elif isinstance(magnitude, Quantity):
            if magnitude.unit != Unit('mag'):
                raise SpecklepyValueError('magnitude_to_flux()',
                                          'magnitude unit', magnitude.unit,
                                          'mag')
            else:
                return 10**(magnitude.value / -2.5) * self.band_reference_flux
        else:
            raise SpecklepyTypeError('magnitude_to_flux()', 'magnitude',
                                     type(magnitude),
                                     'int, float, np.ndarray, or Quantity')
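A quick numeric check of the relation flux = 10**(-m / 2.5) * band_reference_flux implemented above; plain Python, values illustrative:

# Five magnitudes correspond to a factor of 100 in flux.
for m in (0.0, 2.5, 5.0):
    print(m, 10 ** (m / -2.5))  # 1.0, 0.1, 0.01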
Example #7
    def create_long_exposures(self, alignment_method):
        """Compute long exposures from the input data cubes."""

        # Initialize list of long exposure files
        long_exposure_files = []

        # Iterate over input data cubes
        for file in self.in_files:

            # Read data from file
            cube = fits.getdata(os.path.join(self.in_dir, file))
            image = None
            image_var = None

            # Compute collapsed or SSA'ed images from the cube
            if alignment_method == 'collapse':
                image = np.sum(cube, axis=0)
                tmp_file = 'int_' + os.path.basename(file)
            elif alignment_method == 'ssa':
                image, image_var = coadd_frames(cube=cube, box=self.box)
                tmp_file = 'ssa_' + os.path.basename(file)
            else:
                raise SpecklepyValueError('Reconstruction', 'alignment_method', alignment_method,
                                          expected="either 'collapse' or 'ssa'")

            # Store data to a new Outfile instance
            tmp_path = os.path.join(self.tmp_dir, tmp_file)
            logger.info(f"Saving temporary reconstruction of cube {file} to {tmp_path}")
            tmp_file_object = Outfile(tmp_path, data=image, verbose=True)
            if image_var is not None:
                tmp_file_object.new_extension(name=self.var_ext, data=image_var)

            # Add the recently created file to the list
            long_exposure_files.append(tmp_file)

        return long_exposure_files
Example #8
	def normalize(array, mode='sum_circular'):
		"""Normalizes the input array depending on the mode.

		Args:
			array (np.ndarray):
				Array to be normalized.
			mode (str, optional):
				Can be either 'sum' for a sum of 1, 'max' (alias 'peak') for a peak value of 1, or 'sum_circular' for
				subtracting a constant and then normalizing to a sum of 1. Default is 'sum_circular'.

		Returns:
			normalized (np.ndarray):
				Normalized array, according to mode.
		"""

		if not isinstance(array, np.ndarray):
			raise SpecklepyTypeError('normalize', 'array', type(array), 'np.ndarray')
		if np.sum(array) == 0:
			raise ValueError("Normalize received an array of zeros!")

		if mode not in ['sum', 'max', 'peak', 'sum_circular']:
			raise SpecklepyValueError('normalize', 'mode', mode, "'sum', 'max', 'peak', or 'sum_circular'")

		if mode == 'sum':
			normalized = array / np.sum(array)
		elif mode in ('max', 'peak'):
			normalized = array / np.max(array)
		elif mode == 'sum_circular':
			x, y = array.shape
			low_cut = array[0, int(y/2)]
			array = np.maximum(array - low_cut, 0)
			normalized = array / np.sum(array)
		else:
			normalized = None

		return normalized
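A runnable toy example of the 'sum' and 'max' modes (numpy only; assumes normalize is accessible as a plain function or static method):

import numpy as np
arr = np.array([[1.0, 3.0], [0.0, 4.0]])
print(normalize(arr, mode='sum'))  # [[0.125, 0.375], [0.0, 0.5]] -> sums to 1
print(normalize(arr, mode='max'))  # [[0.25, 0.75], [0.0, 1.0]] -> peak is 1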
Example #9
def holography(params, mode='same', debug=False):
    """Execute the holographic image reconstruction.

    The holographic image reconstruction is an algorithm as outlined, e.g., by Schoedel et al. (2013, Section 3). This
    function follows that algorithm, see comments in the code. Most of the important functions are imported from other
    modules of specklepy.

    Args:
        params (dict):
            Dictionary that carries all important parameters.
        mode (str, optional):
            Define the size of the output image as 'same' to the reference
            image or expanding to include the 'full' covered field. Default is
            'same'.
        debug (bool, optional):
            Set to True to inspect intermediate results.
            Default is False.

    Returns:
        image (np.ndarray): The image reconstruction.
    """

    logger.info(f"Starting holographic reconstruction...")
    file_archive = FileArchive(file_list=params['PATHS']['inDir'],
                               cards=[],
                               dtypes=[])
    in_files = file_archive.files
    in_dir = file_archive.in_dir
    tmp_dir = params['PATHS']['tmpDir']

    # Input check
    if mode not in ['same', 'full', 'valid']:
        raise SpecklepyValueError('holography()',
                                  argname='mode',
                                  argvalue=mode,
                                  expected="either 'same', 'full', or 'valid'")

    if 'apodizationType' in params['APODIZATION']:
        # Catch deprecated parameter name
        logger.warning(
            "Parameter 'apodizationType' is deprecated. Use 'type' instead!")
        params['APODIZATION']['type'] = params['APODIZATION'][
            'apodizationType']
    if 'apodizationWidth' in params['APODIZATION']:
        # Catch deprecated parameter name
        logger.warning(
            "Parameter 'apodizationWidth' is deprecated. Use 'radius' instead!"
        )
        params['APODIZATION']['radius'] = params['APODIZATION'][
            'apodizationWidth']
    if params['APODIZATION']['type'] is None or params['APODIZATION'][
            'type'].lower() not in ['gaussian', 'airy']:
        logger.error(
            f"Apodization type has not been set or is of an unsupported type ({params['APODIZATION']['type']})"
        )
    if params['APODIZATION']['radius'] is None or not isinstance(
            params['APODIZATION']['radius'], (int, float)):
        logger.error(
            f"Apodization radius has not been set or is of an unsupported type ({params['APODIZATION']['radius']})"
        )

    # Initialize the outfile
    out_file = ReconstructionFile(filename=params['PATHS']['outFile'],
                                  files=in_files,
                                  cards={"RECONSTRUCTION": "Holography"},
                                  in_dir=in_dir)

    # Initialize reconstruction
    reconstruction = Reconstruction(
        in_files=in_files,
        mode=mode,
        alignment_method='ssa',
        reference_image=params['PATHS']['alignmentReferenceFile'],
        in_dir=in_dir,
        tmp_dir=tmp_dir,
        out_file=params['PATHS']['outFile'],
        var_ext=params['OPTIONS']['varianceExtensionName'],
        box_indexes=params['OPTIONS']['box_indexes'],
        debug=debug)

    # (i-ii) Align cubes
    # shifts = get_shifts(files=in_files, reference_file=params['PATHS']['alignmentReferenceFile'],
    #                     lazy_mode=True, return_image_shape=False, in_dir=in_dir, debug=debug)
    shifts = reconstruction.shifts

    # (iii) Compute SSA reconstruction
    # image = ssa(in_files, mode=mode, outfile=out_file, in_dir=in_dir, tmp_dir=tmp_dir,
    #             variance_extension_name=params['OPTIONS']['varianceExtensionName'])
    image = reconstruction.coadd_long_exposures()
    if isinstance(image, tuple):
        # SSA returned a reconstruction image and a variance image
        image, image_var = image
    total_flux = np.sum(image)  # Stored for flux conservation

    # Start iteration from steps (iv) through (xi)
    while True:
        # (iv) Astrometry and photometry, i.e. StarFinder
        extract_sources(image=image,
                        fwhm=params['STARFINDER']['starfinderFwhm'],
                        noise_threshold=params['STARFINDER']['noiseThreshold'],
                        background_subtraction=True,
                        write_to=params['PATHS']['allStarsFile'],
                        star_finder='DAO',
                        debug=debug)

        # (v) Select reference stars
        print(
            "\tPlease copy your desired reference stars from the all stars file into the reference star file!"
        )
        input("\tWhen you are done, hit a ENTER.")

        # (vi) PSF extraction
        ref_stars = ReferenceStars(
            psf_radius=params['PSFEXTRACTION']['psfRadius'],
            reference_source_file=params['PATHS']['refSourceFile'],
            in_files=in_files,
            save_dir=tmp_dir,
            in_dir=in_dir,
            field_segmentation=params['PSFEXTRACTION']['fieldSegmentation'])
        if params['PSFEXTRACTION']['mode'].lower() == 'epsf':
            psf_files = ref_stars.extract_epsfs(file_shifts=shifts,
                                                debug=debug)
        elif params['PSFEXTRACTION']['mode'].lower() in [
                'mean', 'median', 'weighted_mean'
        ]:
            psf_files = ref_stars.extract_psfs(
                file_shifts=shifts,
                mode=params['PSFEXTRACTION']['mode'].lower(),
                debug=debug)
        else:
            raise RuntimeError(
                f"PSF extraction mode '{params['PSFEXTRACTION']['mode']}' is not understood!"
            )
        logger.info("Saved the extracted PSFs...")

        # (vii) Noise thresholding
        psf_noise_mask = None
        for file in psf_files:
            with fits.open(file, mode='update') as hdu_list:
                n_frames = hdu_list[0].header['NAXIS3']
                if psf_noise_mask is None:
                    psf_noise_mask = get_noise_mask(
                        hdu_list[0].data[0],
                        noise_reference_margin=params['PSFEXTRACTION']
                        ['noiseReferenceMargin'])
                for index in range(n_frames):
                    reference = np.ma.masked_array(hdu_list[0].data[index],
                                                   mask=psf_noise_mask)
                    background = np.mean(reference)
                    noise = np.std(reference)
                    update = np.maximum(
                        hdu_list[0].data[index] - background -
                        params['PSFEXTRACTION']['noiseThreshold'] * noise, 0.0)
                    if np.sum(update) == 0.0:
                        raise ValueError(
                            "After background subtraction and noise thresholding, no signal is leftover. "
                            "Please reduce the noiseThreshold!")
                    update = update / np.sum(update)  # Flux sum of order unity
                    hdu_list[0].data[index] = update
                    hdu_list.flush()

        # (viii) Subtraction of secondary sources within the reference apertures
        # TODO: Implement Secondary source subtraction
        pass

        # (ix) Estimate object, following Eq. 1 (Schoedel et al., 2013)
        f_object = FourierObject(in_files,
                                 psf_files,
                                 shifts=shifts,
                                 mode=mode,
                                 in_dir=in_dir)
        f_object.coadd_fft()

        # (x) Apodization
        f_object.apodize(type=params['APODIZATION']['type'],
                         radius=params['APODIZATION']['radius'])

        # (xi) Inverse Fourier transform to retain the reconstructed image
        image = f_object.ifft(total_flux=total_flux)

        # Inspect the latest reconstruction
        if debug:
            imshow(image)

        # Save the latest reconstruction image to outfile
        out_file.data = image

        # Ask the user whether the iteration shall be continued or not
        answer = input(
            "\tDo you want to continue with one more iteration? [yes/no]\n\t")
        if answer.lower() in ['n', 'no']:
            break

    # Repeat astrometry and photometry, i.e. StarFinder on final image
    extract_sources(image=image,
                    fwhm=params['STARFINDER']['starfinderFwhm'],
                    noise_threshold=params['STARFINDER']['noiseThreshold'],
                    background_subtraction=True,
                    write_to=params['PATHS']['allStarsFile'],
                    star_finder='DAO',
                    debug=debug)

    # Finally return the image
    return image
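A hedged sketch of the parameter dictionary this function expects; the section and key names are exactly those read in the code above, while all values are illustrative:

params = {
    'PATHS': {'inDir': 'data/', 'tmpDir': 'tmp/', 'outFile': 'holo.fits',
              'alignmentReferenceFile': 0, 'allStarsFile': 'all_stars.dat',
              'refSourceFile': 'ref_sources.dat'},
    'STARFINDER': {'starfinderFwhm': 5.0, 'noiseThreshold': 3.0},
    'PSFEXTRACTION': {'mode': 'median', 'psfRadius': 25, 'fieldSegmentation': None,
                      'noiseReferenceMargin': 2, 'noiseThreshold': 3.0},
    'APODIZATION': {'type': 'gaussian', 'radius': 2.0},
    'OPTIONS': {'varianceExtensionName': 'VAR', 'box_indexes': None},
}
image = holography(params, mode='same')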
Example #10
def get_shifts(files,
               reference_file=None,
               mode='correlation',
               lazy_mode=True,
               return_image_shape=False,
               in_dir=None,
               debug=False):
    """Computes the the relative shift of data cubes relative to a reference
    image.

    This function iterates over a list of files and uses the module function get_shift to compute the relative shifts
    of the files with respect to a reference file.

    Args:
        files (list or array_like):
            List of files to align.
        reference_file (str, int, optional):
            Path to a reference file or index of the file in files, relative to which the shifts are computed. Default
            is the first file in `files`.
        mode (str, optional):
            Mode of the shift estimate. In 'correlation' mode, a 2D correlation is used to estimate the shift of the
            array. This is computationally much more expensive than the (mutually identical) 'maximum' and 'peak'
            modes, which simply identify the coordinates of the emission peaks and return the difference. However,
            these modes may be fooled by reference sources of similar brightness. Passed to the get_shift() function.
            Default is 'correlation'.
        lazy_mode (bool, optional):
            Set to False, to enforce the alignment of a single file with respect to the reference file. Default is True.
        return_image_shape (bool, optional):
            Set to True to also return the shape of the anticipated output image. Default is False.
        in_dir (str, optional):
            Path to the files. `None` is substituted by an empty string.
        debug (bool, optional):
            Set to True to show the 2D correlation. Default is False.

    Returns:
        shifts (list):
            List of shifts for each file relative to the reference file.
    """

    # Check input parameters
    if not isinstance(files, (list, np.ndarray)):
        if isinstance(files, str):
            files = [files]
        else:
            raise SpecklepyTypeError('get_shifts()',
                                     argname='files',
                                     argtype=type(files),
                                     expected='list')

    if reference_file is None:
        reference_file = files[0]
    elif isinstance(reference_file, int):
        reference_file = files[reference_file]
    elif not isinstance(reference_file, str):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='reference_file',
                                 argtype=type(reference_file),
                                 expected='str')

    if isinstance(mode, str):
        if mode not in ['correlation', 'maximum', 'peak']:
            raise SpecklepyValueError(
                'get_shifts()',
                argname='mode',
                argvalue=mode,
                expected="'correlation', 'maximum' or 'peak'")
    else:
        raise SpecklepyTypeError('get_shifts()',
                                 argname='mode',
                                 argtype=type(mode),
                                 expected='str')

    if not isinstance(lazy_mode, bool):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='lazy_mode',
                                 argtype=type(lazy_mode),
                                 expected='bool')

    if not isinstance(return_image_shape, bool):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='return_image_shape',
                                 argtype=type(return_image_shape),
                                 expected='bool')

    if in_dir is None:
        in_dir = ''

    # Skip computations if only one file is provided
    if lazy_mode and len(files) == 1:
        logger.info("Only one data cube is provided, nothing to align.")
        shifts = [(0, 0)]
        image_shape = fits.getdata(os.path.join(in_dir, files[0])).shape
        image_shape = (image_shape[-2], image_shape[-1])

    # Otherwise estimate shifts
    else:
        shifts = []

        # Identify reference file and Fourier transform the integrated image
        logger.info(
            f"Computing relative shifts between data cubes. Reference file is {reference_file}"
        )
        reference_image = fits.getdata(os.path.join(in_dir, reference_file))
        if reference_image.ndim == 3:
            # Integrating over time axis if reference image is a cube
            reference_image = np.sum(reference_image, axis=0)
        f_reference_image = np.fft.fft2(reference_image)
        image_shape = reference_image.shape
        del reference_image

        # Iterate over files and estimate shift via 2D correlation of the integrated cubes
        for index, file in enumerate(files):
            if file == reference_file:
                shift = (0, 0)
            else:
                image = fits.getdata(os.path.join(in_dir, file))
                if image.ndim == 3:
                    image = np.sum(image, axis=0)
                shift = get_shift(image,
                                  reference_image=f_reference_image,
                                  is_fourier_transformed=True,
                                  mode=mode,
                                  debug=debug)
            shifts.append(shift)
            logger.info(f"Identified a shift of {shift} for file {file}")
        logger.info(f"Identified the following shifts:\n\t{shifts}")

    if return_image_shape:
        return shifts, image_shape
    else:
        return shifts
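An illustrative call (file names hypothetical):

# Align three cubes against the first one.
shifts = get_shifts(['cube_1.fits', 'cube_2.fits', 'cube_3.fits'],
                    reference_file=0, mode='correlation', in_dir='data/')
# e.g. [(0, 0), (3, -1), (-2, 4)]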
Example #11
def coadd_frames(cube, var_cube=None, box=None):
    """Compute the simple shift-and-add (SSA) reconstruction of a data cube.

    This function uses the SSA algorithm to coadd frames of a cube. If provided, this function coadds the variances
    within a var cube considering the exact same shifts.

    Args:
        cube (np.ndarray, ndim=3):
            Data cube which is integrated along the zero-th axis.
        var_cube (np.ndarray, ndim=3, optional):
            Data cube of variances which is integrated along the zero-th axis with the same shifts as the cube.
        box (Box object, optional):
            Constraining the search for the intensity peak to the specified box. Searching the full frames if not
            provided.

    Returns:
        coadded (np.ndarray, ndim=2):
            SSA-integrated frames of the input cube.
        var_coadded (np.ndarray, ndim=2):
            SSA-integrated variances of the input cube, or the variance map itself if provided as a 2D array.
    """

    if not isinstance(cube, np.ndarray):
        raise SpecklepyTypeError('coadd_frames()',
                                 argname='cube',
                                 argtype=type(cube),
                                 expected='np.ndarray')
    if cube.ndim != 3:
        raise SpecklepyValueError('coadd_frames()',
                                  argname='cube.ndim',
                                  argvalue=cube.ndim,
                                  expected='3')

    if var_cube is not None:
        if not isinstance(var_cube, np.ndarray):
            raise SpecklepyTypeError('coadd_frames()',
                                     argname='var_cube',
                                     argtype=type(var_cube),
                                     expected='np.ndarray')
        if var_cube.ndim == cube.ndim and var_cube.shape != cube.shape:
            raise SpecklepyValueError('coadd_frames()',
                                      argname='var_cube.shape',
                                      argvalue=str(var_cube.shape),
                                      expected=str(cube.shape))
        elif var_cube.ndim == cube.ndim - 1:
            if var_cube.shape[0] != cube.shape[1] or var_cube.shape[
                    1] != cube.shape[2]:
                raise SpecklepyValueError('coadd_frames()',
                                          argname='var_cube.shape',
                                          argvalue=str(var_cube.shape),
                                          expected=str(cube.shape))

    # Identify the intensity peak in each frame
    peak_indizes = np.zeros((cube.shape[0], 2), dtype=int)
    for index, frame in enumerate(cube):
        if box is not None:
            frame = box(frame)
        peak_indizes[index] = np.array(np.unravel_index(
            np.argmax(frame, axis=None), frame.shape),
                                       dtype=int)

    # Compute shifts from the peak indices
    peak_indizes = peak_indizes.transpose()
    xmean, ymean = np.mean(np.array(peak_indizes), axis=1)
    xmean = int(xmean)
    ymean = int(ymean)
    shifts = np.array([xmean - peak_indizes[0], ymean - peak_indizes[1]])
    shifts = shifts.transpose()

    # Shift frames and add to coadded
    coadded = np.zeros(cube[0].shape)
    pad_vectors, ref_pad_vector = alignment.get_pad_vectors(
        shifts, cube_mode=False, return_reference_image_pad_vector=True)
    for index, frame in enumerate(cube):
        coadded += alignment.pad_array(
            frame,
            pad_vectors[index],
            mode='same',
            reference_image_pad_vector=ref_pad_vector)

    # Coadd variance cube (if not an image itself)
    if var_cube is not None:
        if var_cube.ndim == 3:
            var_coadded = np.zeros(coadded.shape)
            for index, frame in enumerate(var_cube):
                var_coadded += alignment.pad_array(
                    frame,
                    pad_vectors[index],
                    mode='same',
                    reference_image_pad_vector=ref_pad_vector)
        elif var_cube.ndim == 2:
            var_coadded = var_cube
        else:
            raise RuntimeError(
                f"var_cube has unexpected shape: {var_cube.shape}")
    else:
        var_coadded = None

    return coadded, var_coadded
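A runnable toy check: a synthetic cube whose single bright pixel drifts by one pixel per frame should have its peaks re-aligned by the SSA coaddition (numpy only; the specklepy alignment helpers must be importable as in the snippet):

import numpy as np
cube = np.zeros((3, 16, 16))
for i in range(3):
    cube[i, 8 + i, 8] = 1.0  # peak drifts along the first image axis
coadded, var = coadd_frames(cube)
print(coadded.max())  # ~3.0: all three peaks stacked on the mean position (9, 8)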
Example #12
def extract_sources(image,
                    noise_threshold,
                    fwhm,
                    star_finder='DAO',
                    image_var=None,
                    background_subtraction=True,
                    write_to=None,
                    debug=False):
    """Extract sources from an image with a StarFinder routine.

    Args:
        image (np.ndarray or str):
            Image array or the name of a file containing the image array.
        noise_threshold (float):
            Multiple of the uncertainty / standard deviation of the image.
        fwhm (float):
            Expected full width at half maximum (FWHM) of the sources in units of pixels.
        star_finder (str, optional):
            Choose whether the 'DAO' or 'IRAF' StarFinder implementations from photutils shall be used. Default is
            'DAO'.
        image_var (float or str):
            Variance of the image used for the StarFinder threshold (=noise_threshold * sqrt(image_var)). If not
            provided, the code extracts this value from sigma clipped stats. If provided as str-type, the code tries to
            use this as a key to the FITS file HDU list.
        background_subtraction (bool, optional):
            Let the StarFinder consider the background subtraction. Set False for ignoring background flux. Default is
            `True`.
        write_to (str, optional):
            If provided as a str, the list of identified sources is saved to this file.
        debug (bool, optional):
            Show debugging information. Default is `False`.

    Returns:
        sources (astropy.table.Table): Table of identified sources, None if no
            sources are detected.
    """

    # Set logger level
    if debug:
        logger.setLevel('DEBUG')

    # Input parameters
    if isinstance(image, np.ndarray):
        filename = 'current cube'
    elif isinstance(image, str):
        logger.info(
            "The argument image '{}' is interpreted as a file name.".format(
                image))
        filename = image
        image = fits.getdata(filename)
        image = image.squeeze()
    else:
        raise SpecklepyTypeError('extract_sources()',
                                 argname='image',
                                 argtype=type(image),
                                 expected='np.ndarray or str')

    # Prepare noise statistics
    mean, median, std = sigma_clipped_stats(image, sigma=3.0)
    logger.info(
        f"Noise statistics for {filename}:\n\tMean = {mean:.3}\n\tMedian = {median:.3}\n\tStdDev = {std:.3}"
    )

    # Set detection threshold
    if image_var is None:
        threshold = noise_threshold * std
    else:
        if isinstance(image_var, str):
            # Try to load variance extension from file
            image_var = fits.getdata(filename, image_var)
            image_var = np.mean(image_var)
        threshold = noise_threshold * np.sqrt(image_var)

    # Set sky background
    if background_subtraction:
        logger.info(f"Considering mean sky background of {mean}")
        sky = mean
    else:
        sky = 0.0

    # Instantiate StarFinder object
    if not isinstance(star_finder, str):
        raise SpecklepyTypeError('extract_sources',
                                 argname='star_finder',
                                 argtype=type(star_finder),
                                 expected='str')
    if 'dao' in star_finder.lower():
        star_finder = DAOStarFinder(fwhm=fwhm, threshold=threshold, sky=sky)
    elif 'iraf' in star_finder.lower():
        star_finder = IRAFStarFinder(fwhm=fwhm, threshold=threshold, sky=sky)
    else:
        raise SpecklepyValueError('extract_sources',
                                  argname='star_finder',
                                  argvalue=star_finder,
                                  expected="'DAO' or 'IRAF'")

    # Find stars
    logger.info("Extracting sources...")
    sources = star_finder(image)

    # Reformatting sources table
    if sources is None:
        logger.info("No sources detected")
        return None
    sources.sort('flux', reverse=True)
    sources.rename_column('xcentroid', 'x')
    sources.rename_column('ycentroid', 'y')
    sources.keep_columns(['x', 'y', 'flux'])

    # Add terminal output
    logger.info(f"Extracted {len(sources)} sources")
    logger.debug(sources)

    # Save sources table to file, if requested
    if write_to is not None:
        logger.info("Writing list of sources to file {}".format(write_to))
        sources.write(write_to, format='ascii.fixed_width', overwrite=True)

    return sources
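An illustrative call (file name hypothetical):

# Extract sources from a reconstruction with the DAO StarFinder.
sources = extract_sources('reconstruction.fits', noise_threshold=3.0, fwhm=5.0,
                          star_finder='DAO', write_to='all_stars.dat')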
Example #13
    def combine(self, method='clip'):
        """Combine the frames of the stored files to a master flat.

        Args:
            method (str, optional):
                Method for the frame combination. Can be either 'median' for a
                conventional median combination without propagation of
                uncertainties (since the median does not allow for this) or
                'clip' for sigma clipping and a subsequent estimate of the
                variance of the cube, followed by a mean combination.
        """

        # Type check
        if not isinstance(method, str):
            raise SpecklepyTypeError('MasterFlat.combine()', argname='method', argtype=type(method), expected='str')

        # Read image frames from file
        logger.info("Combining the following file list to a master flat:")
        for index, file in enumerate(self.files):
            logger.info("{:4}: {}".format(index, file))
            data = fits.getdata(os.path.join(self.file_path, file))

            # Create a master flat
            if index == 0:
                flats = data
            else:
                flats = np.append(flats, data, axis=0)

        # Collapse master flat along axis 0
        if method == 'median':
            master_flat = np.median(flats, axis=0)
            master_flat_var = None
        elif method == 'clip':
            flats = sigma_clip(flats, axis=0, masked=True)
            master_flat = np.mean(flats, axis=0)
            master_flat_var = np.var(flats, axis=0)
        else:
            raise SpecklepyValueError('MasterFlat.combine()', argname='method', argvalue=method,
                                      expected="'clip' or 'median'")
        del flats

        # Normalize the master flat
        logger.info(f"Normalizing master flat in {method} mode...")
        if method == 'median':
            norm = np.median(master_flat)
            master_flat_normed = np.divide(master_flat, norm)
            master_flat_normed_var = None
        elif method == 'clip':
            norm = np.mean(master_flat)
            norm_var = np.var(master_flat)
            master_flat_normed = np.divide(master_flat, norm)
            master_flat_normed_var = np.divide(master_flat_var, np.square(norm)) + \
                                     np.divide(np.square(master_flat), np.power(norm, 4)) * norm_var
        else:
            master_flat_normed = None
            master_flat_normed_var = None

        # # Store master flat to file
        # if not hasattr(self, 'masterfile'):
        #     self.master_file = MasterFile(self.file_name, files=self.files, shape=master_flat_normed.shape,
        #                                   header_card_prefix='HIERARCH SPECKLEPY')

        # Replace masked values by NaNs (np.ma.filled leaves plain arrays unchanged)
        master_flat_normed = np.ma.filled(master_flat_normed, np.nan)
        self.master_file.data = master_flat_normed

        # Store variance in extension
        if master_flat_normed_var is not None:
            # Replace masked values by NaNs
            master_flat_normed_var = np.ma.filled(master_flat_normed_var, np.nan)
            self.master_file.new_extension('VAR', data=master_flat_normed_var)
Example #14
def ssa(files,
        mode='same',
        reference_file=None,
        outfile=None,
        in_dir=None,
        tmp_dir=None,
        lazy_mode=True,
        box_indexes=None,
        debug=False,
        **kwargs):
    """Compute the SSA reconstruction of a list of files.

    The simple shift-and-add (SSA) algorithm makes use of the structure of typical speckle patterns, i.e.
    short-exposure point-spread functions (PSFs). These show multiple peaks resembling the diffraction-limited PSF of
    coherent fractions within the telescope aperture. Under good conditions or on small telescopes, there is typically
    one largest coherent atmospheric cell and therefore, speckle PSFs typically show one major intensity peak. The
    algorithm makes use of this fact and identifies the emission peak in a given observation frame, assuming that this
    always belongs to the same star, and aligns all frames on the coordinate of the emission peak.

    See Bates & Cady (1980) for references.

    Args:
        files (list or array_like):
            List of complete paths to the fits files that shall be considered for the SSA reconstruction.
        mode (str):
            Name of the reconstruction mode: In 'same' mode, the reconstruction covers the same field of view of the
            reference file. In 'full' mode, every patch of the sky that is covered by at least one frame will be
            contained in the final reconstruction.
        reference_file (str, int, optional):
            Path to a reference file or index of the file in files, relative to which the shifts are computed. See
            specklepy.core.alignment.get_shifts for details. Default is the first file in `files`.
        outfile (specklepy.io.recfile, optional):
            Object to write the result to, if provided.
        in_dir (str, optional):
            Path to the files. `None` is substituted by an empty string.
        tmp_dir (str, optional):
            Path of a directory in which the temporary results are stored.
        lazy_mode (bool, optional):
            Set to False, to enforce the alignment of a single file with respect to the reference file. Default is True.
        box_indexes (list, optional):
            Constraining the search for the intensity peak to the specified box. Searching the full frames if not
            provided.
        debug (bool, optional):
            Show debugging information. Default is False.

    Returns:
        reconstruction (np.ndarray):
            The image reconstruction. The size depends on the mode argument.
    """

    logger.info("Starting SSA reconstruction...")
    # Check parameters
    if not isinstance(files, (list, np.ndarray)):
        if isinstance(files, str):
            files = [files]
        else:
            raise SpecklepyTypeError('ssa()',
                                     argname='files',
                                     argtype=type(files),
                                     expected='list')

    if isinstance(mode, str):
        if mode not in ['same', 'full', 'valid']:
            raise SpecklepyValueError('ssa()',
                                      argname='mode',
                                      argvalue=mode,
                                      expected="'same', 'full' or 'valid'")
    else:
        raise SpecklepyTypeError('ssa()',
                                 argname='mode',
                                 argtype=type(mode),
                                 expected='str')

    if reference_file is None:
        reference_file = files[0]
    elif isinstance(reference_file, int):
        reference_file = files[reference_file]
    elif not isinstance(reference_file, str):
        raise SpecklepyTypeError('ssa()',
                                 argname='reference_file',
                                 argtype=type(reference_file),
                                 expected='str or int')

    if outfile is None:
        pass
    elif isinstance(outfile, str):
        outfile = ReconstructionFile(files=files,
                                     filename=outfile,
                                     cards={"RECONSTRUCTION": "SSA"})
    elif isinstance(outfile, ReconstructionFile):
        pass
    else:
        raise SpecklepyTypeError('ssa()',
                                 argname='outfile',
                                 argtype=type(outfile),
                                 expected='str or ReconstructionFile')

    if in_dir is None:
        in_dir = ''
    reference_file = os.path.join(in_dir, reference_file)

    if tmp_dir is not None:
        if isinstance(tmp_dir, str) and not os.path.isdir(tmp_dir):
            os.makedirs(tmp_dir)

    if not isinstance(lazy_mode, bool):
        raise SpecklepyTypeError('ssa()',
                                 argname='lazy_mode',
                                 argtype=type(lazy_mode),
                                 expected='bool')

    if box_indexes is not None:
        box = Box(box_indexes)
    else:
        box = None

    var_ext = kwargs.get('variance_extension_name', 'VAR')

    if debug:
        logger.setLevel('DEBUG')
        logger.handlers[0].setLevel('DEBUG')
        logger.info("Set logging level to DEBUG")

    # Align reconstructions if multiple files are provided
    if lazy_mode and len(files) == 1:

        # Do not align just a single file
        with fits.open(os.path.join(in_dir, files[0])) as hdu_list:
            cube = hdu_list[0].data
            if var_ext in hdu_list:
                var_cube = hdu_list[var_ext].data
            else:
                var_cube = None
            reconstruction, reconstruction_var = coadd_frames(
                cube, var_cube=var_cube, box=box)

    else:

        # Compute temporary reconstructions of the individual cubes
        tmp_files = []
        for index, file in enumerate(files):
            with fits.open(os.path.join(in_dir, file)) as hdu_list:
                cube = hdu_list[0].data
                if var_ext in hdu_list:
                    var_cube = hdu_list[var_ext].data
                    logger.debug(
                        f"Found variance extension {var_ext} in file {file}")
                else:
                    logger.debug(
                        f"Did not find variance extension {var_ext} in file {file}"
                    )
                    var_cube = None
                tmp, tmp_var = coadd_frames(cube, var_cube=var_cube, box=box)

            if debug:
                imshow(box(tmp), norm='log')

            tmp_file = os.path.basename(file).replace(".fits", "_ssa.fits")
            tmp_file = os.path.join(tmp_dir, tmp_file)
            logger.info(
                "Saving interim SSA reconstruction of cube to {}".format(
                    tmp_file))
            tmp_file_object = Outfile(tmp_file, data=tmp, verbose=True)

            # Store variance of temporary reconstruction
            if tmp_var is not None:
                tmp_file_object.new_extension(var_ext, data=tmp_var)
                del tmp_var
            tmp_files.append(tmp_file)

        # Align tmp reconstructions and add up
        file_shifts, image_shape = alignment.get_shifts(
            tmp_files,
            reference_file=reference_file,
            return_image_shape=True,
            lazy_mode=True)
        pad_vectors, ref_pad_vector = alignment.get_pad_vectors(
            file_shifts,
            cube_mode=(len(image_shape) == 3),
            return_reference_image_pad_vector=True)

        # Iterate over file-wise reconstructions
        reconstruction = None
        reconstruction_var = None
        for index, file in enumerate(tmp_files):

            # Read data
            with fits.open(file) as hdu_list:
                tmp_image = hdu_list[0].data
                if var_ext in hdu_list:
                    tmp_image_var = hdu_list[var_ext].data
                else:
                    tmp_image_var = None

            # Initialize or co-add reconstructions and var images
            if reconstruction is None:
                reconstruction = alignment.pad_array(
                    tmp_image,
                    pad_vectors[index],
                    mode=mode,
                    reference_image_pad_vector=ref_pad_vector)
                if tmp_image_var is not None:
                    reconstruction_var = alignment.pad_array(
                        tmp_image_var,
                        pad_vectors[index],
                        mode=mode,
                        reference_image_pad_vector=ref_pad_vector)
            else:
                reconstruction += alignment.pad_array(
                    tmp_image,
                    pad_vectors[index],
                    mode=mode,
                    reference_image_pad_vector=ref_pad_vector)
                if tmp_image_var is not None:
                    reconstruction_var += alignment.pad_array(
                        tmp_image_var,
                        pad_vectors[index],
                        mode=mode,
                        reference_image_pad_vector=ref_pad_vector)
    logger.info("Reconstruction finished...")

    # Save the result to an Outfile
    if outfile is not None:
        outfile.data = reconstruction
        if reconstruction_var is not None:
            outfile.new_extension(name=var_ext, data=reconstruction_var)

    # Return reconstruction (and the variance map if computed)
    if reconstruction_var is not None:
        return reconstruction, reconstruction_var
    return reconstruction
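An illustrative call (file names hypothetical):

# SSA reconstruction of two cubes in 'same' mode.
reconstruction = ssa(['cube_1.fits', 'cube_2.fits'], mode='same',
                     outfile='ssa.fits', in_dir='data/', tmp_dir='tmp/')
# Note: a (reconstruction, variance) tuple is returned if variance cubes were found.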
Example #15
    def __init__(self, in_files, psf_files, shifts, mode='same', in_dir=None):
        """ Initialize a FourierObject instance.

        Args:
            in_files (list):
                List of paths of the input files.
            psf_files (list):
                List of paths of the PSF files.
            shifts (list):
                List of integer shifts between the files.
            mode (str, optional):
                Define the size of the output image as 'same' to the reference image or expanding to include the 'full'
                covered field. Default is 'same'.
            in_dir (str, optional):
                Path to the input files.
        """

        # Assert that there are the same number of inFiles and psfFiles, which should be the case after running the
        # holography function.
        if len(in_files) != len(psf_files):
            raise ValueError(
                f"The number of input files ({len(in_files)}) and PSF files ({len(psf_files)}) do not "
                f"match!")
        self.in_files = in_files
        self.psf_files = psf_files
        self.shifts = shifts

        # Check whether mode is supported
        if mode not in ['same', 'full', 'valid']:
            raise SpecklepyValueError(
                'FourierObject',
                argname='mode',
                argvalue=mode,
                expected="either 'same', 'full', or 'valid'")
        self.mode = mode
        if in_dir is None:
            self.in_dir = ''
        else:
            self.in_dir = in_dir

        # Extract padding vectors for images and reference image
        logger.info("Initializing padding vectors")
        # files_contain_data_cubes = fits.getdata(in_files[0]).ndim == 3
        self.pad_vectors, self.reference_image_pad_vector = get_pad_vectors(
            shifts=shifts,
            cube_mode=False,
            return_reference_image_pad_vector=True)
        file_index = 0
        image_pad_vector = self.pad_vectors[file_index]

        # Get example image frame, used as final image size
        image_file = in_files[file_index]
        logger.info(f"\tUsing example image frame from {image_file}")
        img = fits.getdata(os.path.join(
            self.in_dir, image_file))[0]  # First frame only, drops the time axis
        img = pad_array(
            array=img,
            pad_vector=image_pad_vector,
            mode=mode,
            reference_image_pad_vector=self.reference_image_pad_vector)
        logger.info(f"\tShift: {shifts[file_index]}")
        logger.info(f"\tShape: {img.shape}")

        # Get example PSF frame
        psf_file = psf_files[file_index]
        logger.info(f"\tUsing example PSF frame from {psf_file}")
        psf = fits.getdata(psf_file)[0]
        logger.info(f"\tShape: {psf.shape}")

        # Estimate the padding vector for the f_psf frames to have the same xy-extent as f_img
        dx = img.shape[0] - psf.shape[0]
        dy = img.shape[1] - psf.shape[1]
        psf_pad_vector = ((dx // 2, int(np.ceil(dx / 2))),
                          (dy // 2, int(np.ceil(dy / 2))))
        logger.info(f"\tPad_width for PSFs: {psf_pad_vector}")

        # Apply padding to PSF frame
        psf = np.pad(
            psf,
            psf_pad_vector,
            mode='constant',
        )
        if not img.shape == psf.shape:
            raise ValueError(
                f"The Fourier transformed images and PSFs have different shape, {img.shape} and "
                f"{psf.shape}. Something went wrong with the padding!")
        self.psf_pad_vector = psf_pad_vector

        # Initialize the enumerator, denominator and Fourier object attributes
        self.enumerator = np.zeros(img.shape, dtype='complex128')
        self.denominator = np.zeros(img.shape, dtype='complex128')
        self.fourier_image = np.zeros(img.shape, dtype='complex128')
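A construction sketch mirroring how the holography() example above drives this class; all argument values are illustrative:

# Accumulate the Fourier-space object estimate, Eq. 1 of Schoedel et al. (2013).
f_object = FourierObject(in_files, psf_files, shifts=shifts, mode='same', in_dir='data/')
f_object.coadd_fft()
f_object.apodize(type='gaussian', radius=2.0)
image = f_object.ifft(total_flux=1.0)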
Example #16
def pad_array(array, pad_vector, mode='same', reference_image_pad_vector=None):
    """Pads an array according to the pad_vector and crops the image given the
    mode.

    Pads an array with zeros to match a desired field size. Intermediately, it always creates a 'full' image and only
    in 'same' mode it crops the edges such that the returned array covers only the field of the reference image.

    Args:
        array (np.ndarray):
            Input array that shall be padded to match the 'full' or 'same' fields.
        pad_vector (list):
            List of padding vectors, as obtained from get_pad_vectors().
        mode (str, optional):
            Define the size of the output image as 'same' to the reference image or expanding to include the 'full'
            covered field.
        reference_image_pad_vector (tuple or list, optional):
            Used in `same` mode to estimate the position of the reference image and crop beyond.

    Returns:
        padded (np.ndarray):
            Padded array, matching the field of the reference image in 'same'
            mode, or the complete field in 'full' mode.
    """

    # Check input parameters
    if isinstance(array, np.ndarray):
        if array.ndim not in [2, 3]:
            raise SpecklepyValueError('pad_array()',
                                      argname='array.ndim',
                                      argvalue=array.ndim,
                                      expected='2 or 3')
    else:
        raise SpecklepyTypeError('pad_array()',
                                 argname='array',
                                 argtype=type(array),
                                 expected='np.ndarray')

    # Pad the array up to the 'full' field
    padded = np.pad(array, pad_vector, mode='constant')

    # Crop the image according to the desired mode
    if mode == 'same':
        # Take reference pad vector and adapt to correctly limit the image
        _r = reference_image_pad_vector
        # Pick only those pixels, covered by the reference image
        if array.ndim == 2:
            padded = padded[_r[0][0]:_adapt_max_coordinate(_r[0][1]),
                            _r[1][0]:_adapt_max_coordinate(_r[1][1])]
        else:
            padded = padded[:, _r[0][0]:_adapt_max_coordinate(_r[0][1]),
                            _r[1][0]:_adapt_max_coordinate(_r[1][1])]

    elif mode == 'full':
        # There is nothing to crop in 'full' mode
        pass

    elif mode == 'valid':
        raise NotImplementedError(
            "specklepy.core.alignment.pad_array does not support the 'valid' mode yet!"
        )

    else:
        raise SpecklepyValueError('pad_array()',
                                  argname='mode',
                                  argvalue=mode,
                                  expected="'same', 'full' or 'valid'")

    return padded
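A runnable toy comparison of the 'full' and 'same' modes (numpy only; the pad vectors are written out by hand here instead of coming from get_pad_vectors(), and the crop relies on the helper _adapt_max_coordinate that is not shown in this snippet):

import numpy as np
arr = np.ones((2, 2))
full = pad_array(arr, ((1, 0), (0, 1)), mode='full')
print(full.shape)  # (3, 3): one extra row on top, one extra column on the right
same = pad_array(arr, ((1, 0), (0, 1)), mode='same',
                 reference_image_pad_vector=((0, 1), (0, 1)))
print(same.shape)  # cropped back to the field of the reference image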
Example #17
def get_shift(image,
              reference_image=None,
              is_fourier_transformed=False,
              mode='correlation',
              debug=False):
    """Estimate the shift between an image and a reference image.

    Estimate the relative shift between an image and a reference image by means of a 2D correlation
    ('correlation' mode) or by comparison of the emission peaks ('peak' or 'maximum' modes).

    Args:
        image (np.ndarray):
            2D array of the image to be shifted.
        reference_image (np.ndarray):
            2D array of the reference image of the shift.
        is_fourier_transformed (bool):
            Indicate whether the reference image is already Fourier transformed. This is implemented to save
            computation by computing that transform only once.
        mode (str, optional):
            Mode of the shift estimate. In 'correlation' mode, a 2D correlation is used to estimate the shift of the
            array. This is computationally much more expensive than the (mutually identical) 'maximum' and 'peak'
            modes, which simply identify the coordinates of the emission peaks and return the difference. However,
            these modes may be fooled by reference sources of similar brightness. Default is 'correlation'.
        debug (bool, optional):
            Set to True to inspect intermediate results. Default is False.

    Returns:
        shift (tuple):
            Tuple of shift indices for each axis.
    """

    # Check input parameters
    if not isinstance(image, np.ndarray) or image.ndim != 2:
        raise TypeError(
            f"Image input must be 2D numpy.ndarray, but was provided as {type(image)}"
        )
    if not isinstance(reference_image, np.ndarray) or reference_image.ndim != 2:
        raise TypeError(
            f"Reference image input must be 2D numpy.ndarray, but was provided as {type(reference_image)}"
        )
    if not isinstance(is_fourier_transformed, bool):
        raise SpecklepyTypeError('get_shift()',
                                 argname='is_Fourier_transformed',
                                 argtype=type(is_fourier_transformed),
                                 expected='bool')
    if isinstance(mode, str):
        if mode not in ['correlation', 'maximum', 'peak']:
            raise SpecklepyValueError(
                'get_shift()',
                argname='mode',
                argvalue=mode,
                expected="'correlation', 'maximum' or 'peak'")
    else:
        raise SpecklepyTypeError('get_shift()',
                                 argname='mode',
                                 argtype=type(mode),
                                 expected='str')

    # Simple comparison of the peaks in the images
    if mode == 'maximum' or mode == 'peak':
        peak_image = np.unravel_index(np.argmax(image, axis=None), image.shape)
        peak_ref_image = np.unravel_index(
            np.argmax(reference_image, axis=None), reference_image.shape)
        return peak_ref_image[0] - peak_image[0], peak_ref_image[
            1] - peak_image[1]

    # Using correlation of the two images
    elif mode == 'correlation':
        # Get the Fourier transformed reference image for cross-correlation
        if not is_fourier_transformed:
            f_reference_image = np.fft.fft2(reference_image)
        else:
            f_reference_image = reference_image

        # Fourier transform the image
        f_image = np.conjugate(np.fft.fft2(image))

        # Compute the 2-dimensional correlation
        correlation = np.fft.ifft2(np.multiply(f_reference_image, f_image))
        correlation = np.fft.fftshift(correlation)
        if debug:
            imshow(np.abs(correlation), title='FFT shifted correlation')

        # Derive the shift from the correlation
        shift = np.unravel_index(np.argmax(correlation), correlation.shape)
        shift = tuple(x - int(correlation.shape[i] / 2)
                      for i, x in enumerate(shift))
        return shift
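The 'correlation' branch above relies on the convolution theorem: multiplying the Fourier transform of the reference image by the complex conjugate of the image's transform and transforming back yields the cross-correlation of the two frames, and the offset of the correlation peak from the array center (after np.fft.fftshift) is the shift that maps the image back onto the reference. Below is a minimal, self-contained sketch of this trick, independent of the Specklepy helpers; the array size and the single-pixel test source are arbitrary choices for illustration.

import numpy as np

# Reference frame with a single bright pixel, and a copy displaced by (3, -2)
reference = np.zeros((64, 64))
reference[32, 32] = 1.0
image = np.roll(reference, shift=(3, -2), axis=(0, 1))

# Cross-correlate via the Fourier domain, as in the 'correlation' mode above
f_reference = np.fft.fft2(reference)
f_image = np.conjugate(np.fft.fft2(image))
correlation = np.fft.fftshift(np.fft.ifft2(f_reference * f_image))

# The peak offset from the array center is the shift that undoes the displacement,
# matching the 'peak' mode convention (reference peak minus image peak)
peak = np.unravel_index(np.argmax(np.abs(correlation)), correlation.shape)
print(tuple(x - s // 2 for x, s in zip(peak, correlation.shape)))  # (-3, 2)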
Example #18
    def identify_sequences(self, source='sky'):
        """Identify observation sequences.

        Args:
            source (str, optional):
                Observation type of the images that shall be used to measure the sky background from. Options are 'sky'
                (default) and 'science'.

        Returns:
            sequences (list of Sequence):
                List of observing sequences.
        """

        # Type check
        if isinstance(source, str):
            if source not in ['sky', 'science']:
                raise SpecklepyValueError('identify sequences',
                                          argname='source',
                                          argvalue=source,
                                          expected="'sky' or 'science'")
        else:
            raise SpecklepyTypeError('identify sequences',
                                     argname='source',
                                     argtype=type(source),
                                     expected='str')

        # Identify the observing sequences
        sequences = []
        for setup in self.setups:
            for obj in self.objects:  # `obj` avoids shadowing the builtin `object`
                # Query names and time stamps of science and sky files
                sky_files = self.filter({
                    'OBSTYPE': source.upper(),
                    'OBJECT': obj,
                    'SETUP': setup
                })
                sky_time_stamps = self.filter(
                    {
                        'OBSTYPE': source.upper(),
                        'OBJECT': obj,
                        'SETUP': setup
                    },
                    namekey='DATE')
                science_files = self.filter({
                    'OBSTYPE': 'SCIENCE',
                    'OBJECT': obj,
                    'SETUP': setup
                })
                science_time_stamps = self.filter(
                    {
                        'OBSTYPE': 'SCIENCE',
                        'OBJECT': obj,
                        'SETUP': setup
                    },
                    namekey='DATE')

                # Test the number of source files
                if len(sky_files) == 0:
                    logger.warning(
                        f"Did not find any sky observations for object {obj} in setup {setup}. No sky "
                        f"subtraction will be applied!")
                else:
                    # Store the information in a new sequence
                    sequences.append(
                        Sequence(sky_files=sky_files,
                                 science_files=science_files,
                                 file_path=self.in_dir,
                                 sky_time_stamps=sky_time_stamps,
                                 science_time_stamps=science_time_stamps,
                                 source=source,
                                 object=obj,
                                 setup=setup))
        return sequences
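The queries above go through a filter method that matches a dictionary of FITS header keyword-value pairs against the file index and returns the matching file names (or another column, selected via namekey). Here is a rough sketch of what such a filter could look like, assuming the index is held in an astropy.table.Table whose columns are named after the header keywords; the function name, table layout, and example rows are illustrative assumptions, not Specklepy's implementation.

import numpy as np
from astropy.table import Table

def filter_table(table, query, namekey='FILE'):
    # AND-combine one boolean mask per queried header keyword
    mask = np.ones(len(table), dtype=bool)
    for key, value in query.items():
        mask &= np.array(table[key] == value)
    return list(table[namekey][mask])

# Hypothetical file index with one sky and one science exposure
index = Table(rows=[('sky_1.fits', 'SKY', 'NGC42', 'K-band'),
                    ('sci_1.fits', 'SCIENCE', 'NGC42', 'K-band')],
              names=('FILE', 'OBSTYPE', 'OBJECT', 'SETUP'))
print(filter_table(index, {'OBSTYPE': 'SCIENCE', 'OBJECT': 'NGC42', 'SETUP': 'K-band'}))
# ['sci_1.fits']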
Example #19
    def __init__(self, in_files, mode='same', reference_image=None, out_file=None, in_dir=None, tmp_dir=None,
                 alignment_method='collapse', var_ext=None, box_indexes=None, debug=False):
        """Create a Reconstruction instance.

        Args:
            in_files (list):
                List of input data cubes.
            mode (str, optional):
                Reconstruction mode; defines the final image size and can be `full`, `same`, or `valid`. The final
                image size is derived as follows:
                - `full`:
                    The reconstruction image covers every patch of the sky that is covered by at least one frame in the
                    input data.
                - `same`:
                    The reconstruction image covers the same field of view as the image in the reference file.
                - `valid`:
                    The reconstruction image covers only that field that is covered by all images in the input files.
            reference_image (int or str, optional):
                The index in the `in_files` list or the name of the image serving as reference in 'same' mode.
            out_file (str, optional):
                Name of an output file to store the reconstructed image in.
            in_dir (str, optional):
                Path to the `in_files`.
            tmp_dir (str, optional):
                Path to the directory for storing temporary products.
            alignment_method (str, optional):
                Method for creating the long exposures that are used for the initial alignment of the cubes, for
                instance by collapsing them along the time axis. Default is 'collapse'.
            var_ext (str, optional):
                Name of the FITS extension in which the variance map shall be stored. If None, no variance map is
                propagated.
            box_indexes (optional):
                Index boundaries used to initialize a `Box` instance that restricts the analysis to a sub-field,
                if provided.
            debug (bool, optional):
                Show debugging information.
        """

        # Check input parameter types
        if not isinstance(in_files, (list, np.ndarray)):
            raise SpecklepyTypeError('Reconstruction', 'in_files', type(in_files), 'list')
        if not isinstance(mode, str):
            raise SpecklepyTypeError('Reconstruction', 'mode', type(mode), 'str')
        if out_file is not None and not isinstance(out_file, str):
            raise SpecklepyTypeError('Reconstruction', 'out_file', type(out_file), 'str')

        # Check input parameter values
        if mode not in self.supported_modes:
            raise SpecklepyValueError('Reconstruction', 'mode', mode, f"in {self.supported_modes}")

        # Store input data
        self.in_files = in_files
        self.mode = mode
        self.out_file = out_file if out_file is not None else 'reconstruction.fits'
        self.reference_image = reference_image if reference_image is not None else 0
        self.in_dir = in_dir if in_dir is not None else ''
        self.tmp_dir = tmp_dir if tmp_dir is not None else ''
        self.var_ext = var_ext  # if var_ext is not None else 'VAR'
        self.box = Box(box_indexes) if box_indexes is not None else None

        # Retrieve name of reference file
        self.reference_file = self.identify_reference_file()

        # Derive shape of individual input frames
        single_cube_mode = len(self.in_files) == 1
        example_frame = fits.getdata(os.path.join(self.in_dir, self.in_files[0]))
        if example_frame.ndim == 3:
            example_frame = example_frame[0]
        self.frame_shape = example_frame.shape

        # Initialize image
        if single_cube_mode:
            self.image = np.zeros(self.frame_shape)
            self.shifts = (0, 0)
        else:
            # Compute SSA reconstructions of cubes or collapse cubes for initial alignments
            self.long_exp_files = self.create_long_exposures(alignment_method=alignment_method)

            # Identify reference tmp file
            self.reference_tmp_file = self.identify_reference_long_exposure_file()

            # Estimate relative shifts
            self.shifts = alignment.get_shifts(files=self.long_exp_files, reference_file=self.reference_tmp_file,
                                               lazy_mode=True, return_image_shape=False, in_dir=self.tmp_dir,
                                               debug=debug)

            # Derive corresponding padding vectors
            self.pad_vectors, self.reference_pad_vector = \
                alignment.get_pad_vectors(shifts=self.shifts, cube_mode=False, return_reference_image_pad_vector=True)

            # Derive corresponding image sizes
            self.image = self.initialize_image()

        # Initialize the variance map
        self.var = np.zeros(self.image.shape) if self.var_ext is not None else None

        # Initialize output file and create an extension for the variance
        self.out_file = ReconstructionFile(files=self.in_files, filename=self.out_file, shape=self.image.shape,
                                           in_dir=self.in_dir, cards={"RECONSTRUCTION": "SSA"})
        if self.var is not None:
            self.out_file.new_extension(name=self.var_ext, data=self.var)
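In the multi-cube branch, the estimated shifts are translated into per-file padding vectors so that every long exposure can be placed on a common grid. The following toy illustration of that idea assumes each shift describes a file's field-of-view offset on the common grid; the helper is a sketch for intuition, not Specklepy's alignment.get_pad_vectors.

import numpy as np

def pad_vectors_from_shifts(shifts):
    # Pad each frame so that all frames cover the union ('full' mode) grid:
    # a frame offset by s receives (s - min) leading and (max - s) trailing pixels per axis
    shifts = np.asarray(shifts)
    lo, hi = shifts.min(axis=0), shifts.max(axis=0)
    return [tuple((int(s[a] - lo[a]), int(hi[a] - s[a])) for a in range(shifts.shape[1]))
            for s in shifts]

# Two frames, the second offset by (2, -1) with respect to the first
print(pad_vectors_from_shifts([(0, 0), (2, -1)]))
# [((0, 2), (1, 0)), ((2, 0), (0, 1))]

# np.pad(frame, pad_vector) with these vectors places each frame on the common grid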