Example #1
    def _run_single_processing(self, star_reshape, im_shape, indices):
        """
        Internal function to create the residuals, derotate the images, and write the output
        using a single process.

        Returns
        -------
        NoneType
            None
        """

        for i, pca_number in enumerate(self.m_components):
            progress(i, len(self.m_components), "Creating residuals...")

            parang = -1. * self.m_star_in_port.get_attribute(
                "PARANG") + self.m_extra_rot

            residuals, res_rot = pca_psf_subtraction(images=star_reshape,
                                                     angles=parang,
                                                     pca_number=pca_number,
                                                     pca_sklearn=self.m_pca,
                                                     im_shape=im_shape,
                                                     indices=indices)

            hist = "max PC number = " + str(np.amax(self.m_components))

            # 1.) derotated residuals
            if self.m_res_arr_out_ports is not None:
                self.m_res_arr_out_ports[pca_number].set_all(res_rot)
                self.m_res_arr_out_ports[pca_number].copy_attributes(
                    self.m_star_in_port)
                self.m_res_arr_out_ports[pca_number].add_history(
                    "PcaPsfSubtractionModule", hist)

            # 2.) mean residuals
            if self.m_res_mean_out_port is not None:
                stack = combine_residuals(method="mean", res_rot=res_rot)
                self.m_res_mean_out_port.append(stack, data_dim=3)

            # 3.) median residuals
            if self.m_res_median_out_port is not None:
                stack = combine_residuals(method="median", res_rot=res_rot)
                self.m_res_median_out_port.append(stack, data_dim=3)

            # 4.) noise-weighted residuals
            if self.m_res_weighted_out_port is not None:
                stack = combine_residuals(method="weighted",
                                          res_rot=res_rot,
                                          residuals=residuals,
                                          angles=parang)

                self.m_res_weighted_out_port.append(stack, data_dim=3)

            # 5.) clipped mean residuals
            if self.m_res_rot_mean_clip_out_port is not None:
                stack = combine_residuals(method="clipped", res_rot=res_rot)
                self.m_res_rot_mean_clip_out_port.append(stack, data_dim=3)

        sys.stdout.write("Creating residuals... [DONE]\n")
        sys.stdout.flush()
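
For readers unfamiliar with pca_psf_subtraction, here is a minimal, self-contained sketch of the idea (an illustration, not PynPoint's implementation): fit a PCA basis to the flattened image stack, subtract each frame's projection onto that basis, and derotate the residuals by the supplied angles.

import numpy as np
from scipy.ndimage import rotate
from sklearn.decomposition import PCA

def sketch_pca_psf_subtraction(images, angles, pca_number):
    # images: (n_frames, n_y, n_x); angles: derotation angles in degrees
    n_frames, n_y, n_x = images.shape
    flat = images.reshape(n_frames, n_y * n_x)

    # low-rank PSF model from the first pca_number principal components
    pca = PCA(n_components=pca_number)
    psf_model = pca.inverse_transform(pca.fit_transform(flat))

    residuals = (flat - psf_model).reshape(n_frames, n_y, n_x)

    # derotate each residual frame so the sky orientation is aligned
    res_rot = np.zeros_like(residuals)
    for i, ang in enumerate(angles):
        res_rot[i] = rotate(residuals[i], ang, reshape=False)

    return residuals, res_rot
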
Example #2
    def run_job(self, tmp_task: TaskInput) -> TaskResult:
        """
        Run method of PcaTaskProcessor.

        Parameters
        ----------
        tmp_task : pynpoint.util.multiproc.TaskInput
            Input task.

        Returns
        -------
        pynpoint.util.multiproc.TaskResult
            Output residuals.
        """

        # correct the data type of pca_number if necessary
        if isinstance(tmp_task.m_input_data, tuple):
            pca_number = tmp_task.m_input_data
        else:
            pca_number = int(tmp_task.m_input_data)

        residuals, res_rot = postprocessor(
            images=self.m_star_reshape,
            angles=self.m_angles,
            scales=self.m_scales,
            pca_number=pca_number,
            pca_sklearn=self.m_pca_model,
            im_shape=self.m_im_shape,
            indices=self.m_indices,
            processing_type=self.m_processing_type)

        # differentiate between IFS and mono-wavelength data
        if res_rot.ndim == 3:
            res_output = np.zeros((4, res_rot.shape[-2], res_rot.shape[-1]))

        else:
            res_output = np.zeros((4, len(self.m_star_reshape),
                                   res_rot.shape[-2], res_rot.shape[-1]))

        if self.m_requirements[0]:
            res_output[0, ] = combine_residuals(method='mean', res_rot=res_rot)

        if self.m_requirements[1]:
            res_output[1, ] = combine_residuals(method='median',
                                                res_rot=res_rot)

        if self.m_requirements[2]:
            res_output[2, ] = combine_residuals(method='weighted',
                                                res_rot=res_rot,
                                                residuals=residuals,
                                                angles=self.m_angles)

        if self.m_requirements[3]:
            res_output[3, ] = combine_residuals(method='clipped',
                                                res_rot=res_rot)

        sys.stdout.write('.')
        sys.stdout.flush()

        return TaskResult(res_output, tmp_task.m_job_parameter[0])
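
The four slots of res_output correspond to the 'mean', 'median', 'weighted', and 'clipped' combination methods. As a rough, hedged illustration of the first, second, and fourth of these (the noise-weighted case needs the non-derotated residuals and is omitted; PynPoint's combine_residuals may differ in detail, e.g. in the clipping bounds):

import numpy as np

def sketch_combine(res_rot, clip_sigma=3.):
    # res_rot: derotated residuals with shape (n_frames, n_y, n_x)
    mean_stack = np.mean(res_rot, axis=0)
    median_stack = np.median(res_rot, axis=0)

    # clipped mean: ignore pixels deviating more than clip_sigma standard
    # deviations from the per-pixel mean before averaging
    deviation = np.abs(res_rot - mean_stack)
    clipped = np.where(deviation > clip_sigma * np.std(res_rot, axis=0),
                       np.nan, res_rot)
    clipped_stack = np.nanmean(clipped, axis=0)

    return mean_stack, median_stack, clipped_stack
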
Example #3
    def _run_single_processing(self, star_reshape: np.ndarray,
                               im_shape: Tuple[int, int, int],
                               indices: np.ndarray) -> None:
        """
        Internal function to create the residuals, derotate the images, and write the output
        using a single process.
        """

        start_time = time.time()

        for i, pca_number in enumerate(self.m_components):
            progress(i, len(self.m_components), 'Creating residuals...',
                     start_time)

            parang = -1. * self.m_star_in_port.get_attribute(
                'PARANG') + self.m_extra_rot

            residuals, res_rot = pca_psf_subtraction(
                images=star_reshape,
                angles=parang,
                pca_number=int(pca_number),
                pca_sklearn=self.m_pca,
                im_shape=im_shape,
                indices=indices)

            hist = f'max PC number = {np.amax(self.m_components)}'

            # 1.) derotated residuals
            if self.m_res_arr_out_ports is not None:
                self.m_res_arr_out_ports[pca_number].set_all(res_rot)
                self.m_res_arr_out_ports[pca_number].copy_attributes(
                    self.m_star_in_port)
                self.m_res_arr_out_ports[pca_number].add_history(
                    'PcaPsfSubtractionModule', hist)

            # 2.) mean residuals
            if self.m_res_mean_out_port is not None:
                stack = combine_residuals(method='mean', res_rot=res_rot)
                self.m_res_mean_out_port.append(stack, data_dim=3)

            # 3.) median residuals
            if self.m_res_median_out_port is not None:
                stack = combine_residuals(method='median', res_rot=res_rot)
                self.m_res_median_out_port.append(stack, data_dim=3)

            # 4.) noise-weighted residuals
            if self.m_res_weighted_out_port is not None:
                stack = combine_residuals(method='weighted',
                                          res_rot=res_rot,
                                          residuals=residuals,
                                          angles=parang)

                self.m_res_weighted_out_port.append(stack, data_dim=3)

            # 5.) clipped mean residuals
            if self.m_res_rot_mean_clip_out_port is not None:
                stack = combine_residuals(method='clipped', res_rot=res_rot)
                self.m_res_rot_mean_clip_out_port.append(stack, data_dim=3)
Example #4
    def _lnlike():
        """
        Internal function for the log likelihood function. Noise of each pixel is assumed to follow
        either a Poisson distribution (see Wertz et al. 2017) or a Gaussian distribution with a
        correction for small sample statistics (see Mawet et al. 2014).

        Returns
        -------
        float
            Log likelihood.
        """

        sep, ang, mag = param

        fake = fake_planet(images=images,
                           psf=psf,
                           parang=parang - extra_rot,
                           position=(sep / pixscale, ang),
                           magnitude=mag,
                           psf_scaling=psf_scaling)

        _, im_res = pca_psf_subtraction(images=fake * mask,
                                        angles=-1. * parang + extra_rot,
                                        pca_number=pca_number,
                                        indices=indices)

        stack = combine_residuals(method=residuals, res_rot=im_res)

        merit = merit_function(residuals=stack[0, ],
                               function='sum',
                               variance=variance,
                               aperture=aperture,
                               sigma=0.)

        return -0.5 * merit
Example #5
    def _lnlike():
        """
        Internal function for the log likelihood function.

        Returns
        -------
        float
            Log likelihood.
        """

        sep, ang, mag = param

        fake = fake_planet(images=images,
                           psf=psf,
                           parang=parang - extra_rot,
                           position=(sep / pixscale, ang),
                           magnitude=mag,
                           psf_scaling=psf_scaling)

        _, im_res = pca_psf_subtraction(images=fake * mask,
                                        angles=-1. * parang + extra_rot,
                                        pca_number=pca_number,
                                        indices=indices)

        res_stack = combine_residuals(method=residuals, res_rot=im_res)

        chi_square = merit_function(residuals=res_stack[0, ],
                                    merit=merit,
                                    aperture=aperture,
                                    sigma=0.)

        return -0.5 * chi_square
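
The factor of -0.5 converts the chi-square figure of merit into a Gaussian log-likelihood (up to an additive constant). A minimal sketch, assuming independent Gaussian pixel noise with standard deviation sigma:

import numpy as np

def sketch_lnlike(residual_pixels, sigma):
    # ln L = -0.5 * sum((residual / sigma)**2) + const for Gaussian noise,
    # so minimizing the chi-square maximizes the likelihood
    chi_square = np.sum((residual_pixels / sigma) ** 2)
    return -0.5 * chi_square
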
Example #6
    def run_job(self, tmp_task):
        """
        Run method of PcaTaskProcessor.

        Parameters
        ----------
        tmp_task : pynpoint.util.multiproc.TaskInput
            Input task.

        Returns
        -------
        pynpoint.util.multiproc.TaskResult
            Output residuals.
        """

        residuals, res_rot = pca_psf_subtraction(
            images=self.m_star_reshape,
            angles=self.m_angles,
            pca_number=tmp_task.m_input_data,
            pca_sklearn=self.m_pca_model,
            im_shape=self.m_im_shape,
            indices=self.m_indices)

        res_output = np.zeros((4, res_rot.shape[1], res_rot.shape[2]))

        if self.m_requirements[0]:
            res_output[0, ] = combine_residuals(method="mean", res_rot=res_rot)

        if self.m_requirements[1]:
            res_output[1, ] = combine_residuals(method="median",
                                                res_rot=res_rot)

        if self.m_requirements[2]:
            res_output[2, ] = combine_residuals(method="weighted",
                                                res_rot=res_rot,
                                                residuals=residuals,
                                                angles=self.m_angles)

        if self.m_requirements[3]:
            res_output[3, ] = combine_residuals(method="clipped",
                                                res_rot=res_rot)

        sys.stdout.write('.')
        sys.stdout.flush()

        return TaskResult(res_output, tmp_task.m_job_parameter[0])
Example #7
        def _objective(arg):
            sys.stdout.write('.')
            sys.stdout.flush()

            pos_y = arg[0]
            pos_x = arg[1]
            mag = arg[2]

            sep_ang = cartesian_to_polar(center, pos_y, pos_x)

            fake = fake_planet(images=images,
                               psf=psf,
                               parang=parang,
                               position=(sep_ang[0], sep_ang[1]),
                               magnitude=mag,
                               psf_scaling=self.m_psf_scaling)

            mask = create_mask(fake.shape[-2:], (self.m_cent_size, self.m_edge_size))

            if self.m_reference_in_port is None:
                _, im_res = pca_psf_subtraction(images=fake*mask,
                                                angles=-1.*parang+self.m_extra_rot,
                                                pca_number=self.m_pca_number,
                                                pca_sklearn=None,
                                                im_shape=None,
                                                indices=None)

            else:
                im_reshape = np.reshape(fake*mask, (im_shape[0], im_shape[1]*im_shape[2]))

                _, im_res = pca_psf_subtraction(images=im_reshape,
                                                angles=-1.*parang+self.m_extra_rot,
                                                pca_number=self.m_pca_number,
                                                pca_sklearn=sklearn_pca,
                                                im_shape=im_shape,
                                                indices=None)

            res_stack = combine_residuals(method=self.m_residuals, res_rot=im_res)

            self.m_res_out_port.append(res_stack, data_dim=3)

            chi_square = merit_function(residuals=res_stack[0, ],
                                        merit=self.m_merit,
                                        aperture=aperture,
                                        sigma=self.m_sigma)

            position = rotate_coordinates(center, (pos_y, pos_x), -self.m_extra_rot)

            res = np.asarray([position[1],
                              position[0],
                              sep_ang[0]*pixscale,
                              (sep_ang[1]-self.m_extra_rot) % 360.,
                              mag,
                              chi_square])

            self.m_flux_position_port.append(res, data_dim=2)

            return chi_square
Example #8
    def gaussian_noise(self,
                       images,
                       psf,
                       parang,
                       aperture):
        """
        Function to compute the (constant) variance for the likelihood function when the
        variance parameter is set to gaussian (see Mawet et al. 2014). The planet is first removed
        from the dataset with the values specified as *param* in the constructor of the instance.

        Parameters
        ----------
        images : numpy.ndarray
            Input images.
        psf : numpy.ndarray
            PSF template.
        parang : numpy.ndarray
            Parallactic angles (deg).
        aperture : dict
            Properties of the circular aperture. The radius is recommended to be larger than or
            equal to 0.5*lambda/D.

        Returns
        -------
        float
            Variance.
        """

        pixscale = self.m_image_in_port.get_attribute("PIXSCALE")

        fake = fake_planet(images=images,
                           psf=psf,
                           parang=parang,
                           position=(self.m_param[0]/pixscale, self.m_param[1]),
                           magnitude=self.m_param[2],
                           psf_scaling=self.m_psf_scaling)

        _, res_arr = pca_psf_subtraction(images=fake,
                                         angles=-1.*parang+self.m_extra_rot,
                                         pca_number=self.m_pca_number)

        stack = combine_residuals(method=self.m_residuals, res_rot=res_arr)

        _, noise, _, _ = false_alarm(image=stack[0, ],
                                     x_pos=aperture['pos_x'],
                                     y_pos=aperture['pos_y'],
                                     size=aperture['radius'],
                                     ignore=False)

        return noise**2
Example #9
        def _objective(arg):
            sys.stdout.write('.')
            sys.stdout.flush()

            pos_y = arg[0]
            pos_x = arg[1]
            mag = arg[2]

            sep = math.sqrt((pos_y - center[0])**2 + (pos_x - center[1])**2)
            ang = math.atan2(pos_y - center[0],
                             pos_x - center[1]) * 180. / math.pi - 90.

            fake = fake_planet(images=images,
                               psf=psf,
                               parang=parang,
                               position=(sep, ang),
                               magnitude=mag,
                               psf_scaling=self.m_psf_scaling)

            im_shape = (fake.shape[-2], fake.shape[-1])

            mask = create_mask(im_shape, [self.m_cent_size, self.m_edge_size])

            _, im_res = pca_psf_subtraction(images=fake * mask,
                                            angles=-1. * parang +
                                            self.m_extra_rot,
                                            pca_number=self.m_pca_number)

            stack = combine_residuals(method=self.m_residuals, res_rot=im_res)

            self.m_res_out_port.append(stack, data_dim=3)

            merit = merit_function(residuals=stack,
                                   function=self.m_merit,
                                   variance="poisson",
                                   aperture=self.m_aperture,
                                   sigma=self.m_sigma)

            position = rotate_coordinates(center, (pos_y, pos_x),
                                          -self.m_extra_rot)

            res = np.asarray((position[1], position[0], sep * pixscale,
                              (ang - self.m_extra_rot) % 360., mag, merit))

            self.m_flux_position_port.append(res, data_dim=2)

            return merit
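
A small worked example of the separation / position-angle conversion at the top of _objective (the -90 deg offset makes the angle count from the image y-axis; the exact convention is an assumption here, and the pixel values are hypothetical):

import math

center = (50., 50.)
pos_y, pos_x = 60., 50.                      # hypothetical pixel position

sep = math.hypot(pos_y - center[0], pos_x - center[1])
ang = math.atan2(pos_y - center[0], pos_x - center[1]) * 180. / math.pi - 90.

print(sep, ang % 360.)                       # 10.0 0.0
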
Example #10
    def run(self) -> None:
        """
        Run method of the module. Selects for each image the reference images closest in time while
        taking into account a rotation threshold for a fixed separation, median-combines the
        reference images, and subtracts the reference image from each image separately.
        Alternatively, a single, median-combined reference image can be created and subtracted from
        all images. All images are used if the rotation condition cannot be met. Both the
        individual residuals (before derotation) and the stacked residuals are stored.

        Returns
        -------
        NoneType
            None
        """
        @typechecked
        def _subtract_psf(
                image: np.ndarray,
                parang_thres: Optional[float],
                nref: Optional[int],
                reference: Optional[np.ndarray] = None) -> np.ndarray:

            if parang_thres:
                ang_diff = np.abs(parang[self.m_count] - parang)
                index_thres = np.where(ang_diff > parang_thres)[0]

                if index_thres.size == 0:
                    reference = self.m_image_in_port.get_all()
                    warnings.warn(
                        'No images meet the rotation threshold. Creating a reference '
                        'PSF from the median of all images instead.')

                else:
                    if nref:
                        index_diff = np.abs(self.m_count - index_thres)
                        index_near = np.argsort(index_diff)[:nref]
                        index_sort = np.sort(index_thres[index_near])
                        reference = self.m_image_in_port[index_sort, :, :]

                    else:
                        reference = self.m_image_in_port[index_thres, :, :]

                reference = np.median(reference, axis=0)

            self.m_count += 1

            return image - reference

        parang = -1. * self.m_image_in_port.get_attribute(
            'PARANG') + self.m_extra_rot

        if self.m_threshold:
            parang_thres = 2. * math.atan2(
                self.m_threshold[2] * self.m_threshold[1],
                2. * self.m_threshold[0])
            parang_thres = math.degrees(parang_thres)
            reference = None

        else:
            parang_thres = None
            reference = self.m_image_in_port.get_all()
            reference = np.median(reference, axis=0)

        self.apply_function_to_images(_subtract_psf,
                                      self.m_image_in_port,
                                      self.m_res_out_port,
                                      'Classical ADI',
                                      func_args=(parang_thres,
                                                 self.m_nreference, reference))

        self.m_res_in_port = self.add_input_port(self.m_res_out_port._m_tag)
        im_res = self.m_res_in_port.get_all()

        res_rot = np.zeros(im_res.shape)
        for i, item in enumerate(parang):
            res_rot[i, ] = rotate(im_res[i, ], item, reshape=False)

        stack = combine_residuals(self.m_residuals,
                                  res_rot,
                                  residuals=im_res,
                                  angles=parang)

        self.m_stack_out_port.set_all(stack)

        if self.m_threshold:
            history = f'threshold [deg] = {parang_thres:.2f}'
        else:
            history = 'threshold [deg] = None'

        self.m_res_out_port.copy_attributes(self.m_image_in_port)
        self.m_res_out_port.add_history('ClassicalADIModule', history)

        self.m_stack_out_port.copy_attributes(self.m_image_in_port)
        self.m_stack_out_port.add_history('ClassicalADIModule', history)

        self.m_res_out_port.close_port()
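
A worked example of the rotation threshold computed from self.m_threshold (assumed tuple meaning: separation, PSF FWHM, and threshold in FWHM units, in consistent angular units): it is the parallactic rotation for which a source at that separation is displaced by the requested number of FWHM.

import math

sep, fwhm, n_fwhm = 0.5, 0.1, 1.0            # hypothetical values (arcsec, arcsec, FWHM)
parang_thres = math.degrees(2. * math.atan2(n_fwhm * fwhm, 2. * sep))
print(f'{parang_thres:.2f} deg')             # 11.42 deg
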
Example #11
    def run(self):
        """
        Run method of the module. An artificial planet is injected (based on the noise level) at a
        given separation and position angle. The amount of self-subtraction is then determined and
        the contrast limit is calculated for a given sigma level or false positive fraction. A
        correction for small sample statistics is applied for both cases. Note that if the sigma
        level is fixed, the false positive fraction changes with separation, following the
        Student's t-distribution (see Mawet et al. 2014 for details).

        Returns
        -------
        NoneType
            None
        """

        images = self.m_image_in_port.get_all()
        psf = self.m_psf_in_port.get_all()

        if psf.shape[0] != 1 and psf.shape[0] != images.shape[0]:
            raise ValueError('The number of frames in psf_in_tag {0} does not match with the '
                             'number of frames in image_in_tag {1}. The DerotateAndStackModule can '
                             'be used to average the PSF frames (without derotating) before '
                             'applying the ContrastCurveModule.'.format(psf.shape, images.shape))

        cpu = self._m_config_port.get_attribute("CPU")
        parang = self.m_image_in_port.get_attribute("PARANG")
        pixscale = self.m_image_in_port.get_attribute("PIXSCALE")

        if self.m_cent_size is not None:
            self.m_cent_size /= pixscale

        if self.m_edge_size is not None:
            self.m_edge_size /= pixscale

        self.m_aperture /= pixscale

        pos_r = np.arange(self.m_separation[0]/pixscale,
                          self.m_separation[1]/pixscale,
                          self.m_separation[2]/pixscale)

        pos_t = np.arange(self.m_angle[0]+self.m_extra_rot,
                          self.m_angle[1]+self.m_extra_rot,
                          self.m_angle[2])

        if self.m_cent_size is None:
            index_del = np.argwhere(pos_r-self.m_aperture <= 0.)
        else:
            index_del = np.argwhere(pos_r-self.m_aperture <= self.m_cent_size)

        pos_r = np.delete(pos_r, index_del)

        if self.m_edge_size is None or self.m_edge_size > images.shape[1]/2.:
            index_del = np.argwhere(pos_r+self.m_aperture >= images.shape[1]/2.)
        else:
            index_del = np.argwhere(pos_r+self.m_aperture >= self.m_edge_size)

        pos_r = np.delete(pos_r, index_del)

        sys.stdout.write("Running ContrastCurveModule...\r")
        sys.stdout.flush()

        positions = []
        for sep in pos_r:
            for ang in pos_t:
                positions.append((sep, ang))

        # Create a queue object which will contain the results
        queue = mp.Queue()

        result = []
        jobs = []

        working_place = self._m_config_port.get_attribute("WORKING_PLACE")

        # Create temporary files
        tmp_im_str = os.path.join(working_place, "tmp_images.npy")
        tmp_psf_str = os.path.join(working_place, "tmp_psf.npy")

        np.save(tmp_im_str, images)
        np.save(tmp_psf_str, psf)

        mask = create_mask(images.shape[-2:], [self.m_cent_size, self.m_edge_size])

        _, im_res = pca_psf_subtraction(images=images*mask,
                                        angles=-1.*parang+self.m_extra_rot,
                                        pca_number=self.m_pca_number)

        noise = combine_residuals(method=self.m_residuals, res_rot=im_res)

        for i, pos in enumerate(positions):
            process = mp.Process(target=contrast_limit,
                                 args=(tmp_im_str,
                                       tmp_psf_str,
                                       noise,
                                       mask,
                                       parang,
                                       self.m_psf_scaling,
                                       self.m_extra_rot,
                                       self.m_pca_number,
                                       self.m_threshold,
                                       self.m_aperture,
                                       self.m_residuals,
                                       self.m_snr_inject,
                                       pos,
                                       queue),
                                 name=(str(os.path.basename(__file__)) + '_radius=' +
                                       str(np.round(pos[0]*pixscale, 1)) + '_angle=' +
                                       str(np.round(pos[1], 1))))

            jobs.append(process)

        for i, job in enumerate(jobs):
            job.start()

            if (i+1)%cpu == 0:
                # Start *cpu* number of processes. Wait for them to finish and start again *cpu*
                # number of processes.

                for k in jobs[i+1-cpu:(i+1)]:
                    k.join()

            elif (i+1) == len(jobs) and (i+1)%cpu != 0:
                # Wait for the last processes to finish if number of processes is not a multiple
                # of *cpu*

                for k in jobs[(i + 1 - (i+1)%cpu):]:
                    k.join()

            progress(i, len(jobs), "Running ContrastCurveModule...")

        # Send termination sentinel to queue
        queue.put(None)

        while True:
            item = queue.get()

            if item is None:
                break
            else:
                result.append(item)

        os.remove(tmp_im_str)
        os.remove(tmp_psf_str)

        result = np.asarray(result)

        # Sort the results first by separation and then by angle
        indices = np.lexsort((result[:, 1], result[:, 0]))
        result = result[indices]

        result = result.reshape((pos_r.size, pos_t.size, 4))

        mag_mean = np.nanmean(result, axis=1)[:, 2]
        mag_var = np.nanvar(result, axis=1)[:, 2]
        res_fpf = result[:, 0, 3]

        limits = np.column_stack((pos_r*pixscale, mag_mean, mag_var, res_fpf))

        self.m_contrast_out_port.set_all(limits, data_dim=2)

        sys.stdout.write("\rRunning ContrastCurveModule... [DONE]\n")
        sys.stdout.flush()

        history = str(self.m_threshold[0])+" = "+str(self.m_threshold[1])

        self.m_contrast_out_port.add_history("ContrastCurveModule", history)
        self.m_contrast_out_port.copy_attributes(self.m_image_in_port)
        self.m_contrast_out_port.close_port()
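
A small illustration of the result bookkeeping near the end of run(): np.lexsort orders the (separation, angle, contrast, FPF) rows by separation first and angle second, so the reshape to (n_sep, n_ang, 4) groups all angles of one separation before averaging. The numbers below are made up for the demonstration.

import numpy as np

result = np.array([[2., 90., 7.1, 1e-5],
                   [1., 90., 6.8, 2e-5],
                   [2.,  0., 7.3, 1e-5],
                   [1.,  0., 6.9, 2e-5]])

order = np.lexsort((result[:, 1], result[:, 0]))   # secondary key, then primary key
result = result[order].reshape((2, 2, 4))

mag_mean = np.nanmean(result, axis=1)[:, 2]        # mean contrast per separation
print(mag_mean)                                    # [6.85 7.2]
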
Example #12
    def _run_single_processing(self, star_reshape: np.ndarray, im_shape: tuple,
                               indices: Optional[np.ndarray]) -> None:
        """
        Internal function to create the residuals, derotate the images, and write the output
        using a single process.
        """

        start_time = time.time()

        # Get the parallactic angles
        parang = -1. * self.m_star_in_port.get_attribute(
            'PARANG') + self.m_extra_rot

        if self.m_ifs_data:
            # Get the wavelengths
            if 'WAVELENGTH' in self.m_star_in_port.get_all_non_static_attributes():
                wavelength = self.m_star_in_port.get_attribute('WAVELENGTH')

            else:
                raise ValueError(
                    'The wavelengths are not found. These should be stored '
                    'as the \'WAVELENGTH\' attribute.')

            # Calculate the wavelength ratios
            scales = scaling_factors(wavelength)

        else:
            scales = None

        if self.m_processing_type in ['ADI', 'SDI']:
            pca_first = self.m_components
            pca_secon = [-1]  # Not used

        elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
            pca_first = self.m_components[0]
            pca_secon = self.m_components[1]

        # Setup output arrays

        out_array_res = np.zeros(im_shape)

        if self.m_ifs_data:
            if self.m_processing_type in ['ADI', 'SDI']:
                res_shape = (len(pca_first), len(wavelength), im_shape[-2],
                             im_shape[-1])

            elif self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
                res_shape = (len(pca_first), len(pca_secon), len(wavelength),
                             im_shape[-2], im_shape[-1])

        else:
            res_shape = (len(pca_first), im_shape[-2], im_shape[-1])

        out_array_mean = np.zeros(res_shape)
        out_array_medi = np.zeros(res_shape)
        out_array_weig = np.zeros(res_shape)
        out_array_clip = np.zeros(res_shape)

        # loop over all combinations of PC numbers and apply the reductions
        for i, pca_1 in enumerate(pca_first):
            for j, pca_2 in enumerate(pca_secon):
                progress(i + j,
                         len(pca_first) + len(pca_secon),
                         'Creating residuals...', start_time)

                # process images
                residuals, res_rot = postprocessor(
                    images=star_reshape,
                    angles=parang,
                    scales=scales,
                    pca_number=(pca_1, pca_2),
                    pca_sklearn=self.m_pca,
                    im_shape=im_shape,
                    indices=indices,
                    processing_type=self.m_processing_type)

                # 1.) derotated residuals
                if self.m_res_arr_out_ports is not None:
                    if not self.m_ifs_data:
                        self.m_res_arr_out_ports[pca_1].set_all(res_rot)
                        self.m_res_arr_out_ports[pca_1].copy_attributes(
                            self.m_star_in_port)
                        self.m_res_arr_out_ports[pca_1].add_history(
                            'PcaPsfSubtractionModule',
                            f'max PC number = {pca_first}')

                    else:
                        out_array_res = residuals

                # 2.) mean residuals
                if self.m_res_mean_out_port is not None:
                    if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
                        out_array_mean[i, j] = combine_residuals(
                            method='mean', res_rot=res_rot, angles=parang)

                    else:
                        out_array_mean[i] = combine_residuals(method='mean',
                                                              res_rot=res_rot,
                                                              angles=parang)

                # 3.) median residuals
                if self.m_res_median_out_port is not None:
                    if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
                        out_array_medi[i, j] = combine_residuals(
                            method='median', res_rot=res_rot, angles=parang)

                    else:
                        out_array_medi[i] = combine_residuals(method='median',
                                                              res_rot=res_rot,
                                                              angles=parang)

                # 4.) noise-weighted residuals
                if self.m_res_weighted_out_port is not None:
                    if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
                        out_array_weig[i, j] = combine_residuals(
                            method='weighted',
                            res_rot=res_rot,
                            residuals=residuals,
                            angles=parang)

                    else:
                        out_array_weig[i] = combine_residuals(
                            method='weighted',
                            res_rot=res_rot,
                            residuals=residuals,
                            angles=parang)

                # 5.) clipped mean residuals
                if self.m_res_rot_mean_clip_out_port is not None:
                    if self.m_processing_type in ['SDI+ADI', 'ADI+SDI']:
                        out_array_clip[i, j] = combine_residuals(
                            method='clipped', res_rot=res_rot, angles=parang)

                    else:
                        out_array_clip[i] = combine_residuals(method='clipped',
                                                              res_rot=res_rot,
                                                              angles=parang)

        # Configure the data output according to the processing type
        # 1.) derotated residuals
        if self.m_res_arr_out_ports is not None and self.m_ifs_data:
            if pca_secon[0] == -1:
                history = f'max PC number = {pca_first}'

            else:
                history = f'max PC number = {pca_first} / {pca_secon}'

            # squeeze out_array_res to reduce the dimensionality, as the residuals of
            # SDI+ADI and ADI+SDI are always of the form (1, 1, ...)
            squeezed = np.squeeze(out_array_res)

            if isinstance(self.m_components, tuple):
                self.m_res_arr_out_ports.set_all(squeezed,
                                                 data_dim=squeezed.ndim)
                self.m_res_arr_out_ports.copy_attributes(self.m_star_in_port)
                self.m_res_arr_out_ports.add_history('PcaPsfSubtractionModule',
                                                     history)

            else:
                for i, pca in enumerate(self.m_components):
                    self.m_res_arr_out_ports[pca].append(squeezed[i])
                    self.m_res_arr_out_ports[pca].add_history(
                        'PcaPsfSubtractionModule', history)

        # 2.) mean residuals
        if self.m_res_mean_out_port is not None:
            self.m_res_mean_out_port.set_all(out_array_mean,
                                             data_dim=out_array_mean.ndim)

        # 3.) median residuals
        if self.m_res_median_out_port is not None:
            self.m_res_median_out_port.set_all(out_array_medi,
                                               data_dim=out_array_medi.ndim)

        # 4.) noise-weighted residuals
        if self.m_res_weighted_out_port is not None:
            self.m_res_weighted_out_port.set_all(out_array_weig,
                                                 data_dim=out_array_weig.ndim)

        # 5.) clipped mean residuals
        if self.m_res_rot_mean_clip_out_port is not None:
            self.m_res_rot_mean_clip_out_port.set_all(
                out_array_clip, data_dim=out_array_clip.ndim)
Example #13
def contrast_limit(path_images, path_psf, noise, mask, parang, psf_scaling,
                   extra_rot, pca_number, threshold, aperture, residuals,
                   snr_inject, position):
    """
    Function for calculating the contrast limit at a specified position for a given sigma level or
    false positive fraction, both corrected for small sample statistics.

    Parameters
    ----------
    path_images : str
        System location of the stack of images (3D).
    path_psf : str
        System location of the PSF template for the fake planet (3D). Either a single image or a
        stack of images equal in size to science data.
    noise : numpy.ndarray
        Residuals of the PSF subtraction (3D) without injection of fake planets. Used to measure
        the noise level with a correction for small sample statistics.
    mask : numpy.ndarray
        Mask (2D).
    parang : numpy.ndarray
        Derotation angles (deg).
    psf_scaling : float
        Additional scaling factor of the planet flux (e.g., to correct for a neutral density
        filter). Should have a positive value.
    extra_rot : float
        Additional rotation angle of the images in clockwise direction (deg).
    pca_number : int
        Number of principal components used for the PSF subtraction.
    threshold : tuple(str, float)
        Detection threshold for the contrast curve, either in terms of "sigma" or the false
        positive fraction (FPF). The value is a tuple, for example provided as ("sigma", 5.) or
        ("fpf", 1e-6). Note that when sigma is fixed, the false positive fraction will change with
        separation. Also, sigma only corresponds to the standard deviation of a normal distribution
        at large separations (i.e., large number of samples).
    aperture : float
        Aperture radius (pix) for the calculation of the false positive fraction.
    residuals : str
        Method used for combining the residuals ("mean", "median", "weighted", or "clipped").
    snr_inject : float
        Signal-to-noise ratio of the injected planet signal that is used to measure the amount
        of self-subtraction.
    position : tuple(float, float)
        The separation (pix) and position angle (deg) of the fake planet.

    Returns
    -------
    float
        Separation (pix).
    float
        Position angle (deg).
    float
        Contrast (mag).
    float
        False positive fraction.
    """

    images = np.load(path_images)
    psf = np.load(path_psf)

    if threshold[0] == "sigma":
        sigma = threshold[1]

        # Calculate the FPF for a given sigma level
        fpf = student_t(t_input=threshold,
                        radius=position[0],
                        size=aperture,
                        ignore=False)

    elif threshold[0] == "fpf":
        fpf = threshold[1]

        # Calculate the sigma level for a given FPF
        sigma = student_t(t_input=threshold,
                          radius=position[0],
                          size=aperture,
                          ignore=False)

    else:
        raise ValueError("Threshold type not recognized.")

    # Cartesian coordinates of the fake planet
    xy_fake = polar_to_cartesian(images, position[0], position[1] - extra_rot)

    # Determine the noise level
    _, t_noise, _, _ = false_alarm(image=noise[0, ],
                                   x_pos=xy_fake[0],
                                   y_pos=xy_fake[1],
                                   size=aperture,
                                   ignore=False)

    # Aperture properties
    im_center = center_subpixel(images)
    ap_dict = {
        'type': 'circular',
        'pos_x': im_center[1],
        'pos_y': im_center[0],
        'radius': aperture
    }

    # Measure the flux of the star
    phot_table = aperture_photometry(psf_scaling * psf[0, ],
                                     create_aperture(ap_dict),
                                     method='exact')
    star = phot_table['aperture_sum'][0]

    # Magnitude of the injected planet
    flux_in = snr_inject * t_noise
    mag = -2.5 * math.log10(flux_in / star)

    # Inject the fake planet
    fake = fake_planet(images=images,
                       psf=psf,
                       parang=parang,
                       position=(position[0], position[1]),
                       magnitude=mag,
                       psf_scaling=psf_scaling)

    # Run the PSF subtraction
    _, im_res = pca_psf_subtraction(images=fake * mask,
                                    angles=-1. * parang + extra_rot,
                                    pca_number=pca_number)

    # Stack the residuals
    im_res = combine_residuals(method=residuals, res_rot=im_res)

    # Measure the flux of the fake planet
    flux_out, _, _, _ = false_alarm(image=im_res[0, ],
                                    x_pos=xy_fake[0],
                                    y_pos=xy_fake[1],
                                    size=aperture,
                                    ignore=False)

    # Calculate the amount of self-subtraction
    attenuation = flux_out / flux_in

    # Calculate the detection limit
    contrast = sigma * t_noise / (attenuation * star)

    # The flux_out can be negative, for example if the aperture includes self-subtraction regions
    if contrast > 0.:
        contrast = -2.5 * math.log10(contrast)
    else:
        contrast = np.nan

    # Separation [pix], position angle [deg], contrast [mag], FPF
    return position[0], position[1], contrast, fpf
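
A tiny numeric example of the detection-limit conversion at the end of contrast_limit: the attenuation corrects for self-subtraction before the noise-to-star flux ratio is turned into a magnitude. All numbers are hypothetical.

import math

sigma, t_noise, star = 5., 2.0, 1.0e5
flux_in, flux_out = 100., 60.

attenuation = flux_out / flux_in                    # 0.6
contrast = sigma * t_noise / (attenuation * star)   # ~1.67e-4
print(-2.5 * math.log10(contrast))                  # ~9.45 mag
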
Example #14
    def PCArun(self) -> None:
        """
        Run method of the module. An artificial planet is injected (based on the noise level) at a
        given separation and position angle. The amount of self-subtraction is then determined and
        the contrast limit is calculated for a given sigma level or false positive fraction. A
        correction for small sample statistics is applied for both cases. Note that if the sigma
        level is fixed, the false positive fraction changes with separation, following the
        Student's t-distribution (see Mawet et al. 2014 for details).

        Returns
        -------
        NoneType
            None
        """

        images = self.m_image_in_port.get_all()
        psf = self.m_psf_in_port.get_all()

        if psf.shape[0] != 1 and psf.shape[0] != images.shape[0]:
            raise ValueError(
                f'The number of frames in psf_in_tag {psf.shape} does not match with '
                f'the number of frames in image_in_tag {images.shape}. The '
                f'DerotateAndStackModule can be used to average the PSF frames '
                f'(without derotating) before applying the ContrastCurveModule.'
            )

        cpu = self._m_config_port.get_attribute('CPU')
        working_place = self._m_config_port.get_attribute('WORKING_PLACE')

        parang = self.m_image_in_port.get_attribute('PARANG')
        pixscale = self.m_image_in_port.get_attribute('PIXSCALE')

        self.m_image_in_port.close_port()
        self.m_psf_in_port.close_port()

        if self.m_cent_size is not None:
            self.m_cent_size /= pixscale

        if self.m_edge_size is not None:
            self.m_edge_size /= pixscale

        self.m_aperture /= pixscale

        pos_r = np.arange(self.m_separation[0] / pixscale,
                          self.m_separation[1] / pixscale,
                          self.m_separation[2] / pixscale)

        pos_t = np.arange(self.m_angle[0] + self.m_extra_rot,
                          self.m_angle[1] + self.m_extra_rot, self.m_angle[2])

        if self.m_cent_size is None:
            index_del = np.argwhere(pos_r - self.m_aperture <= 0.)
        else:
            index_del = np.argwhere(
                pos_r - self.m_aperture <= self.m_cent_size)

        pos_r = np.delete(pos_r, index_del)

        if self.m_edge_size is None or self.m_edge_size > images.shape[1] / 2.:
            index_del = np.argwhere(
                pos_r + self.m_aperture >= images.shape[1] / 2.)
        else:
            index_del = np.argwhere(
                pos_r + self.m_aperture >= self.m_edge_size)

        pos_r = np.delete(pos_r, index_del)

        positions = []
        for sep in pos_r:
            for ang in pos_t:
                positions.append((sep, ang))

        result = []
        async_results = []

        # Create temporary files
        tmp_im_str = os.path.join(working_place, 'tmp_images.npy')
        tmp_psf_str = os.path.join(working_place, 'tmp_psf.npy')

        np.save(tmp_im_str, images)
        np.save(tmp_psf_str, psf)

        mask = create_mask(images.shape[-2:],
                           (self.m_cent_size, self.m_edge_size))

        _, im_res = pca_psf_subtraction(images=images * mask,
                                        angles=-1. * parang + self.m_extra_rot,
                                        pca_number=self.m_pca_number)

        noise = combine_residuals(method=self.m_residuals, res_rot=im_res)

        pool = mp.Pool(cpu)

        for pos in positions:
            async_results.append(
                pool.apply_async(
                    contrast_limit,
                    args=(tmp_im_str, tmp_psf_str, noise, mask, parang,
                          self.m_psf_scaling, self.m_extra_rot,
                          self.m_pca_number, self.m_threshold, self.m_aperture,
                          self.m_residuals, self.m_snr_inject, pos)))

        pool.close()

        start_time = time.time()

        # wait for all processes to finish
        while mp.active_children():
            # number of finished processes
            nfinished = sum([i.ready() for i in async_results])

            progress(nfinished, len(positions),
                     'Calculating detection limits...', start_time)

            # check if new processes have finished every 5 seconds
            time.sleep(5)

        if nfinished != len(positions):
            sys.stdout.write(
                '\r                                                      ')
            sys.stdout.write('\rCalculating detection limits... [DONE]\n')
            sys.stdout.flush()

        # get the results for every async_result object
        for item in async_results:
            result.append(item.get())

        pool.terminate()

        os.remove(tmp_im_str)
        os.remove(tmp_psf_str)

        result = np.asarray(result)

        # Sort the results first by separation and then by angle
        indices = np.lexsort((result[:, 1], result[:, 0]))
        result = result[indices]

        result = result.reshape((pos_r.size, pos_t.size, 4))

        mag_mean = np.nanmean(result, axis=1)[:, 2]
        mag_var = np.nanvar(result, axis=1)[:, 2]
        res_fpf = result[:, 0, 3]

        limits = np.column_stack(
            (pos_r * pixscale, mag_mean, mag_var, res_fpf))

        self.m_image_in_port._check_status_and_activate()
        self.m_contrast_out_port._check_status_and_activate()

        self.m_contrast_out_port.set_all(limits, data_dim=2)

        history = f'{self.m_threshold[0]} = {self.m_threshold[1]}'
        self.m_contrast_out_port.add_history('ContrastCurveModule', history)
        self.m_contrast_out_port.copy_attributes(self.m_image_in_port)
        self.m_contrast_out_port.close_port()
Example #15
def contrast_limit(
        path_images: str, path_psf: str, noise: np.ndarray, mask: np.ndarray,
        parang: np.ndarray, psf_scaling: float, extra_rot: float,
        pca_number: int, threshold: Tuple[str, float], aperture: float,
        residuals: str, snr_inject: float,
        position: Tuple[float, float]) -> Tuple[float, float, float, float]:
    """
    Function for calculating the contrast limit at a specified position for a given sigma level or
    false positive fraction, both corrected for small sample statistics.

    Parameters
    ----------
    path_images : str
        System location of the stack of images (3D).
    path_psf : str
        System location of the PSF template for the fake planet (3D). Either a single image or a
        stack of images equal in size to science data.
    noise : numpy.ndarray
        Residuals of the PSF subtraction (3D) without injection of fake planets. Used to measure
        the noise level with a correction for small sample statistics.
    mask : numpy.ndarray
        Mask (2D).
    parang : numpy.ndarray
        Derotation angles (deg).
    psf_scaling : float
        Additional scaling factor of the planet flux (e.g., to correct for a neutral density
        filter). Should have a positive value.
    extra_rot : float
        Additional rotation angle of the images in clockwise direction (deg).
    pca_number : int
        Number of principal components used for the PSF subtraction.
    threshold : tuple(str, float)
        Detection threshold for the contrast curve, either in terms of 'sigma' or the false
        positive fraction (FPF). The value is a tuple, for example provided as ('sigma', 5.) or
        ('fpf', 1e-6). Note that when sigma is fixed, the false positive fraction will change with
        separation. Also, sigma only corresponds to the standard deviation of a normal distribution
        at large separations (i.e., large number of samples).
    aperture : float
        Aperture radius (pix) for the calculation of the false positive fraction.
    residuals : str
        Method used for combining the residuals ('mean', 'median', 'weighted', or 'clipped').
    snr_inject : float
        Signal-to-noise ratio of the injected planet signal that is used to measure the amount
        of self-subtraction.
    position : tuple(float, float)
        The separation (pix) and position angle (deg) of the fake planet.

    Returns
    -------
    float
        Separation (pix).
    float
        Position angle (deg).
    float
        Contrast (mag).
    float
        False positive fraction.
    """

    images = np.load(path_images)
    psf = np.load(path_psf)

    # Cartesian coordinates of the fake planet
    yx_fake = polar_to_cartesian(images, position[0], position[1] - extra_rot)

    # Determine the noise level
    noise_apertures = compute_aperture_flux_elements(image=noise[0, ],
                                                     x_pos=yx_fake[1],
                                                     y_pos=yx_fake[0],
                                                     size=aperture,
                                                     ignore=False)

    t_noise = np.std(noise_apertures, ddof=1) * \
              math.sqrt(1 + 1 / (noise_apertures.shape[0]))

    # get sigma from fpf or fpf from sigma
    # Note that the number of degrees of freedom is given by nu = n-1 with n the number of samples.
    # See Section 3 of Mawet et al. (2014) for more details on the Student's t distribution.

    if threshold[0] == 'sigma':
        sigma = threshold[1]

        # Calculate the FPF for a given sigma level

        fpf = t.sf(sigma, noise_apertures.shape[0] - 1, loc=0., scale=1.)

    elif threshold[0] == 'fpf':
        fpf = threshold[1]

        # Calculate the sigma level for a given FPF
        sigma = t.isf(fpf, noise_apertures.shape[0] - 1, loc=0., scale=1.)

    else:
        raise ValueError('Threshold type not recognized.')

    # Aperture properties
    im_center = center_subpixel(images)

    # Measure the flux of the star
    ap_phot = CircularAperture((im_center[1], im_center[0]), aperture)
    phot_table = aperture_photometry(psf_scaling * psf[0, ],
                                     ap_phot,
                                     method='exact')
    star = phot_table['aperture_sum'][0]

    # Magnitude of the injected planet
    flux_in = snr_inject * t_noise
    mag = -2.5 * math.log10(flux_in / star)

    # Inject the fake planet
    fake = fake_planet(images=images,
                       psf=psf,
                       parang=parang,
                       position=(position[0], position[1]),
                       magnitude=mag,
                       psf_scaling=psf_scaling)

    # Run the PSF subtraction
    _, im_res = pca_psf_subtraction(images=fake * mask,
                                    angles=-1. * parang + extra_rot,
                                    pca_number=pca_number)

    # Stack the residuals
    im_res = combine_residuals(method=residuals, res_rot=im_res)
    flux_out_frame = im_res[0, ] - noise[0, ]

    # Measure the flux of the fake planet after PCA
    # the first element is the planet
    flux_out = compute_aperture_flux_elements(image=flux_out_frame,
                                              x_pos=yx_fake[1],
                                              y_pos=yx_fake[0],
                                              size=aperture,
                                              ignore=False)[0]

    # Calculate the amount of self-subtraction
    attenuation = flux_out / flux_in
    # The throughput cannot be negative, but this can happen due to numerical inaccuracies
    if attenuation < 0:
        attenuation = 0

    # Calculate the detection limit
    contrast = (sigma * t_noise + np.mean(noise_apertures)) / (attenuation *
                                                               star)

    # The flux_out can be negative, for example if the aperture includes self-subtraction regions
    if contrast > 0.:
        contrast = -2.5 * math.log10(contrast)
    else:
        contrast = np.nan

    # Separation [pix], position angle [deg], contrast [mag], FPF
    return position[0], position[1], contrast, fpf
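
A numeric illustration of the sigma <-> FPF conversion used above: the Student's t distribution with nu = n_apertures - 1 degrees of freedom accounts for the small number of noise samples (Mawet et al. 2014). The aperture count below is hypothetical.

from scipy.stats import t

n_apertures = 20                              # hypothetical number of noise apertures
sigma = 5.

fpf = t.sf(sigma, n_apertures - 1)            # sigma level -> false positive fraction
sigma_back = t.isf(fpf, n_apertures - 1)      # and back again
print(fpf, sigma_back)
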
Example #16
    def _run_single_processing(self, star_reshape, im_shape, indices):
        """
        Internal function to create the residuals, derotate the images, and write the output
        using a single process.

        :return: None
        """

        parang = -1. * self.m_star_in_port.get_attribute(
            "PARANG") + self.m_extra_rot

        pca_number_init = self.m_pca_number_init

        pca_numbers = self.m_components

        residuals_list, res_rot_list = iterative_pca_psf_subtraction(
            images=star_reshape,
            angles=parang,
            pca_numbers=pca_numbers,
            pca_number_init=pca_number_init,
            indices=indices)

        for i, res_rot in enumerate(res_rot_list):
            #progress(i, len(self.m_components), "Creating residuals...")

            history = "max iter PC number = " + str(np.amax(self.m_components))

            # 1.) derotated residuals
            if self.m_res_arr_out_ports is not None:
                self.m_res_arr_out_ports[pca_numbers[i]].set_all(res_rot)
                self.m_res_arr_out_ports[pca_numbers[i]].copy_attributes_from_input_port(
                    self.m_star_in_port)
                self.m_res_arr_out_ports[pca_numbers[i]].add_history_information(
                    "IterativePcaPsfSubtractionModule", history)

            # 2.) mean residuals
            if self.m_res_mean_out_port is not None:
                stack = combine_residuals(method="mean", res_rot=res_rot)
                self.m_res_mean_out_port.append(stack, data_dim=3)

            # 3.) median residuals
            if self.m_res_median_out_port is not None:
                stack = combine_residuals(method="median", res_rot=res_rot)
                self.m_res_median_out_port.append(stack, data_dim=3)

            # 4.) noise-weighted residuals
            if self.m_res_weighted_out_port is not None:
                stack = combine_residuals(method="weighted",
                                          res_rot=res_rot,
                                          residuals=residuals_list[i],
                                          angles=parang)

                self.m_res_weighted_out_port.append(stack, data_dim=3)

            # 5.) clipped mean residuals
            if self.m_res_rot_mean_clip_out_port is not None:
                stack = combine_residuals(method="clipped", res_rot=res_rot)
                self.m_res_rot_mean_clip_out_port.append(stack, data_dim=3)

        sys.stdout.write("Creating residuals... [DONE]\n")
        sys.stdout.flush()
Example #17
    def run(self) -> None:
        """
        Run method of the module. Selects for each image the reference images closest in time while
        taking into account a rotation threshold for a fixed separation, median-combines the
        reference images, and subtracts the reference image from each image separately.
        Alternatively, a single, median-combined reference image can be created and subtracted from
        all images. All images are used if the rotation condition cannot be met. Both the
        individual residuals (before derotation) and the stacked residuals are stored.

        Returns
        -------
        NoneType
            None
        """

        parang = -1. * self.m_image_in_port.get_attribute(
            'PARANG') + self.m_extra_rot

        nimages = self.m_image_in_port.get_shape()[0]

        if self.m_threshold:
            parang_thres = 2. * math.atan2(
                self.m_threshold[2] * self.m_threshold[1],
                2. * self.m_threshold[0])
            parang_thres = math.degrees(parang_thres)
            reference = None

        else:
            parang_thres = None
            reference = self.m_image_in_port.get_all()
            reference = np.median(reference, axis=0)

        ang_diff = np.zeros((nimages, parang.shape[0]))

        for i in range(nimages):
            ang_diff[i, :] = np.abs(parang[i] - parang)

        self.apply_function_to_images(
            subtract_psf,
            self.m_image_in_port,
            self.m_res_out_port,
            'Classical ADI',
            func_args=(parang_thres, self.m_nreference, reference, ang_diff,
                       self.m_image_in_port))

        self.m_res_in_port = self.add_input_port(self.m_res_out_port._m_tag)
        im_res = self.m_res_in_port.get_all()

        res_rot = np.zeros(im_res.shape)
        for i, item in enumerate(parang):
            res_rot[i, ] = rotate(im_res[i, ], item, reshape=False)

        stack = combine_residuals(self.m_residuals,
                                  res_rot,
                                  residuals=im_res,
                                  angles=parang)

        self.m_stack_out_port.set_all(stack)

        if self.m_threshold:
            history = f'threshold [deg] = {parang_thres:.2f}'
        else:
            history = 'threshold [deg] = None'

        self.m_res_out_port.copy_attributes(self.m_image_in_port)
        self.m_res_out_port.add_history('ClassicalADIModule', history)

        self.m_stack_out_port.copy_attributes(self.m_image_in_port)
        self.m_stack_out_port.add_history('ClassicalADIModule', history)

        self.m_res_out_port.close_port()
Example #18
def pixel_variance(var_type: str, images: np.ndarray, parang: np.ndarray,
                   cent_size: Optional[float], edge_size: Optional[float],
                   pca_number: int, residuals: str,
                   aperture: Tuple[int, int, float], sigma: float) -> float:
    """
    Function to calculate the variance of the noise. After the PSF subtraction, images are rotated
    in opposite direction of the regular derotation, therefore dispersing any companion or disk
    signal. The noise is measured within an annulus.

    Parameters
    ----------
    var_type : str
        Variance type ('gaussian' or 'hessian').
    images : numpy.ndarray
        Input images (3D).
    parang : numpy.ndarray
        Parallactic angles.
    cent_size : float, None
        Radius of the central mask (pix). No mask is used when set to None.
    edge_size : float, None
        Outer radius (pix) beyond which pixels are masked. No outer mask is used when set to
        None.
    pca_number : int
        Number of principal components (PCs) used for the PSF subtraction.
    residuals : str
        Method for combining the residuals ('mean', 'median', 'weighted', or 'clipped').
    aperture : tuple(int, int, float)
        Aperture position (y, x) and radius (pix).
    sigma : float, None
        Standard deviation (pix) of the Gaussian kernel which is used to smooth the images.

    Returns
    -------
    float
        Either the variance of the pixel values ('gaussian') or the variance of the determinant
        of the Hessian ('hessian').
    """

    mask = create_mask(images.shape[-2:], (cent_size, edge_size))

    _, im_res_derot = pca_psf_subtraction(images * mask, parang, pca_number)

    res_noise = combine_residuals(residuals, im_res_derot)

    sep_ang = cartesian_to_polar(center_subpixel(res_noise), aperture[0],
                                 aperture[1])

    if var_type == 'gaussian':
        selected = select_annulus(res_noise[0, ], sep_ang[0] - aperture[2],
                                  sep_ang[0] + aperture[2])

    elif var_type == 'hessian':
        hessian_rr, hessian_rc, hessian_cc = hessian_matrix(
            image=res_noise[0, ],
            sigma=sigma,
            mode='constant',
            cval=0.,
            order='rc')

        hes_det = (hessian_rr * hessian_cc) - (hessian_rc * hessian_rc)

        selected = select_annulus(hes_det, sep_ang[0] - aperture[2],
                                  sep_ang[0] + aperture[2])

    return float(np.var(selected))
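
For context, a hedged numpy sketch of an annulus selection in the spirit of select_annulus (the sub-pixel center convention is an assumption): keep the pixels whose radial distance from the image center lies between the inner and outer radius.

import numpy as np

def sketch_select_annulus(image, radius_in, radius_out):
    # image: 2D array; radii in pixels
    n_y, n_x = image.shape
    grid_y, grid_x = np.mgrid[:n_y, :n_x]

    center_y, center_x = (n_y - 1) / 2., (n_x - 1) / 2.   # assumed sub-pixel center
    radius = np.hypot(grid_y - center_y, grid_x - center_x)

    return image[(radius >= radius_in) & (radius <= radius_out)]
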