Code example #1
    def read_table_file(file):
        """Interprets text in a file input.

        Args:
            file (str):
                Name of the file containing the table.

        Returns:
            table (astropy.Table):
                Table based on file input.
        """

        try:
            table = Table.read(file)
        except IORegistryError:
            try:
                table = Table.read(file, format='ascii.fixed_width')
            except IORegistryError:
                table = Table.read(file, format='ascii.no_header')
        except Exception:
            # Fall back to interpreting the file as a plain list of file names
            files = []
            logger.info("Reading file names from input file {}.".format(file))
            with open(file, 'r') as f:
                for filename in f.readlines():
                    filename = filename.replace('\n', '')
                    files.append(filename)
            table = Table(data=[files], names=['FILE'], dtype=[object])

        return table
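A minimal usage sketch, assuming the imports this snippet relies on (Table from astropy.table, IORegistryError from astropy.io.registry, and a module-level logger); the file name is hypothetical:

    from astropy.io.registry import IORegistryError
    from astropy.table import Table

    # A FITS or ASCII table is parsed directly; a plain list of file names
    # falls through to the last branch and yields a single 'FILE' column.
    table = read_table_file('files.tab')
    print(table.colnames)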
Code example #2
File: plots.py Project: deepin00/specklepy
def save_figure(file_name=None):
    """Save figure to a file, if a file name is provided.

    Args:
        file_name (str, optional):
            Name of the file to store the figure to. Nothing is done if not provided.
    """
    if file_name is not None:
        logger.info(f"Saving figure to {file_name}")

        # Identify requested file extension
        root, extension = os.path.splitext(file_name)
        extensions = ['.pdf', '.png', extension]

        # Save figure in multiple formats
        for ext in extensions:
            plot_file = root + ext
            try:
                plt.savefig(plot_file, bbox_inches='tight', pad_inches=0)
            except FileNotFoundError:
                path, file_name = os.path.split(plot_file)
                if not os.path.exists(path):
                    logger.info(f"Making dir {path}")
                    os.mkdir(path=path)
                plt.savefig(plot_file, bbox_inches='tight', pad_inches=0)
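A usage sketch with a hypothetical file name: since the extension list always contains '.pdf' and '.png', a single call stores the figure in several formats, creating the target directory on demand.

    import matplotlib.pyplot as plt

    plt.plot([0, 1, 2], [0, 1, 4])
    # Writes out/figure.pdf, out/figure.png, and out/figure.eps
    save_figure('out/figure.eps')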
Code example #3
 def data(self, data):
     with fits.open(self.file_path, mode='update') as hdu_list:
         hdu_list[0].data = data
         hdu_list[0].header.set('UPDATED', str(datetime.now()))
         hdu_list.flush()
     if self.verbose:
         logger.info(f"Updating data in {self.file_path}")
Code example #4
    def get_encircled_energy(self, saveto=None):
        """Extracts the encircled energy from an aperture as a function of
        radius."""

        # Initialize output radii and array
        radius_map = self.make_radius_map()
        rdata = np.unique(radius_map)
        ydata = np.zeros(rdata.shape)
        edata = np.zeros(rdata.shape)

        # Extract 2D image
        image = self.get_integrated()

        # Iterate over aperture radii
        for index, radius in enumerate(rdata):
            subset = image[np.where(radius_map <= radius)]
            ydata[index] = np.sum(subset)  # flux enclosed by this radius
            edata[index] = np.std(subset)  # scatter as a rough uncertainty estimate (an assumption; the original summed the full image)

        # Save results to file
        if saveto is not None:
            header = "radius_(pix) encircled_energy(data_unit)"
            data = np.concatenate(([rdata], [ydata]), axis=0).transpose()
            np.savetxt(saveto, data, header=header)
            logger.info(
                "Saved encircled energy data to file {}".format(saveto))

        return rdata, ydata, edata
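The loop above sums, for each radius, the flux enclosed by the aperture. A self-contained sketch of the same idea on a synthetic Gaussian, useful for checking that the encircled energy converges to the total flux:

    import numpy as np

    # Synthetic 2D Gaussian image and a radius map around its center
    y, x = np.mgrid[-32:33, -32:33]
    radius_map = np.sqrt(x ** 2 + y ** 2)
    image = np.exp(-0.5 * (radius_map / 5.0) ** 2)

    # Encircled energy: flux summed within each aperture radius
    rdata = np.unique(radius_map)
    ydata = np.array([np.sum(image[radius_map <= r]) for r in rdata])
    print(ydata[-1] / np.sum(image))  # -> 1.0 at the largest radius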
Code example #5
 def powerspec(self):
     self.Fourier_data = np.zeros(self.data.shape)
     for index, frame in enumerate(self.data):
         print("\rFourier transforming frame {}/{}".format(
             index + 1, self.data.shape[0]),
               end='')
         self.Fourier_data[index] = tf.powerspec(frame)
     print()
     logger.info("Computed the Fourier transform of every frame.")
Code example #6
def start():
    logger.info("Initializing application...")
    app = QtWidgets.QApplication(sys.argv)

    logger.info("Initializing GUI...")
    win = Window()
    win.show()

    sys.exit(app.exec_())
Code example #7
File: plots.py Project: deepin00/specklepy
def desaturate_color(color,
                     ncolors=1,
                     saturation_values=None,
                     saturation_min=0.1):
    """Desaturates a color and returns a list of desaturated colors.

    Args:
        color (str or tuple):
            Color to desaturate, given as a matplotlib color name or an RGB tuple.
        ncolors (int): Number of returned colors.
        saturation_values (None or list, dtype=float):
            Saturation values to use. Derived from the input color and saturation_min, if not provided.
        saturation_min (float): Minimum value of saturation.

    Returns:
        colors (list): List of RGB representations of colors with length ncolors.
    """

    # Input parameters
    if isinstance(color, str):
        rgb_color = clrs.to_rgb(color)
        hsv_color = clrs.rgb_to_hsv(rgb_color)
    elif isinstance(color, tuple):
        logger.info("Interpreting color tuple () as RGB values.")
        hsv_color = clrs.rgb_to_hsv(color)
    else:
        raise SpecklepyTypeError('desaturate_color()', 'color', type(color),
                                 'str or tuple')

    if not isinstance(ncolors, int):
        raise SpecklepyTypeError('desaturate_color()', 'ncolors',
                                 type(ncolors), 'int')

    if not isinstance(saturation_min, float):
        raise SpecklepyTypeError('desaturate_color()', 'saturation_min',
                                 type(saturation_min), 'float')

    if saturation_values is None:
        saturation_values = np.linspace(hsv_color[1],
                                        saturation_min,
                                        num=ncolors)
    elif isinstance(saturation_values, list):
        pass  # list is correct, nothing to adapt
    elif isinstance(saturation_values, float):
        saturation_values = [saturation_values]  # wrap the single value in a list
    else:
        raise SpecklepyTypeError('desaturate_color()', 'saturation_values',
                                 type(saturation_values), 'list')

    # Create list of colors with varied saturation values
    colors = []
    for saturation_value in saturation_values:
        color = clrs.hsv_to_rgb((hsv_color[0], saturation_value, hsv_color[2]))
        colors.append(color)

    return colors
Code example #8
    def makedirs(self, dir_list):
        """
        This function makes sure that the paths exist and creates if not.
        """

        for key in dir_list:
            path = getattr(self.paths, key)
            path = os.path.dirname(
                path) + '/'  # Cosmetics to allow for generic input for inDir
            if not os.path.exists(path):
                logger.info(f"Creating {key} directory {path}")
                os.makedirs(path)
Code example #9
 def crop(self):
     if self.cropped:
         logger.info("Margins are removed already from aperture instance.")
     else:
         if self.data.ndim == 2:
             self.data = copy(
                 self.data[self.x0 - self.radius:self.x0 + self.radius + 1,
                           self.y0 - self.radius:self.y0 + self.radius + 1])
         elif self.data.ndim == 3:
             self.data = copy(
                 self.data[:,
                           self.x0 - self.radius:self.x0 + self.radius + 1,
                           self.y0 - self.radius:self.y0 + self.radius + 1])
         self.cropped = True
Code example #10
    def coadd_fft(self):
        """Co-add the Fourier transforms of the image and PSF frames.

        Returns:
            fourier_image (np.ndarray, dtype=np.complex128):
                Fourier-transformed object reconstruction.
        """

        # Padding and Fourier transforming the images
        logger.info("Padding the images and PSFs...")

        for file_index in trange(len(self.in_files), desc="Processing files"):

            # Open PSF and image files
            psf_cube = fits.getdata(self.psf_files[file_index])
            image_cube = fits.getdata(
                os.path.join(self.in_dir, self.in_files[file_index]))
            n_frames = image_cube.shape[0]

            for frame_index in trange(n_frames,
                                      desc="Fourier transforming frames"):

                # Padding and transforming the image
                img = pad_array(
                    array=image_cube[frame_index],
                    pad_vector=self.pad_vectors[file_index],
                    mode=self.mode,
                    reference_image_pad_vector=self.reference_image_pad_vector)
                f_img = fftshift(fft2(img))

                # Padding and Fourier transforming PSF
                psf = psf_cube[frame_index]
                psf = np.pad(
                    psf,
                    self.psf_pad_vector,
                    mode='constant',
                )
                f_psf = fftshift(fft2(psf))

                # Co-adding for the average
                self.enumerator += np.multiply(f_img, np.conjugate(f_psf))
                self.denominator += np.abs(np.square(f_psf))

        # Compute the object:
        # Note that this division implicitly performs the averaging, since the number of frames cancels between the
        # enumerator and the denominator. Accumulating both sums during the loop keeps the memory usage low.
        self.fourier_image = np.divide(self.enumerator, self.denominator)

        return self.fourier_image
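The closing comment deserves a short demonstration: since the same number of frames enters both sums, dividing the accumulated enumerator by the accumulated denominator equals dividing the frame averages, so no per-frame arrays need to be kept in memory. A minimal numpy sketch of that cancellation:

    import numpy as np

    rng = np.random.default_rng(0)
    f_img = rng.normal(size=(4, 8, 8)) + 1j * rng.normal(size=(4, 8, 8))
    f_psf = rng.normal(size=(4, 8, 8)) + 1j * rng.normal(size=(4, 8, 8))

    # Accumulated sums, as in coadd_fft ...
    num = np.sum(f_img * np.conjugate(f_psf), axis=0)
    den = np.sum(np.abs(f_psf) ** 2, axis=0)
    # ... give the same ratio as the frame averages: the factor 1/N cancels
    assert np.allclose(num / den, (num / 4) / (den / 4))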
Code example #11
    def powerspec_to_file(self, infile=None, Fourier_file=None):
        if not hasattr(self, 'Fourier_file'):
            self.initialize_Fourier_file(infile, Fourier_file)

        with fits.open(self.Fourier_file, mode='update') as hdulist:
            for index, frame in enumerate(self.data):
                print("\rFourier transforming frame {}/{}".format(
                    index + 1, self.data.shape[0]),
                      end='')
                hdulist[0].data[index] = tf.powerspec(frame)
                hdulist.flush()
            print()
        logger.info(
            "Computed the Fourier transform of every frame and saved them to {}"
            .format(self.Fourier_file))
Code example #12
    def apodize(self, type, radius, crop=False):
        """Apodize the Fourier object with a Gaussian or Airy disk kernel.

        Args:
            type (str):
                Type of the apodization. Can be either `Gaussian` or `Airy`. See specklepy.core.psfmodel for details.
            radius (float):
                Radius of the apodization kernel. This is the standard deviation of a Gaussian kernel or the radius of
                first zero in the case of an Airy function.
            crop (bool, optional):
                Crop corners of the PSF and set them to zero.

        Returns:
            apodized (np.array, dtype=np.complex128):
                Apodized Fourier-plane image.
        """

        # Assert image shape
        if self.fourier_image.shape[0] != self.fourier_image.shape[1]:
            logger.warning(
                "The apodization is applied to a non-quadratic input image. This may cause some "
                "unpredictable results!")

        logger.info("Apodizing the object...")
        if type is None or radius is None:  # skip if either parameter is missing
            logger.warning(
                "Apodization is skipped for either type or radius not being defined!"
            )
            return self.fourier_image

        # Interpret function input and compute apodization PSF
        psf_model = PSFModel(type=type, radius=radius)
        apodization_psf = psf_model(self.fourier_image.shape)

        # Crop corners of the PSF
        if crop:
            threshold = apodization_psf[0, int(psf_model.center[1])]
            apodization_psf -= threshold
            apodization_psf = np.maximum(apodization_psf, 0.0)

        # Normalize to unity
        apodization_psf /= np.sum(apodization_psf)

        # Transform into Fourier space
        apodization_otf = otf(apodization_psf)
        self.fourier_image = np.multiply(self.fourier_image, apodization_otf)

        return self.fourier_image
Code example #13
 def initialize_Fourier_file(self, infile, Fourier_file):
     self.infile = infile
     self.Fourier_file = Fourier_file
     logger.info("Initializing Fourier file {}".format(self.Fourier_file))
     header = fits.getheader(self.infile)
     header.set('HIERARCH specklepy TYPE',
                'Fourier transform of an aperture')
     header.set('HIERARCH specklepy ORIGIN', self.infile)
     header.set('HIERARCH specklepy APERTURE INDEX', str(self.index))
     header.set('HIERARCH specklepy APERTURE RADIUS', self.radius)
     header.set('UPDATED', str(datetime.now()))
     data = np.zeros(self.data.shape)
     fits.writeto(self.Fourier_file,
                  data=data,
                  header=header,
                  overwrite=True)
     logger.info("Initialized {}".format(self.Fourier_file))
Code example #14
File: resolution.py Project: deepin00/specklepy
def get_resolution_parameters(wavelength, diameter, pixel_scale=None):
    airy_zero = first_airy_zero(wavelength=wavelength, diameter=diameter)
    logger.info(f"First airy zero: {airy_zero:.3e} rad | {rad2mas(airy_zero):.3f} mas")
    if pixel_scale:
        logger.info(f"First airy zero: {rad2mas(airy_zero) / pixel_scale:.3f} pix")

    sigma = airy_zero / 1.22 * 0.42
    logger.info(f"Gaussian sigma: {sigma:.3e} rad | {rad2mas(sigma):.3f} mas")
    if pixel_scale:
        logger.info(f"Gaussian sigma: {rad2mas(sigma) / pixel_scale:.3f} pix")
Code example #15
    def ifft(self, total_flux=None):
        """Compute the image by an inverse Fourier transformation of the Fourier-plane image.

        Args:
            total_flux (float, optional):
                Total flux value. The image will be rescaled to obey the total flux, if provided.

        Returns:
            image (np.ndarray):
                Image-plane image.
        """
        logger.info("Inverse Fourier transformation of the object...")
        image = ifft2(self.fourier_image)
        image = np.abs(image)
        if total_flux is not None:
            image_scale = total_flux / np.sum(image)
            image = np.multiply(image, image_scale)
        return image
Code example #16
File: analysis.py Project: deepin00/specklepy
def get_psf_variation(file,
                      index,
                      radius,
                      out_file=None,
                      normalize=None,
                      debug=False):
    if isinstance(index, list):
        if len(index) == 1:
            if index[0] == 0:
                logger.info(
                    "Estimate image intensity peak and use as aperture index")
                image = fits.getdata(file)
                if image.ndim == 3:
                    image = np.sum(image, axis=0)
                index = np.unravel_index(np.argmax(image), image.shape)
                logger.info(f"Index is set to {index}")
            else:
                index = (index[0], index[0])
        index = tuple(index)

    if file is None:
        raise RuntimeError("No file was provided!")

    if out_file is None:
        out_file = "var_" + os.path.basename(file).replace(".fits", ".dat")

    # Initialize the aperture
    aperture = Aperture(index, radius, data=file, crop=True)
    if debug:
        imshow(aperture.get_integrated(), maximize=False)

    # Extract PSF profile
    logger.info(f"Extracting PSF profile from file {file}")
    xdata, ydata, edata = aperture.get_psf_variance()

    # Normalize profile
    if normalize == 'peak':
        norm = ydata[0]  # capture the norm before dividing in place
        ydata /= norm
        edata /= norm
    elif normalize == 'aperture':
        norm = ydata[-1]
        ydata /= norm
        edata /= norm
    elif normalize is not None:
        raise ValueError(
            "Normalize must be either 'peak', 'aperture', or None!")

    # Save encircled energy data to outfile
    out_table = Table(data=[xdata, ydata, edata],
                      names=['Radius', 'Variance', 'dVariance'])
    logger.info(f"Store PSF profile to {out_file}")
    out_table.write(out_file, overwrite=True, format='ascii.fixed_width')
Code example #17
def subtract_scalar_background(files, params, prefix=None, debug=False):
    """Estimate and subtract a scalar background."""

    if not isinstance(files, (list, np.ndarray)):
        raise SpecklepyTypeError('subtract_scalar_background',
                                 argtype=type(files),
                                 argname='files',
                                 expected='list')
    else:
        if len(files) == 0:
            raise RuntimeError(
                "Sky subtraction received an empty list of files!")

    logger.info("Estimating scalar background and subtract...")
    for file_index, file in enumerate(files):

        image, header = fits.getdata(os.path.join(params.paths.filePath, file),
                                     header=True)

        # Update header for and initialize the outfile
        header.set('PIPELINE', 'SPECKLEPY')
        header.set('SKYCORR', str(datetime.now()))
        corrected_file = prefix + file
        corrected_file = os.path.join(params.paths.filePath, corrected_file)
        outfile = Outfile(filename=corrected_file,
                          header=header,
                          shape=image.shape)

        # Estimate scalar background and uncertainties, and subtract
        if image.ndim == 2:
            mean, median, std = sigma_clipped_stats(
                image, sigma=params.sky.backgroundSigmaClip)
            outfile.data = image - mean
            image_var = np.ones(image.shape) * np.square(std)
            outfile.new_extension(name='VAR', data=image_var)
        elif image.ndim == 3:
            means, medians, stds = sigma_clipped_stats(
                image, sigma=params.sky.backgroundSigmaClip, axis=(1, 2))
            logger.info(
                f"Sigma clipped stats:\t{np.mean(means):.2f} +- {np.mean(stds):.2f}"
            )
            outfile.new_extension(name='VAR', data=np.zeros(image.shape))
            tmp_frame = np.ones(image[0].shape)

            for frame_index, frame in enumerate(image):
                print(f"\r\tUpdating frame {frame_index+1:3}...", end='')
                outfile.update_frame(frame_index=frame_index,
                                     data=np.subtract(frame,
                                                      means[frame_index]))
                outfile.update_frame(frame_index=frame_index,
                                     data=tmp_frame *
                                     np.square(stds[frame_index]),
                                     extension='VAR')
            print()
        else:
            raise RuntimeError(
                f"Images are supposed to have 2 or 3 dimensions but this one has {image.ndim}!"
            )

    logger.info("Scalar background subtraction complete!")
Code example #18
    def initialize_product_files(self, prefix=None):
        """Copy the science data cubes into the stored out directory.

        Args:
            prefix (str, optional):
                File prefix for output files.

        Returns:
            product_files (list):
                List of paths of the data reduction products.
        """

        # Store update prefix
        if prefix:
            self.out_prefix = prefix

        # Initialize list of data reduction products
        product_files = []

        # Copy the science data cubes into outdir (with an additional file prefix)
        for file in self.filter({'OBSTYPE': ['SKY', 'SCIENCE']}):
            src = os.path.join(self.in_dir, file)
            dest = os.path.join(self.out_dir, self.out_prefix + file)
            logger.info(f"Initializing data product file {dest}")
            # shutil.copy2 (requires `import shutil`) handles paths with spaces, unlike os.system(f"cp {src} {dest}")
            shutil.copy2(src, dest)
            with fits.open(dest, mode='update') as hdu_list:
                hdu_list[0].header.set('PIPELINE', 'SPECKLEPY')
                hdu_list[0].header.set(
                    'REDUCED',
                    datetime.now().strftime('%Y-%m-%d_%H:%M:%S'))
                hdu_list.flush()

            # Store new file in the list of product files
            product_files.append(dest)

        # Store list of product files
        self.product_files = product_files

        return product_files
Code example #19
    def create_long_exposures(self, alignment_method):
        """Compute long exposures from the input data cubes."""

        # Initialize list of long exposure files
        long_exposure_files = []

        # Iterate over input data cubes
        for file in self.in_files:

            # Read data from file
            cube = fits.getdata(os.path.join(self.in_dir, file))
            image = None
            image_var = None

            # Compute collapsed or SSA'ed images from the cube
            if alignment_method == 'collapse':
                image = np.sum(cube, axis=0)
                tmp_file = 'int_' + os.path.basename(file)
            elif alignment_method == 'ssa':
                image, image_var = coadd_frames(cube=cube, box=self.box)
                tmp_file = 'ssa_' + os.path.basename(file)
            else:
                raise SpecklepyValueError('Reconstruction', 'alignment_method', alignment_method,
                                          expected="either 'collapse' or 'ssa'")

            # Store data to a new Outfile instance
            tmp_path = os.path.join(self.tmp_dir, tmp_file)
            logger.info(f"Saving temporary reconstruction of cube {file} to {tmp_path}")
            tmp_file_object = Outfile(tmp_path, data=image, verbose=True)
            if image_var is not None:
                tmp_file_object.new_extension(name=self.var_ext, data=image_var)

            # Add the recently created file to the list
            long_exposure_files.append(tmp_file)

        return long_exposure_files
Code example #20
    def identify_setups(self, keywords):
        """Identify distinct observational setups in a list of files.

        This function identifies distinct observational setups.

        Args:
            keywords (list of str):
                Header keywords used to distinguish the setups.
        """

        # Check input parameters
        if not isinstance(keywords, list):
            raise SpecklepyTypeError('identify_setups', 'keywords',
                                     type(keywords), 'list')

        # Identifying setups key-by-key
        logger.info(
            "Identifying distinct observational setups in the file list...")
        # self.table['SETUP'] = [None] * len(self.table)
        self.table.add_column(
            col=Column(data=[None] * len(self.table), name='SETUP'))

        # Iterate over keywords and identify unique settings per key
        for key in keywords:
            try:
                unique = np.unique(self.table[key].data)
            except KeyError:
                logger.info(
                    f"Key {key} is not available in the file table and will be ignored!"
                )
                continue
            logger.info(
                f"Identified {len(unique)} setups by keyword {key}:\t{unique}")

            for index, setup in enumerate(unique):
                for row in self.table:
                    if row[key] == setup:
                        if row['SETUP'] is None:
                            row['SETUP'] = str(index)
                        else:
                            row['SETUP'] += str(index)

        # Overwrite setup keys by length-1 string
        combinations = np.unique(self.table['SETUP'].data)
        for index, combination in enumerate(combinations):
            row_indexes = np.where(self.table['SETUP'].data == combination)
            self.table['SETUP'][row_indexes] = string.ascii_uppercase[index]
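The key-by-key pass appends one digit per keyword, so rows sharing all keyword values end up with the same combination string, which the final loop then maps to single uppercase letters. A compact sketch of that idea on a toy table (column values hypothetical):

    import numpy as np
    from astropy.table import Table

    table = Table({'FILTER': ['K', 'K', 'H'], 'EXPTIME': [2.0, 2.0, 30.0]})
    setups = ['' for _ in range(len(table))]
    for key in ['FILTER', 'EXPTIME']:
        unique = list(np.unique(table[key].data))
        for row_index in range(len(table)):
            setups[row_index] += str(unique.index(table[key][row_index]))
    # Combinations '10', '10', '01' map to 'B', 'B', 'A'
    labels = {c: chr(65 + i) for i, c in enumerate(np.unique(setups))}
    print([labels[s] for s in setups])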
Code example #21
File: setup.py Project: deepin00/specklepy
def gather_header_information(path,
                              instrument,
                              par_file=None,
                              list_file=None,
                              sort_by=None):
    """Sets up the data reduction parameter file and file list.

    Args:
        path (str):
            Path to the files.
        instrument (str):
            Name of the instrument that took the data. This must be covered by config/instruments.cfg.
        par_file (str, optional):
            Name of the output default parameter file for the reduction.
        list_file (str, optional):
            Name of the output file that contains all the file names and header information.
        sort_by (str, optional):
            Header card that is used for the sorting of files.
    """

    # Defaults
    header_cards = [
        'OBSTYPE', 'OBJECT', 'FILTER', 'EXPTIME', 'nFRAMES', 'DATE'
    ]
    dtypes = [str, str, str, float, int, str]
    instrument_config_file = os.path.join(os.path.dirname(__file__),
                                          '../config/instruments.cfg')

    # Read config
    configs = config.read(instrument_config_file)
    instrument = configs['INSTRUMENTS'][instrument]
    instrument_header_cards = configs[instrument]

    # Double-check whether all aliases are defined; iterate over a copy, as cards may be removed from the list
    for card in list(header_cards):
        if card not in instrument_header_cards:
            logger.info(
                f"Dropping header card {card} from setup identification, as there is no description in the config file."
                f"\nCheck out {instrument_config_file} for details.")
            dtypes.pop(header_cards.index(card))  # keep dtypes aligned with header_cards
            header_cards.remove(card)

    # Apply fall back values
    if path is None:
        path = '../reduction'
    if list_file is None:
        list_file = 'files.tab'
    if par_file is None:
        par_file = 'reduction.yaml'

    # Find files
    if '*' in path:
        files = glob.glob(path)
    else:
        files = glob.glob(os.path.join(path, '*fits'))
    if len(files):
        logger.info(f"Found {len(files)} file(s)")
        files.sort()
    else:
        logger.error(f"Found no files in {path}!")
        raise RuntimeError(f"Found no files in {path}!")

    # Initialize output file information table
    table = Table(names=['FILE'] + header_cards, dtype=[str] + dtypes)

    # Read data from files
    for file in files:
        logger.info(f"Retrieving header information from file {file}")
        hdr = fits.getheader(file)
        new_row = [os.path.basename(file)]
        for card in header_cards:
            try:
                new_row.append(hdr[instrument_header_cards[card]])
            except KeyError:
                logger.info(
                    f"Skipping file {os.path.basename(file)} due to at least one missing header card "
                    f"({instrument_header_cards[card]}).")
                break
        if len(new_row) == len(table.columns):
            table.add_row(new_row)

    # Sort table entries by default properties and user request
    table.sort('FILE')
    table.sort('OBSTYPE')
    if sort_by:
        table.sort(sort_by)

    # Identify instrument setups
    setups = identify_instrument_setups(table)
    table.add_column(setups)

    # Save table
    logger.info(f"Writing header information to {list_file}")
    table.write(list_file, format='ascii.fixed_width', overwrite=True)

    # Write dummy parameter file for the reduction
    _, ext = os.path.splitext(par_file)
    if 'yaml' in ext:
        logger.info(
            f"Creating default reduction YAML parameter file {par_file}")
        par_file_content = f"PATHS:\n  filePath: {path}\n  fileList: {list_file}\n  outDir: Science/\n  tmpDir: tmp/" \
                           f"\n\nFLAT:\n  masterFlatFile: MasterFlat.fits" \
                           f"\n\nSKY:\n  method: scalar"
    else:
        logger.info(
            f"Creating default reduction INI parameter file {par_file}")
        par_file_content = f"[PATHS]\nfilePath = {path}\nfileList = {list_file}\noutDir = Science/\ntmpDir = tmp/" \
                           f"\n\n[FLAT]\nmasterFlatFile = MasterFlat.fits" \
                           f"\n\n[SKY]\nmethod = scalar"
    with open(par_file, 'w') as f:
        f.write(par_file_content)
Code example #22
    def __init__(self,
                 in_file,
                 out_dir,
                 frame_shape,
                 in_dir=None,
                 cards=None,
                 header_card_prefix=None):
        """Create a PSFFile instance.

        Args:
            in_file (str):
                Name of the parent file.
            out_dir (str):
                Name of the directory that the file will be stored in.
            frame_shape (tuple):
                Shape of the PSF frames, which is the box size.
            in_dir (str, optional):
                Path to the input file.
            cards (dict, optional):
                Dictionary of header cards.
            header_card_prefix (str, optional):
                Prefix for the header cards.
        """

        # Create PSF directory, if not existing yet
        if not os.path.exists(out_dir):
            logger.info(f"Creating PSF directory {out_dir}")
            os.makedirs(out_dir)

        # Adapt filename to form the name of the out_file
        # _, out_file = os.path.split(in_file)
        # out_file = out_file.replace('.fits', '_psfs.fits')
        out_file = 'psf_' + os.path.basename(in_file)
        # self.filename = outDir + out_file

        # Type assertion
        if not isinstance(frame_shape, tuple):
            raise SpecklepyTypeError('PSFFile', 'frame_shape',
                                     type(frame_shape), 'tuple')

        if cards is None:
            cards = {}
        elif not isinstance(cards, dict):
            raise SpecklepyTypeError('PSFFile', 'cards', type(cards), 'dict')

        if header_card_prefix is None:
            header_card_prefix = ""
        elif not isinstance(header_card_prefix, str):
            raise SpecklepyTypeError('PSFFile', 'header_card_prefix',
                                     type(header_card_prefix), 'str')

        # Add name of parent file to header
        cards["FILE NAME"] = os.path.basename(in_file)

        # Derive data shape
        if in_dir is not None:
            hdr_input = fits.getheader(os.path.join(in_dir, in_file))
        else:
            hdr_input = fits.getheader(in_file)
        shape = (hdr_input['NAXIS3'], frame_shape[0], frame_shape[1])

        super().__init__(filename=out_file,
                         path=out_dir,
                         shape=shape,
                         cards=cards,
                         header_card_prefix=header_card_prefix)
Code example #23
File: ssa.py Project: deepin00/specklepy
def ssa(files,
        mode='same',
        reference_file=None,
        outfile=None,
        in_dir=None,
        tmp_dir=None,
        lazy_mode=True,
        box_indexes=None,
        debug=False,
        **kwargs):
    """Compute the SSA reconstruction of a list of files.

    The simple shift-and-add (SSA) algorithm makes use of the structure of typical speckle patterns, i.e.
    short-exposure point-spread functions (PSFs). These show multiple peaks resembling the diffraction-limited PSF of
    coherent fractions within the telescope aperture. Under good conditions or on small telescopes, there is typically
    one largest coherent atmospheric cell and therefore, speckle PSFs typically show one major intensity peak. The
    algorithm makes use of this fact and identifies the emission peak in a given observation frame, assuming that this
    always belongs to the same star, and aligns all frames on the coordinate of the emission peak.

    See Bates & Cady (1980) for references.

    Args:
        files (list or array_like):
            List of complete paths to the fits files that shall be considered for the SSA reconstruction.
        mode (str):
            Name of the reconstruction mode: In 'same' mode, the reconstruction covers the same field of view of the
            reference file. In 'full' mode, every patch of the sky that is covered by at least one frame will be
            contained in the final reconstruction.
        reference_file (str, int, optional):
            Path to a reference file or index of the file in files, relative to which the shifts are computed. See
            specklepy.core.alignment.get_shifts for details. Default is 0.
        outfile (specklepy.io.recfile, optional):
            Object to write the result to, if provided.
        in_dir (str, optional):
            Path to the files. `None` is substituted by an empty string.
        tmp_dir (str, optional):
            Path of a directory in which the temporary results are stored.
        lazy_mode (bool, optional):
            Set to False, to enforce the alignment of a single file with respect to the reference file. Default is True.
        box_indexes (list, optional):
            Constrain the search for the intensity peak to the specified box. The full frames are searched if not
            provided.
        debug (bool, optional):
            Show debugging information. Default is False.

    Returns:
        reconstruction (np.ndarray):
            The image reconstruction. The size depends on the mode argument.
    """

    logger.info("Starting SSA reconstruction...")
    # Check parameters
    if not isinstance(files, (list, np.ndarray)):
        if isinstance(files, str):
            files = [files]
        else:
            raise SpecklepyTypeError('ssa()',
                                     argname='files',
                                     argtype=type(files),
                                     expected='list')

    if isinstance(mode, str):
        if mode not in ['same', 'full', 'valid']:
            raise SpecklepyValueError('ssa()',
                                      argname='mode',
                                      argvalue=mode,
                                      expected="'same', 'full' or 'valid'")
    else:
        raise SpecklepyTypeError('ssa()',
                                 argname='mode',
                                 argtype=type(mode),
                                 expected='str')

    if reference_file is None:
        reference_file = files[0]
    elif isinstance(reference_file, int):
        reference_file = files[reference_file]
    elif not isinstance(reference_file, str):
        raise SpecklepyTypeError('ssa()',
                                 argname='reference_file',
                                 argtype=type(reference_file),
                                 expected='str or int')

    if outfile is None:
        pass
    elif isinstance(outfile, str):
        outfile = ReconstructionFile(files=files,
                                     filename=outfile,
                                     cards={"RECONSTRUCTION": "SSA"})
    elif isinstance(outfile, ReconstructionFile):
        pass
    else:
        raise SpecklepyTypeError('ssa()',
                                 argname='outfile',
                                 argtype=type(outfile),
                                 expected='str')

    if in_dir is None:
        in_dir = ''
    reference_file = os.path.join(in_dir, reference_file)

    if tmp_dir is None:
        tmp_dir = ''  # fall back to the current working directory
    elif isinstance(tmp_dir, str) and not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    if not isinstance(lazy_mode, bool):
        raise SpecklepyTypeError('ssa()',
                                 argname='lazy_mode',
                                 argtype=type(lazy_mode),
                                 expected='bool')

    if box_indexes is not None:
        box = Box(box_indexes)
    else:
        box = None

    var_ext = kwargs.get('variance_extension_name', 'VAR')

    if debug:
        logger.setLevel('DEBUG')
        logger.handlers[0].setLevel('DEBUG')
        logger.info("Set logging level to DEBUG")

    # Align reconstructions if multiple files are provided
    if lazy_mode and len(files) == 1:

        # Do not align just a single file
        with fits.open(os.path.join(in_dir, files[0])) as hdu_list:
            cube = hdu_list[0].data
            if var_ext in hdu_list:
                var_cube = hdu_list[var_ext].data
            else:
                var_cube = None
            reconstruction, reconstruction_var = coadd_frames(
                cube, var_cube=var_cube, box=box)

    else:

        # Compute temporary reconstructions of the individual cubes
        tmp_files = []
        for index, file in enumerate(files):
            with fits.open(os.path.join(in_dir, file)) as hdu_list:
                cube = hdu_list[0].data
                if var_ext in hdu_list:
                    var_cube = hdu_list[var_ext].data
                    logger.debug(
                        f"Found variance extension {var_ext} in file {file}")
                else:
                    logger.debug(
                        f"Did not find variance extension {var_ext} in file {file}"
                    )
                    var_cube = None
                tmp, tmp_var = coadd_frames(cube, var_cube=var_cube, box=box)

            if debug:
                imshow(box(tmp), norm='log')

            tmp_file = os.path.basename(file).replace(".fits", "_ssa.fits")
            tmp_file = os.path.join(tmp_dir, tmp_file)
            logger.info(
                "Saving interim SSA reconstruction of cube to {}".format(
                    tmp_file))
            tmp_file_object = Outfile(tmp_file, data=tmp, verbose=True)

            # Store variance of temporary reconstruction
            if tmp_var is not None:
                tmp_file_object.new_extension(var_ext, data=tmp_var)
                del tmp_var
            tmp_files.append(tmp_file)

        # Align tmp reconstructions and add up
        file_shifts, image_shape = alignment.get_shifts(
            tmp_files,
            reference_file=reference_file,
            return_image_shape=True,
            lazy_mode=True)
        pad_vectors, ref_pad_vector = alignment.get_pad_vectors(
            file_shifts,
            cube_mode=(len(image_shape) == 3),
            return_reference_image_pad_vector=True)

        # Iterate over file-wise reconstructions
        reconstruction = None
        reconstruction_var = None
        for index, file in enumerate(tmp_files):

            # Read data
            with fits.open(file) as hdu_list:
                tmp_image = hdu_list[0].data
                if var_ext in hdu_list:
                    tmp_image_var = hdu_list[var_ext].data
                else:
                    tmp_image_var = None

            # Initialize or co-add reconstructions and var images
            if reconstruction is None:
                reconstruction = alignment.pad_array(
                    tmp_image,
                    pad_vectors[index],
                    mode=mode,
                    reference_image_pad_vector=ref_pad_vector)
                if tmp_image_var is not None:
                    reconstruction_var = alignment.pad_array(
                        tmp_image_var,
                        pad_vectors[index],
                        mode=mode,
                        reference_image_pad_vector=ref_pad_vector)
            else:
                reconstruction += alignment.pad_array(
                    tmp_image,
                    pad_vectors[index],
                    mode=mode,
                    reference_image_pad_vector=ref_pad_vector)
                if tmp_image_var is not None:
                    reconstruction_var += alignment.pad_array(
                        tmp_image_var,
                        pad_vectors[index],
                        mode=mode,
                        reference_image_pad_vector=ref_pad_vector)
    logger.info("Reconstruction finished...")

    # Save the result to an Outfile
    if outfile is not None:
        outfile.data = reconstruction
        if reconstruction_var is not None:
            outfile.new_extension(name=var_ext, data=reconstruction_var)

    # Return reconstruction (and the variance map if computed)
    if reconstruction_var is not None:
        return reconstruction, reconstruction_var
    return reconstruction
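A hedged call sketch following the signature above; all file names are hypothetical:

    # Reconstruct two cubes, keeping interim results in tmp/ and
    # writing the final SSA image to ssa.fits
    reconstruction = ssa(['cube_1.fits', 'cube_2.fits'],
                         mode='same',
                         outfile='ssa.fits',
                         in_dir='raw/',
                         tmp_dir='tmp/')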
Code example #24
    def __init__(self,
                 file_list,
                 in_dir=None,
                 out_dir=None,
                 out_prefix=None,
                 **kwargs):
        """Create a FileArchive instance.

        Long description...

        Args:
            file_list (str, list):
                Path to list of files or generic file path. Can also be provided as list type.
            in_dir (str, optional):
                Path to the raw/ input data.
            out_dir (str, optional):
                Path to the product/ output data.
            out_prefix (str, optional):
                Prefix of the product/ output data.
        """

        # Store in and out paths
        if in_dir is None:
            self.in_dir = './'
        else:
            self.in_dir = in_dir
        if out_dir is None:
            self.out_dir = './'
        else:
            self.out_dir = out_dir
        if out_prefix is None:
            self.out_prefix = ''
        else:
            self.out_prefix = out_prefix

        # Interpret the file list input
        if isinstance(file_list, str):
            # Search for files
            files = glob.glob(file_list)
            files.sort()
            if len(files) == 0:
                sys.tracebacklimit = 0
                raise FileNotFoundError(
                    "FileArchive did not find any file matching to{!r}.".
                    format(file_list))
            else:
                logger.info(
                    "FileArchive found {} file(s) matching to {!r}.".format(
                        len(files), file_list))

            if len(files) == 1 and not self.is_fits_file(files[0]):
                logger.info(
                    "Input file is not fits type. FileArchive assumes that input file {!r} contains file "
                    "names.".format(files[0]))
                self.table = self.read_table_file(files[0])
            else:
                self.table = self.gather_table_from_list(files=files, **kwargs)
                self.in_dir = os.path.dirname(files[0])

        elif isinstance(file_list, list):
            logger.info("FileArchive received a list of files.")
            self.table = self.gather_table_from_list(files=file_list, **kwargs)

        else:
            raise SpecklepyTypeError("FileArchive", 'file_list',
                                     type(file_list), 'str')

        # Log identified input files
        logger.debug("FileArchive lists the following files:")
        logger.debug(str(self.table))

        # Initialize the index for iteration
        self.index = 0

        # Initialize the list of product files
        self.product_files = None
Code example #25
    def __init__(self, in_files, psf_files, shifts, mode='same', in_dir=None):
        """ Initialize a FourierObject instance.

        Args:
            in_files (list):
                List of paths of the input files.
            psf_files (list):
                List of paths of the PSF files.
            shifts (list):
                List of integer shifts between the files.
            mode (str, optional):
                Define the size of the output image as 'same' to the reference image or expanding to include the 'full'
                covered field. Default is 'same'.
            in_dir (str, optional):
                Path to the input files.
        """

        # Assert that there are the same number of in_files and psf_files, which should be the case after running the
        # holography function.
        if not len(in_files) == len(psf_files):
            raise ValueError(
                f"The number of input files ({len(in_files)}) and PSF files ({len(psf_files)}) do not "
                f"match!")
        self.in_files = in_files
        self.psf_files = psf_files
        self.shifts = shifts

        # Check whether mode is supported
        if mode not in ['same', 'full', 'valid']:
            raise SpecklepyValueError(
                'FourierObject',
                argname='mode',
                argvalue=mode,
                expected="either 'same', 'full', or 'valid'")
        self.mode = mode
        if in_dir is None:
            self.in_dir = ''
        else:
            self.in_dir = in_dir

        # Extract padding vectors for images and reference image
        logger.info("Initializing padding vectors")
        # files_contain_data_cubes = fits.getdata(in_files[0]).ndim == 3
        self.pad_vectors, self.reference_image_pad_vector = get_pad_vectors(
            shifts=shifts,
            cube_mode=False,
            return_reference_image_pad_vector=True)
        file_index = 0
        image_pad_vector = self.pad_vectors[file_index]

        # Get example image frame, used as final image size
        image_file = in_files[file_index]
        logger.info(f"\tUsing example image frame from {image_file}")
        img = fits.getdata(os.path.join(
            self.in_dir, image_file))[0]  # use the first frame, i.e. drop the time axis
        img = pad_array(
            array=img,
            pad_vector=image_pad_vector,
            mode=mode,
            reference_image_pad_vector=self.reference_image_pad_vector)
        logger.info(f"\tShift: {shifts[file_index]}")
        logger.info(f"\tShape: {img.shape}")

        # Get example PSF frame
        psf_file = psf_files[file_index]
        logger.info(f"\tUsing example PSF frame from {psf_file}")
        psf = fits.getdata(psf_file)[0]
        logger.info(f"\tShape: {psf.shape}")

        # Estimate the padding vector for the f_psf frames to have the same xy-extent as f_img
        dx = img.shape[0] - psf.shape[0]
        dy = img.shape[1] - psf.shape[1]
        psf_pad_vector = ((dx // 2, int(np.ceil(dx / 2))),
                          (dy // 2, int(np.ceil(dy / 2))))
        logger.info(f"\tPad_width for PSFs: {psf_pad_vector}")

        # Apply padding to PSF frame
        psf = np.pad(
            psf,
            psf_pad_vector,
            mode='constant',
        )
        if not img.shape == psf.shape:
            raise ValueError(
                f"The Fourier transformed images and PSFs have different shape, {img.shape} and "
                f"{psf.shape}. Something went wrong with the padding!")
        self.psf_pad_vector = psf_pad_vector

        # Initialize the enumerator, denominator and Fourier object attributes
        self.enumerator = np.zeros(img.shape, dtype='complex128')
        self.denominator = np.zeros(img.shape, dtype='complex128')
        self.fourier_image = np.zeros(img.shape, dtype='complex128')
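The PSF pad vector splits the size difference between image and PSF, rounding the trailing side up, so that the padded PSF matches the image shape exactly. For a hypothetical 64x64 padded image and a 21x21 PSF box:

    import numpy as np

    dx = 64 - 21                             # size difference along one axis
    pad_x = (dx // 2, int(np.ceil(dx / 2)))
    print(pad_x, 21 + sum(pad_x))            # -> (21, 22) 64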
Code example #26
def extract_sources(image,
                    noise_threshold,
                    fwhm,
                    star_finder='DAO',
                    image_var=None,
                    background_subtraction=True,
                    write_to=None,
                    debug=False):
    """Extract sources from an image with a StarFinder routine.

    Long description...

    Args:
        image (np.ndarray or str):
            Image array or the name of a file containing the image array.
        noise_threshold (float):
            Multiple of the uncertainty/ standard deviation of the image.
        fwhm (float):
            Expected full width at half maximum (FWHM) of the sources in units of pixels.
        star_finder (str, optional):
            Choose whether the 'DAO' or 'IRAF' StarFinder implementations from photutils shall be used. Default is
            'DAO'.
        image_var (float or str):
            Variance of the image used for the StarFinder threshold (=noise_threshold * sqrt(image_var)). If not
            provided, the code extracts this value from sigma clipped stats. If provided as str-type, the code tries to
            use this as a key to the FITS file HDU list.
        background_subtraction (bool, optional):
            Let the StarFinder consider the background subtraction. Set False for ignoring background flux. Default is
            `True`.
        write_to (str, optional):
            If provided as a str, the list of identified sources is saved to this file.
        debug (bool, optional):
            Show debugging information. Default is `False`.

    Returns:
        sources (astropy.table.Table): Table of identified sources, None if no
            sources are detected.
    """

    # Set logger level
    if debug:
        logger.setLevel('DEBUG')

    # Input parameters
    if isinstance(image, np.ndarray):
        filename = 'current cube'
    elif isinstance(image, str):
        logger.info(
            "The argument image '{}' is interpreted as file name.".format(
                image))
        filename = image
        image = fits.getdata(filename)
        image = image.squeeze()
    else:
        raise SpecklepyTypeError('extract_sources()',
                                 argname='image',
                                 argtype=type(image),
                                 expected='np.ndarray or str')

    # Prepare noise statistics
    mean, median, std = sigma_clipped_stats(image, sigma=3.0)
    logger.info(
        f"Noise statistics for {filename}:\n\tMean = {mean:.3}\n\tMedian = {median:.3}\n\tStdDev = {std:.3}"
    )

    # Set detection threshold
    if image_var is None:
        threshold = noise_threshold * std
    else:
        if isinstance(image_var, str):
            # Try to load variance extension from file
            image_var = fits.getdata(filename, image_var)
            image_var = np.mean(image_var)
        threshold = noise_threshold * np.sqrt(image_var)

    # Set sky background
    if background_subtraction:
        logger.info(f"Considering mean sky background of {mean}")
        sky = mean
    else:
        sky = 0.0

    # Instantiate StarFinder object
    if not isinstance(star_finder, str):
        raise SpecklepyTypeError('extract_sources',
                                 argname='starfinder',
                                 argtype=type(star_finder),
                                 expected='str')
    if 'dao' in star_finder.lower():
        star_finder = DAOStarFinder(fwhm=fwhm, threshold=threshold, sky=sky)
    elif 'iraf' in star_finder.lower():
        star_finder = IRAFStarFinder(fwhm=fwhm, threshold=threshold, sky=sky)
    else:
        raise SpecklepyValueError('extract_sources',
                                  argname='star_finder',
                                  argvalue=star_finder,
                                  expected="'DAO' or 'IRAF")

    # Find stars
    logger.info("Extracting sources...")
    sources = star_finder(image)

    # Reformat the sources table; the star finder returns None if no sources were detected
    if sources is None:
        logger.info("No sources detected!")
        return None
    sources.sort('flux', reverse=True)
    sources.rename_column('xcentroid', 'x')
    sources.rename_column('ycentroid', 'y')
    sources.keep_columns(['x', 'y', 'flux'])

    # Add terminal output
    logger.info(f"Extracted {len(sources)} sources")
    logger.debug(sources)

    # Save sources table to file, if requested
    if write_to is not None:
        logger.info("Writing list of sources to file {}".format(write_to))
        sources.write(write_to, format='ascii.fixed_width', overwrite=True)

    return sources
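A hedged usage sketch; the file name and parameter values are hypothetical:

    # Detect sources brighter than 5 sigma, assuming a FWHM of 4 pix,
    # and store the resulting table in an ASCII file
    sources = extract_sources('image.fits',
                              noise_threshold=5.0,
                              fwhm=4.0,
                              write_to='sources_image.dat')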
Code example #27
def get_shifts(files,
               reference_file=None,
               mode='correlation',
               lazy_mode=True,
               return_image_shape=False,
               in_dir=None,
               debug=False):
    """Computes the the relative shift of data cubes relative to a reference
    image.

    This function iterates over a list of files and uses the module function get_shift in 'correlation' mode to compute
    the relative shifts of files with respect to a reference file.

    Args:
        files (list or array_like):
            List of files to align.
        reference_file (str, int, optional):
            Path to a reference file or index of the file in files, relative to which the shifts are computed. Default
            is 0.
        mode (str, optional):
            Mode of the shift estimate. In 'correlation' mode, a 2D correlation is used to estimate the shift of the
            array. This is computationally much more expensive than the mutually identical 'maximum' and 'peak' modes,
            which simply identify the coordinates of the emission peaks and return the difference. However, these modes
            may be fooled by reference sources of similar brightness. Passed to the get_shift() function. Default is
            'correlation'.
        lazy_mode (bool, optional):
            Set to False, to enforce the alignment of a single file with respect to the reference file. Default is True.
        return_image_shape (bool, optional):
            Set to True for returning the shape of the anticipated output image. Default is False.
        in_dir (str, optional):
            Path to the files. `None` is substituted by an empty string.
        debug (bool, optional):
            If set to True, it shows the 2D correlation.

    Returns:
        shifts (list):
            List of shifts for each file relative to the reference file.
    """

    # Check input parameters
    if not isinstance(files, (list, np.ndarray)):
        if isinstance(files, str):
            files = [files]
        else:
            raise SpecklepyTypeError('get_shifts()',
                                     argname='files',
                                     argtype=type(files),
                                     expected='list')

    if reference_file is None:
        reference_file = files[0]
    elif isinstance(reference_file, int):
        reference_file = files[reference_file]
    elif not isinstance(reference_file, str):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='reference_file',
                                 argtype=type(reference_file),
                                 expected='str')

    if isinstance(mode, str):
        if mode not in ['correlation', 'maximum', 'peak']:
            raise SpecklepyValueError(
                'get_shifts()',
                argname='mode',
                argvalue=mode,
                expected="'correlation', 'maximum' or 'peak'")
    else:
        raise SpecklepyTypeError('get_shifts()',
                                 argname='mode',
                                 argtype=type(mode),
                                 expected='str')

    if not isinstance(lazy_mode, bool):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='lazy_mode',
                                 argtype=type(lazy_mode),
                                 expected='bool')

    if not isinstance(return_image_shape, bool):
        raise SpecklepyTypeError('get_shifts()',
                                 argname='return_image_shape',
                                 argtype=type(return_image_shape),
                                 expected='bool')

    if in_dir is None:
        in_dir = ''

    # Skip computations if only one file is provided
    if lazy_mode and len(files) == 1:
        logger.info("Only one data cube is provided, nothing to align.")
        shifts = [(0, 0)]
        image_shape = fits.getdata(os.path.join(in_dir, files[0])).shape
        image_shape = (image_shape[-2], image_shape[-1])

    # Otherwise estimate shifts
    else:
        shifts = []

        # Identify reference file and Fourier transform the integrated image
        logger.info(
            f"Computing relative shifts between data cubes. Reference file is {reference_file}"
        )
        reference_image = fits.getdata(os.path.join(in_dir, reference_file))
        if reference_image.ndim == 3:
            # Integrating over time axis if reference image is a cube
            reference_image = np.sum(reference_image, axis=0)
        f_reference_image = np.fft.fft2(reference_image)
        image_shape = reference_image.shape
        del reference_image

        # Iterate over files and estimate shift via 2D correlation of the integrated cubes
        for index, file in enumerate(files):
            if file == reference_file:
                shift = (0, 0)
            else:
                image = fits.getdata(os.path.join(in_dir, file))
                if image.ndim == 3:
                    image = np.sum(image, axis=0)
                shift = get_shift(image,
                                  reference_image=f_reference_image,
                                  is_fourier_transformed=True,
                                  mode=mode,
                                  debug=debug)
            shifts.append(shift)
            logger.info(f"Identified a shift of {shift} for file {file}")
        logger.info(f"Identified the following shifts:\n\t{shifts}")

    if return_image_shape:
        return shifts, image_shape
    else:
        return shifts
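The 'correlation' mode described in the docstring amounts to locating the peak of the 2D cross-correlation, which can be evaluated via FFTs. A minimal standalone sketch of that estimate (a simplification, not the specklepy get_shift implementation):

    import numpy as np

    reference = np.zeros((32, 32))
    reference[10, 12] = 1.0                  # single point source
    image = np.roll(np.roll(reference, 3, axis=0), -2, axis=1)

    # Cross-correlation via FFTs; its peak sits at the relative shift
    correlation = np.fft.ifft2(np.conjugate(np.fft.fft2(reference)) * np.fft.fft2(image))
    shift = np.unravel_index(np.argmax(np.abs(correlation)), correlation.shape)
    print(shift)                             # -> (3, 30), i.e. a shift of (+3, -2) modulo 32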
Code example #28
 def close_application(self):
     choice = QtWidgets.QMessageBox.question(self, 'Close', 'Do you really want to close the application?',
                                             QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
     if choice == QtWidgets.QMessageBox.Yes:
         logger.info("Exiting application...")
         sys.exit()
Code example #29
File: get_psf_profile.py Project: deepin00/specklepy
        imshow(aperture.get_integrated(), maximize=args.maximize)

    xdata, ydata = aperture.get_psf_profile()

    if args.normalize == 'peak':
        ydata /= ydata[0]
    elif args.normalize == 'aperture':
        ydata /= ydata[-1]
    elif args.normalize is not None:
        raise ValueError(
            "Normalize must be either 'peak', 'aperture', or None!")

    # Save encircled energy data to outfile
    header = "Radius Flux"
    data = np.concatenate(([xdata], [ydata]), axis=0).transpose()
    np.savetxt(outfile, data, header=header)

    if args.debug:
        psf_profile_plot(outfile, maximize=args.maximize)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        logger.info('Interrupted by user...')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)
Code example #30
def main():

    # Parse args
    parser = GeneralArgParser()
    args = parser.parse_args()

    if args.debug:
        logger.setLevel('DEBUG')
        logger.debug(args)

    if args.gui:
        start()

    # Execute the script of the corresponding command
    if args.command == 'generate':

        # Read parameters from file and generate exposures
        target, telescope, detector, parameters = get_objects(args.parfile,
                                                              debug=args.debug)
        generate_exposure(target=target,
                          telescope=telescope,
                          detector=detector,
                          debug=args.debug,
                          **parameters)

    elif args.command == 'reduce':

        # In setup mode
        if args.setup:
            run.setup(path=args.path,
                      instrument=args.instrument,
                      par_file=args.parfile,
                      list_file=args.filelist,
                      sort_by=args.sortby)
        # Else start reduction following the parameter file
        else:
            params = config.read(args.parfile)
            run.full_reduction(params, debug=args.debug)

    elif args.command == 'ssa':

        # Prepare path information and execute reconstruction
        if args.tmpdir is not None and not os.path.isdir(args.tmpdir):
            os.mkdir(args.tmpdir)
        ssa(args.files,
            mode=args.mode,
            tmp_dir=args.tmpdir,
            outfile=args.outfile,
            box_indexes=args.box_indexes,
            debug=args.debug)

    elif args.command == 'holography':

        # Read parameters from file and execute reconstruction
        defaults_file = os.path.join(os.path.dirname(__file__),
                                     '../config/holography.cfg')
        defaults_file = os.path.abspath(defaults_file)
        params = config.read(defaults_file)
        params = config.update_from_file(params, args.parfile)
        holography(params,
                   mode=params['OPTIONS']['reconstructionMode'],
                   debug=args.debug)

    elif args.command == 'aperture':
        if args.mode == 'psf1d':
            logger.info("Extract 1D PSF profile")
            analysis.get_psf_1d(args.file,
                                args.index,
                                args.radius,
                                args.out_file,
                                args.normalize,
                                debug=args.debug)
        elif args.mode == 'variance':
            logger.info("Extract 1D PSF variation")
            analysis.get_psf_variation(args.file, args.index, args.radius,
                                       args.out_file, args.normalize,
                                       args.debug)
        else:
            logger.warning(f"Aperture mode {args.mode} not recognized!")

    elif args.command == 'extract':
        if args.out_file is None:
            args.out_file = 'sources_' + os.path.basename(
                args.file_name).replace('.fits', '.dat')
        extract_sources(image=args.file_name,
                        noise_threshold=args.noise_threshold,
                        fwhm=args.fwhm,
                        image_var=args.var,
                        write_to=args.out_file)

    elif args.command == 'plot':
        plot = Plot.from_file(file_name=args.file,
                              extension=args.extension,
                              columns=args.columns,
                              format=args.format,
                              layout=args.layout,
                              debug=args.debug)
        plot.apply_layout(layout=args.layout)
        plot.save()
        plot.show()

    elif args.command == 'apodization':
        get_resolution_parameters(wavelength=args.wavelength,
                                  diameter=args.diameter,
                                  pixel_scale=args.pixel_scale)