Example #1
def get_pixel_drift(coords, files):
    """Get the pixel drift for a given set of coordinates.

    Args:
        coords (`astropy.coordinates.SkyCoord`): Coordinates of source.
        files (list): A list of FITS files with valid WCS.

    Returns:
        `numpy.array, numpy.array`: Two 1-D arrays of length N=len(files)
            containing the x and y pixel deltas.
    """
    # Get target positions for each frame
    logger.info("Getting pixel drift for {}".format(coords))
    target_pos = list()
    for fn in files:
        h0 = fits_utils.getheader(fn)
        pos = WCS(h0).all_world2pix(coords.ra, coords.dec, 1)
        target_pos.append(pos)

    target_pos = np.array(target_pos)

    # Subtract out the mean to get just the pixel deltas
    x_pos = target_pos[:, 0]
    y_pos = target_pos[:, 1]

    x_pos -= x_pos.mean()
    y_pos -= y_pos.mean()

    return x_pos, y_pos
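A minimal usage sketch for `get_pixel_drift` as defined above, assuming the function is in scope; the target coordinates and glob pattern below are placeholders, not values from the original code.

# Hypothetical usage; coordinates and file locations are placeholders.
from glob import glob

from astropy.coordinates import SkyCoord

# Example target in decimal degrees (illustrative values only).
coords = SkyCoord(ra=289.217, dec=-21.395, unit='deg')

# FITS frames from one observation, each with a valid WCS solution.
files = sorted(glob('/path/to/observation/*.fits'))

x_drift, y_drift = get_pixel_drift(coords, files)
print(f'Max |x| drift: {abs(x_drift).max():.2f} pix')
print(f'Max |y| drift: {abs(y_drift).max():.2f} pix')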
Example #3
def _lookup_via_photutils(fits_file, wcs=None, *args, **kwargs):
    from photutils import DAOStarFinder
    data = fits.getdata(fits_file) - 2048  # Camera bias
    mean, median, std = sigma_clipped_stats(data)

    fwhm = kwargs.get('fwhm', 3.0)
    threshold = kwargs.get('threshold', 3.0)

    daofind = DAOStarFinder(fwhm=fwhm, threshold=threshold * std)
    sources = daofind(data - median).to_pandas()

    sources.rename(columns={
        'xcentroid': 'x',
        'ycentroid': 'y',
    }, inplace=True)

    if wcs is None:
        header = fits_utils.getheader(fits_file)
        wcs = WCS(header)

    coords = wcs.all_pix2world(sources['x'], sources['y'], 1)

    sources['ra'] = coords[0]
    sources['dec'] = coords[1]

    return sources
Example #4
def _lookup_via_photutils(fits_file, wcs=None, *args, **kwargs):
    from photutils import DAOStarFinder
    data = fits.getdata(fits_file) - 2048  # Camera bias
    mean, median, std = sigma_clipped_stats(data)

    fwhm = kwargs.get('fwhm', 3.0)
    threshold = kwargs.get('threshold', 3.0)

    daofind = DAOStarFinder(fwhm=fwhm, threshold=threshold * std)
    sources = daofind(data - median).to_pandas()

    sources.rename(columns={
        'xcentroid': 'x',
        'ycentroid': 'y',
    },
                   inplace=True)

    if wcs is None:
        header = fits_utils.getheader(fits_file)
        wcs = WCS(header)

    coords = wcs.all_pix2world(sources['x'], sources['y'], 1)

    sources['ra'] = coords[0]
    sources['dec'] = coords[1]

    return sources
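A hedged usage sketch for the photutils helper above. The FITS path is a placeholder; note that `threshold` acts as a multiplier on the sigma-clipped standard deviation (so `threshold=3.0` is a 3-sigma detection cut), not an absolute count level.

# Hypothetical call; the path is a placeholder for a frame with ~2048 ADU bias.
sources = _lookup_via_photutils('/path/to/frame.fits', fwhm=3.0, threshold=3.0)

# Pixel centroids plus RA/Dec computed from the frame's WCS.
print(sources[['x', 'y', 'ra', 'dec']].head())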
Example #5
def lookup_point_sources(fits_file,
                         catalog_match=True,
                         method='sextractor',
                         force_new=False,
                         **kwargs):
    """ Extract point sources from image

    Args:
        fits_file (str): Path to FITS file to search for stars.
        force_new (bool, optional): Force a new catalog to be created,
            defaults to False

    Raises:
        error.InvalidSystemCommand: Description
    """
    if catalog_match or method == 'tess_catalog':
        fits_header = fits_utils.getheader(fits_file)
        wcs = WCS(fits_header)
        assert wcs is not None and wcs.is_celestial, "Need a valid WCS"

    logger.info("Looking up sources for {}".format(fits_file))

    lookup_function = {
        'sextractor': _lookup_via_sextractor,
        'tess_catalog': _lookup_via_tess_catalog,
        'photutils': _lookup_via_photutils,
    }

    # Lookup our appropriate method and call it with the fits file and kwargs
    try:
        logger.debug("Using {} method {}".format(method,
                                                 lookup_function[method]))
        point_sources = lookup_function[method](fits_file,
                                                force_new=force_new,
                                                **kwargs)
    except Exception as e:
        logger.error("Problem looking up sources: {}".format(e))
        raise Exception("Problem looking up sources: {}".format(e))

    if catalog_match:
        logger.debug(f'Doing catalog match against stars')
        point_sources = get_catalog_match(point_sources, wcs, **kwargs)
        logger.debug(f'Done with catalog match')

    # Change the index to the picid
    point_sources.set_index('id', inplace=True)
    point_sources.index.rename('picid', inplace=True)

    # Remove those with more than one entry
    counts = point_sources.x.groupby('picid').count()
    single_entry = counts == 1
    single_index = single_entry.loc[single_entry].index
    unique_sources = point_sources.loc[single_index]

    return unique_sources
Example #6
def lookup_point_sources(fits_file,
                         catalog_match=True,
                         method='sextractor',
                         force_new=False,
                         **kwargs
                         ):
    """ Extract point sources from image

    Args:
        fits_file (str): Path to FITS file to search for stars.
        force_new (bool, optional): Force a new catalog to be created,
            defaults to False

    Raises:
        error.InvalidSystemCommand: Description
    """
    if catalog_match or method == 'tess_catalog':
        fits_header = fits_utils.getheader(fits_file)
        wcs = WCS(fits_header)
        assert wcs is not None and wcs.is_celestial, "Need a valid WCS"

    logger.info("Looking up sources for {}".format(fits_file))

    lookup_function = {
        'sextractor': _lookup_via_sextractor,
        'tess_catalog': _lookup_via_tess_catalog,
        'photutils': _lookup_via_photutils,
    }

    # Lookup our appropriate method and call it with the fits file and kwargs
    try:
        logger.debug("Using {} method {}".format(method, lookup_function[method]))
        point_sources = lookup_function[method](fits_file, force_new=force_new, **kwargs)
    except Exception as e:
        logger.error("Problem looking up sources: {}".format(e))
        raise Exception("Problem looking up sources: {}".format(e))

    if catalog_match:
        logger.debug(f'Doing catalog match against stars')
        point_sources = get_catalog_match(point_sources, wcs, **kwargs)
        logger.debug(f'Done with catalog match')

    # Change the index to the picid
    point_sources.set_index('id', inplace=True)
    point_sources.index.rename('picid', inplace=True)

    # Remove those with more than one entry
    counts = point_sources.x.groupby('picid').count()
    single_entry = counts == 1
    single_index = single_entry.loc[single_entry].index
    unique_sources = point_sources.loc[single_index]

    return unique_sources
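A minimal sketch of calling `lookup_point_sources` as defined above; the path is a placeholder, and `method='photutils'` simply picks one key of `lookup_function` (the default `'sextractor'` requires Source Extractor to be installed).

# Hypothetical usage; the FITS path is a placeholder.
point_sources = lookup_point_sources('/path/to/solved_frame.fits',
                                     catalog_match=False,
                                     method='photutils')

# One row per uniquely-detected source, indexed by picid.
print(len(point_sources))
print(point_sources.head())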
Example #7
    def _process_fits(self, file_path, metadata):
        file_path = super()._process_fits(file_path, metadata)
        self.logger.debug('Overriding mount coordinates for camera simulator')
        # TODO get the path as package data or something better.
        solved_path = os.path.join(
            os.environ['POCS'],
            'tests',
            'data',
            'solved.fits.fz'
        )
        solved_header = fits_utils.getheader(solved_path)
        with fits.open(file_path, 'update') as f:
            hdu = f[0]
            hdu.header.set('RA-MNT', solved_header['RA-MNT'], 'Degrees')
            hdu.header.set('HA-MNT', solved_header['HA-MNT'], 'Degrees')
            hdu.header.set('DEC-MNT', solved_header['DEC-MNT'], 'Degrees')

        self.logger.debug("Headers updated for simulated image.")
        return file_path
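The override above relies on astropy's update mode: header changes made inside the `with` block are flushed back to the file when it closes. A stand-alone sketch of that pattern, with a placeholder path and an illustrative value:

from astropy.io import fits

# Placeholder path to any writable FITS file.
path = '/path/to/image.fits'

# mode='update' writes modified headers back to disk when the block exits.
with fits.open(path, mode='update') as hdul:
    hdul[0].header.set('RA-MNT', 289.217, 'Degrees')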
Example #8
def _make_pretty_from_fits(fname=None,
                           title=None,
                           figsize=(10, 10 / 1.325),
                           dpi=150,
                           alpha=0.2,
                           number_ticks=7,
                           clip_percent=99.9,
                           **kwargs):
    data = mask_saturated(fits_utils.getdata(fname))
    header = fits_utils.getheader(fname)
    wcs = WCS(header)

    if not title:
        field = header.get('FIELD', 'Unknown field')
        exptime = header.get('EXPTIME', 'Unknown exptime')
        filter_type = header.get('FILTER', 'Unknown filter')

        try:
            date_time = header['DATE-OBS']
        except KeyError:
            # If we don't have DATE-OBS, check filename for date
            try:
                basename = os.path.splitext(os.path.basename(fname))[0]
                date_time = date_parse(basename).isoformat()
            except Exception:  # pragma: no cover
                # Otherwise use now
                date_time = current_time(pretty=True)

        date_time = date_time.replace('T', ' ', 1)

        title = f'{field} ({exptime}s {filter_type}) {date_time}'

    norm = ImageNormalize(interval=PercentileInterval(clip_percent),
                          stretch=LogStretch())

    fig = Figure()
    FigureCanvas(fig)
    fig.set_size_inches(*figsize)
    fig.dpi = dpi

    if wcs.is_celestial:
        ax = fig.add_subplot(1, 1, 1, projection=wcs)
        ax.coords.grid(True, color='white', ls='-', alpha=alpha)

        ra_axis = ax.coords['ra']
        ra_axis.set_axislabel('Right Ascension')
        ra_axis.set_major_formatter('hh:mm')
        ra_axis.set_ticks(number=number_ticks,
                          color='white',
                          exclude_overlapping=True)

        dec_axis = ax.coords['dec']
        dec_axis.set_axislabel('Declination')
        dec_axis.set_major_formatter('dd:mm')
        dec_axis.set_ticks(number=number_ticks,
                           color='white',
                           exclude_overlapping=True)
    else:
        ax = fig.add_subplot(111)
        ax.grid(True, color='white', ls='-', alpha=alpha)

        ax.set_xlabel('X / pixels')
        ax.set_ylabel('Y / pixels')

    im = ax.imshow(data, norm=norm, cmap=get_palette(), origin='lower')
    add_colorbar(im)
    fig.suptitle(title)

    new_filename = re.sub(r'\.fits(\.fz)?', '.jpg', fname)
    fig.savefig(new_filename, bbox_inches='tight')

    # explicitly close and delete figure
    fig.clf()
    del fig

    return new_filename
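A hedged usage sketch for the plotting helper above; the input path is a placeholder, and the return value is the path of the JPEG written alongside the FITS file.

# Hypothetical call; the FITS path is a placeholder.
jpg_path = _make_pretty_from_fits('/path/to/solved_frame.fits',
                                  title='Example field',
                                  clip_percent=99.5)
print(jpg_path)  # e.g. '/path/to/solved_frame.jpg'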
Example #9
def test_getheader(solved_fits_file):
    header = fits_utils.getheader(solved_fits_file)
    assert isinstance(header, Header)
    assert header['IMAGEID'] == 'PAN001_XXXXXX_20160909T081152'
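The `solved_fits_file` fixture is not shown in this test; a minimal pytest sketch, assuming it simply returns the path of a plate-solved test image such as the `tests/data/solved.fits.fz` file referenced in Example #7.

import os

import pytest


@pytest.fixture
def solved_fits_file():
    # Assumed location of a plate-solved test image (see Example #7).
    return os.path.join(os.environ['POCS'], 'tests', 'data', 'solved.fits.fz')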
Example #10
def lookup_sources_for_observation(fits_files=None,
                                   filename=None,
                                   force_new=False,
                                   cursor=None,
                                   use_intersection=False,
                                   **kwargs):

    if force_new:
        logger.info(f'Forcing a new source file')
        with suppress(FileNotFoundError):
            os.remove(filename)

    try:
        logger.info(f'Using existing source file: {filename}')
        observation_sources = pd.read_csv(filename, parse_dates=True)
        observation_sources['obstime'] = pd.to_datetime(
            observation_sources.obstime)

    except FileNotFoundError:
        if not cursor:
            cursor = get_cursor(port=5433, db_name='v702', db_user='******')

        logger.info(f'Looking up sources in {len(fits_files)} files')
        observation_sources = None

        # Lookup the point sources for all frames
        for fn in tqdm(fits_files):
            point_sources = lookup_point_sources(fn,
                                                 force_new=force_new,
                                                 cursor=cursor,
                                                 **kwargs)
            header = fits_utils.getheader(fn)
            obstime = Time(pd.to_datetime(os.path.basename(fn).split('.')[0]))
            exptime = header['EXPTIME'] * u.second

            obstime += (exptime / 2)

            point_sources['obstime'] = obstime.datetime
            point_sources['exptime'] = exptime
            point_sources['airmass'] = header['AIRMASS']
            point_sources['file'] = os.path.basename(fn)
            point_sources['picid'] = point_sources.index

            logger.info(f'Combining sources with previous observations')
            if observation_sources is not None:
                if use_intersection:
                    logger.info(f'Getting intersection of sources')

                    idx_intersection = observation_sources.index.intersection(
                        point_sources.index)
                    logger.info(
                        f'Num sources in intersection: {len(idx_intersection)}'
                    )
                    observation_sources = pd.concat([
                        observation_sources.loc[idx_intersection],
                        point_sources.loc[idx_intersection]
                    ],
                                                    join='inner')
                else:
                    observation_sources = pd.concat(
                        [observation_sources, point_sources])
            else:
                observation_sources = point_sources

        logger.info(f'Writing sources out to file')
        observation_sources.to_csv(filename)

    observation_sources.set_index(['obstime'], inplace=True)
    return observation_sources
Example #11
def lookup_sources_for_observation(fits_files=None,
                                   filename=None,
                                   force_new=False,
                                   cursor=None,
                                   use_intersection=False,
                                   **kwargs
                                   ):

    if force_new:
        logger.info(f'Forcing a new source file')
        with suppress(FileNotFoundError):
            os.remove(filename)

    try:
        logger.info(f'Using existing source file: {filename}')
        observation_sources = pd.read_csv(filename, parse_dates=True)
        observation_sources['obstime'] = pd.to_datetime(observation_sources.obstime)

    except FileNotFoundError:
        if not cursor:
            cursor = get_cursor(port=5433, db_name='v702', db_user='******')

        logger.info(f'Looking up sources in {len(fits_files)} files')
        observation_sources = None

        # Lookup the point sources for all frames
        for fn in tqdm(fits_files):
            point_sources = lookup_point_sources(
                fn,
                force_new=force_new,
                cursor=cursor,
                **kwargs
            )
            header = fits_utils.getheader(fn)
            obstime = Time(pd.to_datetime(os.path.basename(fn).split('.')[0]))
            exptime = header['EXPTIME'] * u.second

            obstime += (exptime / 2)

            point_sources['obstime'] = obstime.datetime
            point_sources['exptime'] = exptime
            point_sources['airmass'] = header['AIRMASS']
            point_sources['file'] = os.path.basename(fn)
            point_sources['picid'] = point_sources.index

            logger.info(f'Combining sources with previous observations')
            if observation_sources is not None:
                if use_intersection:
                    logger.info(f'Getting intersection of sources')

                    idx_intersection = observation_sources.index.intersection(point_sources.index)
                    logger.info(f'Num sources in intersection: {len(idx_intersection)}')
                    observation_sources = pd.concat([observation_sources.loc[idx_intersection],
                                                     point_sources.loc[idx_intersection]],
                                                    join='inner')
                else:
                    observation_sources = pd.concat([observation_sources, point_sources])
            else:
                observation_sources = point_sources

        logger.info(f'Writing sources out to file')
        observation_sources.to_csv(filename)

    observation_sources.set_index(['obstime'], inplace=True)
    return observation_sources
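A hedged sketch of driving the observation-level lookup above; the glob pattern and CSV path are placeholders, and the CSV acts as a cache, so `force_new=True` would be needed to rebuild it from the FITS files.

from glob import glob

# Placeholder paths; frame filenames should encode the observation time,
# since the function parses obstime from the file basename.
fits_files = sorted(glob('/path/to/observation/*.fits.fz'))

sources = lookup_sources_for_observation(fits_files=fits_files,
                                         filename='/path/to/observation_sources.csv')

# Result is indexed by mid-exposure obstime, one row per source per frame.
print(len(sources))
print(sources.head())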
Example #12
def lookup_point_sources(
        fits_file,
        catalog_match=True,
        method='sextractor',
        force_new=False,
        max_catalog_separation=25,  # arcsecs
        **kwargs):
    """ Extract point sources from image

    Args:
        fits_file (str): Path to FITS file to search for stars.
        force_new (bool, optional): Force a new catalog to be created,
            defaults to False

    Raises:
        error.InvalidSystemCommand: Description
    """
    def _print(msg):
        if 'logger' in kwargs:
            logger.debug(msg)
        else:
            print(msg)

    if catalog_match or method == 'tess_catalog':
        fits_header = fits_utils.getheader(fits_file)
        wcs = WCS(fits_header)
        assert wcs is not None and wcs.is_celestial, "Need a valid WCS"

    _print(f"Looking up sources for {fits_file}")

    lookup_function = {
        'sextractor': _lookup_via_sextractor,
        'tess_catalog': _lookup_via_tess_catalog,
    }

    # Lookup our appropriate method and call it with the fits file and kwargs
    try:
        _print(f"Using {method} method for {fits_file}")
        point_sources = lookup_function[method](fits_file,
                                                force_new=force_new,
                                                **kwargs)
    except Exception as e:
        _print(f"Problem looking up sources: {e!r} {fits_file}")
        raise Exception(f"Problem looking up sources: {e!r} {fits_file}")

    if catalog_match:
        _print(f'Doing catalog match against stars {fits_file}')
        try:
            point_sources = get_catalog_match(point_sources, wcs, **kwargs)
        except Exception as e:
            _print(f'Error in catalog match: {e!r} {fits_file}')
        _print(f'Done with catalog match {fits_file}')

    # Change the index to the picid
    point_sources.set_index('picid', inplace=True)
    _print(f'Point sources: {len(point_sources)}')

    # Remove catalog matches that are too large
    _print(
        f'Removing matches that are greater than {max_catalog_separation} arcsec from catalog.'
    )
    point_sources = point_sources.loc[
        point_sources.catalog_sep_arcsec < max_catalog_separation]
    _print(f'Point sources: {len(point_sources)} {fits_file}')

    return point_sources
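A usage sketch for this last variant that makes the catalog-separation cut explicit; the path is a placeholder and 35 arcsec is only an illustrative override of the 25 arcsec default.

# Hypothetical call; the FITS path is a placeholder.
point_sources = lookup_point_sources('/path/to/solved_frame.fits',
                                     method='sextractor',
                                     max_catalog_separation=35)
print(f'{len(point_sources)} sources within 35 arcsec of a catalog match')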