def download_tgss_image(url):
    """Download an image for TGSS.

    Parameters
    ----------
    url:
        url of the image.

    Returns
    -------
    file_url:
        Url of the newly downloaded file.
    """

    # parsing ra and dec from the url
    query = urlparse(url).query
    h_pos = parse_qs(query)['hPOS'][0]
    ra, dec = h_pos.split(',')

    # name of the temporary tar file to be saved locally
    local_file_name = '{}_{}.tar'.format(ra, dec)

    # download the tar file to disk in streamed chunks
    response = requests.get(url, stream=True)
    with open(local_file_name, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
        f.flush()

    with tarfile.open(local_file_name) as tar:
        members = tar.getmembers()

        # temporary folder for the extracted FITS file
        temp_folder = local_file_name.replace('.tar', '')
        tar.extract(member=members[0], path=temp_folder)

    # removing the temporary tar file
    os.remove(local_file_name)

    fits_image = temp_folder + '/' + members[0].name
    hdu_list = fits.open(fits_image)
    stretch = vis.AsinhStretch(0.01) + vis.MinMaxInterval()
    file_url = settings.MEDIA_ROOT + 'database_images/' + temp_folder + '_tgss.png'

    image_data = hdu_list[0].data[0, 0]

    try:
        imsave(file_url, stretch(image_data), cmap='copper')
    except OSError as error:
        logger.info("Something is wrong with the fits file for url = {}".format(url))
        logger.error(error)
        file_url = settings.MEDIA_ROOT + 'database_images/no_image.png'

    hdu_list.close()
    shutil.rmtree(temp_folder)

    return file_url
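
A hedged usage sketch (the host and hPOS coordinates below are hypothetical; the function only needs a query URL whose hPOS parameter carries "ra,dec"):

# Hypothetical TGSS cutout query URL, for illustration only.
example_url = 'https://vo.example.org/tgss/siap?hPOS=187.70,12.39&hSIZE=0.2'
png_url = download_tgss_image(example_url)
print(png_url)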
Example 2
def image_norm(image_data, stretch=viz.AsinhStretch):
    """
    Create the ImageNormalize object based on the desired stretch and
    pixel value range.

    See http://docs.astropy.org/en/stable/visualization/normalization.html
    """
    interval = viz.MinMaxInterval()
    vmin, vmax = interval.get_limits(image_data.flatten())
    norm = ImageNormalize(vmin=vmin, vmax=vmax, stretch=stretch())
    return norm
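
A minimal usage sketch, assuming image_norm lives in a module that imports astropy.visualization as viz and ImageNormalize, matching the names used above:

import numpy as np
import matplotlib.pyplot as plt
import astropy.visualization as viz

image_data = np.random.random((64, 64))  # placeholder image
norm = image_norm(image_data, stretch=viz.SqrtStretch)
plt.imshow(image_data, norm=norm, origin='lower')
plt.show()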
Example 3
    def plot_norm(self,
                  stretch='linear',
                  power=1.0,
                  asinh_a=0.1,
                  min_cut=None,
                  max_cut=None,
                  min_percent=None,
                  max_percent=None,
                  percent=None,
                  clip=True):
        """Create a matplotlib norm object for plotting.

        This is a copy of `astropy.visualization.mpl_normalize.simple_norm`,
        which will be available in Astropy 1.3.

        See the parameter descriptions there!

        Examples
        --------
        >>> image = SkyImage()
        >>> norm = image.plot_norm(stretch='sqrt', max_percent=99)
        >>> image.plot(norm=norm)
        """
        import astropy.visualization as v
        from astropy.visualization.mpl_normalize import ImageNormalize

        if percent is not None:
            interval = v.PercentileInterval(percent)
        elif min_percent is not None or max_percent is not None:
            interval = v.AsymmetricPercentileInterval(min_percent or 0.,
                                                      max_percent or 100.)
        elif min_cut is not None or max_cut is not None:
            interval = v.ManualInterval(min_cut, max_cut)
        else:
            interval = v.MinMaxInterval()

        if stretch == 'linear':
            stretch = v.LinearStretch()
        elif stretch == 'sqrt':
            stretch = v.SqrtStretch()
        elif stretch == 'power':
            stretch = v.PowerStretch(power)
        elif stretch == 'log':
            stretch = v.LogStretch()
        elif stretch == 'asinh':
            stretch = v.AsinhStretch(asinh_a)
        else:
            raise ValueError('Unknown stretch: {0}.'.format(stretch))

        vmin, vmax = interval.get_limits(self.data)

        return ImageNormalize(vmin=vmin, vmax=vmax, stretch=stretch, clip=clip)
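
In current Astropy releases the same behaviour is available directly as astropy.visualization.simple_norm; a minimal sketch (parameter names as in the Astropy documentation):

import numpy as np
import matplotlib.pyplot as plt
from astropy.visualization import simple_norm

data = np.random.random((128, 128))  # placeholder image
norm = simple_norm(data, stretch='sqrt', max_percent=99)
plt.imshow(data, norm=norm, origin='lower')
plt.show()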
Example 4
def download_first_image(url, galaxy):
    """
    Download first image from the url
    :param url: link of the image
    :param galaxy: galaxy object
    :return: url of the saved image
    """
    stretch = vis.AsinhStretch(0.01) + vis.MinMaxInterval()

    file_url = settings.MEDIA_ROOT + 'database_images/' + galaxy.first + '.png'
    try:
        imsave(file_url, stretch(fits.open(download_file(url, cache=True))[0].data), cmap='inferno')
    except OSError as error:
        logger.info("Something is wrong with the fits file for url = {}".format(url))
        logger.error(error)
        file_url = settings.MEDIA_ROOT + 'temp_images/no_image.png'

    return file_url
Example 5
def plot_image_and_lines(cube,
                         wavs,
                         xrange,
                         yrange,
                         Hbeta_ref=None,
                         title='',
                         filename=None,
                         include_OIII=False):

    zpix = np.arange(0, cube.shape[0])
    lambda_delta = 5
    hbeta_z = np.where((np.array(wavs) > h_beta_std.value-lambda_delta)\
                       & (np.array(wavs) < h_beta_std.value+lambda_delta))[0]
    image = np.mean(cube[min(hbeta_z):max(hbeta_z) + 1, :, :], axis=0)

    spect = [
        np.mean(cube[z, yrange[0]:yrange[1] + 1, xrange[0]:xrange[1] + 1])
        for z in zpix
    ]
    i_peak = spect.index(max(spect))

    background_0 = models.Polynomial1D(degree=2)
    H_beta_0 = models.Gaussian1D(amplitude=500,
                                 mean=4861,
                                 stddev=1.,
                                 bounds={
                                     'mean': (4855, 4865),
                                     'stddev': (0.1, 5)
                                 })
    OIII4959_0 = models.Gaussian1D(amplitude=100,
                                   mean=4959,
                                   stddev=1.,
                                   bounds={
                                       'mean': (4955, 4965),
                                       'stddev': (0.1, 5)
                                   })
    OIII5007_0 = models.Gaussian1D(amplitude=200,
                                   mean=5007,
                                   stddev=1.,
                                   bounds={
                                       'mean': (5002, 5012),
                                       'stddev': (0.1, 5)
                                   })
    fitter = fitting.LevMarLSQFitter()
    if include_OIII is True:
        model0 = background_0 + H_beta_0 + OIII4959_0 + OIII5007_0
    else:
        model0 = background_0 + H_beta_0

    model0.mean_1 = wavs[i_peak]
    model = fitter(model0, wavs, spect)
    residuals = np.array(spect - model(wavs))

    plt.figure(figsize=(20, 8))

    plt.subplot(1, 4, 1)
    plt.title(title)
    norm = v.ImageNormalize(image,
                            interval=v.MinMaxInterval(),
                            stretch=v.LogStretch(1))
    plt.imshow(image, origin='lower', norm=norm)
    region_x = [
        xrange[0] - 0.5, xrange[1] + 0.5, xrange[1] + 0.5, xrange[0] - 0.5,
        xrange[0] - 0.5
    ]
    region_y = [
        yrange[0] - 0.5, yrange[0] - 0.5, yrange[1] + 0.5, yrange[1] + 0.5,
        yrange[0] - 0.5
    ]
    plt.plot(region_x, region_y, 'r-', alpha=0.5, lw=2)

    plt.subplot(1, 4, 2)
    if Hbeta_ref is not None:
        Hbeta_velocity = (model.mean_1.value * u.Angstrom).to(
            u.km / u.s, equivalencies=u.doppler_optical(Hbeta_ref))
        title = f'H-beta ({model.mean_1.value:.1f} A, v={Hbeta_velocity.value:.1f} km/s)'
    else:
        title = f'H-beta ({model.mean_1.value:.1f} A, sigma={model.stddev_1.value:.3f} A)'
    plt.title(title)
    w = [l for l in np.arange(4856, 4866, 0.05)]
    if Hbeta_ref is not None:
        vs = [(l * u.Angstrom).to(
            u.km / u.s, equivalencies=u.doppler_optical(Hbeta_ref)).value
              for l in wavs]
        plt.plot(vs, spect, drawstyle='steps-mid', label='data')
        vs = [(l * u.Angstrom).to(
            u.km / u.s, equivalencies=u.doppler_optical(Hbeta_ref)).value
              for l in w]
        plt.plot(vs, model(w), 'r-', alpha=0.7, label='Fit')
        plt.xlabel('Velocity (km/s)')
        plt.xlim(-200, 200)
    else:
        plt.plot(wavs, spect, drawstyle='steps-mid', label='data')
        plt.plot(w, model(w), 'r-', alpha=0.7, label='Fit')
        plt.xlabel('Wavelength (angstroms)')
        plt.xlim(4856, 4866)
    plt.grid()
    plt.ylabel('Flux')
    plt.legend(loc='best')

    plt.subplot(1, 4, 3)
    if include_OIII is True:
        title = f'OIII 4959 ({model.mean_2.value:.1f} A, sigma={model.stddev_2.value:.3f} A)'
    else:
        title = 'OIII 4959'
    plt.title(title)
    plt.plot(wavs, spect, drawstyle='steps-mid', label='data')
    w = [l for l in np.arange(4954, 4964, 0.05)]
    plt.plot(w, model(w), 'r-', alpha=0.7, label='Fit')
    plt.xlabel('Wavelength (angstroms)')
    plt.ylabel('Flux')
    plt.legend(loc='best')
    plt.xlim(4954, 4964)

    plt.subplot(1, 4, 4)
    if include_OIII is True:
        title = f'OIII 5007 ({model.mean_3.value:.1f} A, sigma={model.stddev_3.value:.3f} A)'
    else:
        title = 'OIII 5007'
    plt.title(title)
    plt.plot(wavs, spect, drawstyle='steps-mid', label='data')
    w = [l for l in np.arange(5002, 5012, 0.05)]
    plt.plot(w, model(w), 'r-', alpha=0.7, label='Fit')
    plt.xlabel('Wavelength (angstroms)')
    plt.ylabel('Flux')
    plt.legend(loc='best')
    plt.xlim(5002, 5012)

    if filename is not None:
        plt.savefig(filename, bbox_inches='tight', pad_inches=0.10)
    else:
        plt.show()

    return spect, model
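
The core of the routine above is a compound Astropy model (polynomial background plus Gaussian lines) fitted with LevMarLSQFitter; a self-contained sketch on synthetic data:

import numpy as np
from astropy.modeling import models, fitting

# synthetic spectrum: flat background plus a Gaussian emission line near 4861 A
wavs = np.arange(4840.0, 4880.0, 0.2)
truth = 10.0 + 500.0 * np.exp(-0.5 * ((wavs - 4861.0) / 1.2) ** 2)
spect = truth + np.random.normal(scale=5.0, size=wavs.size)

model0 = (models.Polynomial1D(degree=2)
          + models.Gaussian1D(amplitude=400.0, mean=4860.0, stddev=1.0,
                              bounds={'mean': (4855, 4865), 'stddev': (0.1, 5)}))
fitter = fitting.LevMarLSQFitter()
model = fitter(model0, wavs, spect)
print(model.mean_1.value, model.stddev_1.value)  # fitted line centre and width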
Example 6
 def stretchInterval(self):
     if self._stretchInterval is None:
         print('Using default MinMaxInterval stretch interval')
         return astrovis.MinMaxInterval()
     return self._stretchInterval
Example 7
 def set_normalization(self,
                       stretch=None,
                       interval=None,
                       stretchkwargs={},
                       intervalkwargs={},
                       perm_linear=None):
     if stretch is None:
         if self.stretch is None:
             stretch = 'linear'
         else:
             stretch = self.stretch
     if isinstance(stretch, str):
         print(stretch,
               ' '.join([f'{k}={v}' for k, v in stretchkwargs.items()]))
         if self.data is None:  #can not calculate objects yet
             self.stretch_kwargs = stretchkwargs
         else:
             kwargs = self.prepare_kwargs(
                 self.stretch_kws_defaults[stretch], self.stretch_kwargs,
                 stretchkwargs)
             if perm_linear is not None:
                 perm_linear_kwargs = self.prepare_kwargs(
                     self.stretch_kws_defaults['linear'], perm_linear)
                 print(
                     'linear', ' '.join([
                         f'{k}={v}' for k, v in perm_linear_kwargs.items()
                     ]))
                 if stretch == 'asinh':  # arg: a=0.1
                     stretch = vis.CompositeStretch(
                         vis.LinearStretch(**perm_linear_kwargs),
                         vis.AsinhStretch(**kwargs))
                 elif stretch == 'contrastbias':  # args: contrast, bias
                     stretch = vis.CompositeStretch(
                         vis.LinearStretch(**perm_linear_kwargs),
                         vis.ContrastBiasStretch(**kwargs))
                 elif stretch == 'histogram':
                     stretch = vis.CompositeStretch(
                         vis.HistEqStretch(self.data, **kwargs),
                         vis.LinearStretch(**perm_linear_kwargs))
                 elif stretch == 'log':  # args: a=1000.0
                     stretch = vis.CompositeStretch(
                         vis.LogStretch(**kwargs),
                         vis.LinearStretch(**perm_linear_kwargs))
                 elif stretch == 'powerdist':  # args: a=1000.0
                     stretch = vis.CompositeStretch(
                         vis.LinearStretch(**perm_linear_kwargs),
                         vis.PowerDistStretch(**kwargs))
                 elif stretch == 'power':  # args: a
                     stretch = vis.CompositeStretch(
                         vis.PowerStretch(**kwargs),
                         vis.LinearStretch(**perm_linear_kwargs))
                 elif stretch == 'sinh':  # args: a=0.33
                     stretch = vis.CompositeStretch(
                         vis.LinearStretch(**perm_linear_kwargs),
                         vis.SinhStretch(**kwargs))
                 elif stretch == 'sqrt':
                     stretch = vis.CompositeStretch(
                         vis.SqrtStretch(),
                         vis.LinearStretch(**perm_linear_kwargs))
                 elif stretch == 'square':
                     stretch = vis.CompositeStretch(
                         vis.LinearStretch(**perm_linear_kwargs),
                         vis.SquaredStretch())
                 else:
                     raise ValueError('Unknown stretch:' + stretch)
             else:
                 if stretch == 'linear':  # args: slope=1, intercept=0
                     stretch = vis.LinearStretch(**kwargs)
                 else:
                     raise ValueError('Unknown stretch:' + stretch)
     self.stretch = stretch
     if interval is None:
         if self.interval is None:
             interval = 'zscale'
         else:
             interval = self.interval
     if isinstance(interval, str):
         print(interval,
               ' '.join([f'{k}={v}' for k, v in intervalkwargs.items()]))
         kwargs = self.prepare_kwargs(self.interval_kws_defaults[interval],
                                      self.interval_kwargs, intervalkwargs)
         if self.data is None:
             self.interval_kwargs = intervalkwargs
         else:
             if interval == 'minmax':
                 interval = vis.MinMaxInterval()
             elif interval == 'manual':  # args: vmin, vmax
                 interval = vis.ManualInterval(**kwargs)
             elif interval == 'percentile':  # args: percentile, n_samples
                 interval = vis.PercentileInterval(**kwargs)
             elif interval == 'asymetric':  # args: lower_percentile, upper_percentile, n_samples
                 interval = vis.AsymmetricPercentileInterval(**kwargs)
             elif interval == 'zscale':  # args: nsamples=1000, contrast=0.25, max_reject=0.5, min_npixels=5, krej=2.5, max_iterations=5
                 interval = vis.ZScaleInterval(**kwargs)
             else:
                 raise ValueError('Unknown interval:' + interval)
     self.interval = interval
     if self.img is not None:
         self.img.set_norm(
             vis.ImageNormalize(self.data,
                                interval=self.interval,
                                stretch=self.stretch,
                                clip=True))
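
What the method ultimately assembles is an ImageNormalize built from an interval and a (possibly composite) stretch; a minimal direct sketch with astropy.visualization:

import numpy as np
import matplotlib.pyplot as plt
import astropy.visualization as vis

data = np.random.random((100, 100))  # placeholder image
stretch = vis.CompositeStretch(vis.LinearStretch(slope=0.5, intercept=0.25),
                               vis.AsinhStretch(a=0.1))
norm = vis.ImageNormalize(data,
                          interval=vis.ZScaleInterval(contrast=0.25),
                          stretch=stretch,
                          clip=True)
plt.imshow(data, norm=norm, origin='lower')
plt.show()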
Example 8
import matplotlib.pyplot as plt
from astropy.io import fits
import astropy.visualization as viz

image_name = input("Please enter the name of the file : ")
hdul = fits.open(image_name)
hdul.info()
header_number = int(input("Enter Header number whose data you want to view : "))
image = hdul[header_number].data
hdul.close()
# stretching and normalizing using LogStretch() and MinMaxInterval(), like in DS9
log_param = float(input("Enter base value for logarithmic stretch : "))
norm = viz.ImageNormalize(image,
                          interval=viz.MinMaxInterval(),
                          stretch=viz.LogStretch(log_param))
plt.imshow(image, cmap='gray', norm=norm)
plt.show()
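
For a DS9-style zscale display instead of the min/max interval, ZScaleInterval can be swapped in; a short sketch reusing the image and viz names from above:

zscale_norm = viz.ImageNormalize(image,
                                 interval=viz.ZScaleInterval(),
                                 stretch=viz.LinearStretch())
plt.imshow(image, cmap='gray', norm=zscale_norm)
plt.show()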
Example 9
import random

import keract
import matplotlib.pyplot as plt
import pandas as pd
import astropy.visualization as avis

plt.style.use("seaborn-darkgrid")

DATA_CSV = '/full/path/to/csv'
IMG_FOLD = '/full/path/to/images'  #for me, ends with /Public/EUC_VIS/
DES_IMG_SIZE = 200
NUM_IMAGES = 50000  # can go up to nearly 100000, but there are a few entries with no images
BATCHSIZE = 200
EPOCHS = 20  # I liked doing 20 for quick ones, 40 for longer. maybe we should try more epochs?

# Creating the two normalization objects which
# can also work as functions later on in the
# data preprocessing
norm = avis.MinMaxInterval()
stretch = avis.AsinhStretch(0.010)
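# Applied in sequence, e.g. stretch(norm(raw_image)), these map raw pixel
# values into [0, 1] as part of the preprocessing mentioned above.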


def read_csv(filename: str,
             num_images: int,
             to_skip: list[int] = []) -> pd.DataFrame:
    """
    Reads in the csv and trims it down to just the desired size and
    necessary columns. Also adds a column of booleans corresponding
    to whether a particular observation has been assigned to the
    training or testing pool. Training corresponding to True.
    """
    df = pd.read_csv(filename, skiprows=26)
    df = df[["ID", "n_sources", "n_source_im", "mag_eff", "n_pix_source"]]
    df["should_detect"] = ((df["n_source_im"] > 0) & (df["mag_eff"] > 1.6) &
Example 10
def rgbfig(
        figfilename="SgrB2N_RGB.pdf",
        lims=[([266.83404223, 266.83172659]), ([-28.373138, -28.3698755])],
        scalebarx=coordinates.SkyCoord(266.833545 * u.deg,
                                       -28.37283819 * u.deg),
        redfn=paths.Fpath('SGRB2N-2012-Q.DePree_K.recentered.fits'),
        greenfn=paths.Fpath(
            'sgr_b2m.N.B3.allspw.continuum.r0.5.clean1000.image.tt0.pbcor.fits'),
        bluefn=paths.Fpath(
            'sgr_b2m.N.B6.allspw.continuum.r0.5.clean1000.image.tt0.pbcor.fits'),
        redpercentile=99.99,
        greenpercentile=99.99,
        bluepercentile=99.99,
        stretch=visualization.AsinhStretch(),
):

    header = fits.getheader(redfn)
    celwcs = wcs.WCS(header).celestial

    redhdu = fits.open(redfn)
    greenhdu = fits.open(greenfn)
    bluehdu = fits.open(bluefn)

    greendata, _ = reproject.reproject_interp(
        (greenhdu[0].data, wcs.WCS(greenhdu[0].header).celestial),
        celwcs,
        shape_out=redhdu[0].data.squeeze().shape)
    bluedata, _ = reproject.reproject_interp(
        (bluehdu[0].data, wcs.WCS(bluehdu[0].header).celestial),
        celwcs,
        shape_out=redhdu[0].data.squeeze().shape)

    #def rescale(x):
    #    return (x-np.nanmin(x))/(np.nanmax(x) - np.nanmin(x))
    redrescale = visualization.PercentileInterval(redpercentile)
    greenrescale = visualization.PercentileInterval(greenpercentile)
    bluerescale = visualization.PercentileInterval(bluepercentile)

    rgb = np.array([
        stretch(redrescale(redhdu[0].data.squeeze())),
        stretch(greenrescale(greendata)),
        stretch(bluerescale(bluedata)),
    ]).swapaxes(0, 2).swapaxes(0, 1)

    norm = visualization.ImageNormalize(
        rgb, interval=visualization.MinMaxInterval(), stretch=stretch)

    fig1 = pl.figure(1)
    fig1.clf()
    ax = fig1.add_subplot(1, 1, 1, projection=celwcs)
    pl.imshow(rgb, origin='lower', interpolation='none', norm=norm)

    (x1, x2), (y1, y2) = celwcs.wcs_world2pix(lims[0], lims[1], 0)
    ax.axis((x1, x2, y1, y2))

    visualization_tools.make_scalebar(ax,
                                      left_side=scalebarx,
                                      length=1.213 * u.arcsec,
                                      label='0.05 pc')

    pl.savefig(paths.fpath(figfilename), bbox_inches='tight')
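
The colour composition above boils down to per-band percentile rescaling followed by an asinh stretch; a minimal sketch with synthetic arrays:

import numpy as np
import matplotlib.pyplot as plt
from astropy import visualization

stretch = visualization.AsinhStretch()
rescale = visualization.PercentileInterval(99.9)

r = np.random.random((64, 64))  # placeholder band images
g = np.random.random((64, 64))
b = np.random.random((64, 64))

# each band is rescaled to [0, 1], stretched, then stacked as (ny, nx, 3)
rgb = np.dstack([stretch(rescale(band)) for band in (r, g, b)])
plt.imshow(rgb, origin='lower', interpolation='none')
plt.show()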
Example 11
 
 position = (x, y)
 imData = fileData[0].data[0][0] #Shape it correctly. Assumes 4D. Might rewrite to generalise.
 cutout = Cutout2D(imData, position, size, mode='partial') #Anything past the border filled with NaN
 
 image = np.array(cutout.data) #Get the image data as a np array.
 
 empty = np.isnan(image) #Get all NaN pixels; they are replaced below with Gaussian noise matched to the image mean and standard deviation.
 image[empty] = np.random.normal(loc=np.mean(image[~empty]), scale=np.std(image[~empty]), size=image.shape)[empty] # replace missing values with noise
 image = image - np.min(image) #Set base intensity to 0
 
 # clip background
 image = np.clip(image, clip_threshold*np.std(image), 1e10)
 
 # Normalize using minmax
 mminterval = vis.MinMaxInterval()
 image = mminterval(image)
 
 image.astype('f').tofile(output) #Write to the Binary file
 
 #Append the info to our catalogue
 entry = []
 entry.append(str(ra))
 entry.append(str(dec))
 entry.append(files_on_hand[i])
 file = album + str(count) + '-' + str(int(round(ra))) + ':' + str(int(round(dec))) + '.stamp.fits'
 entry.append(file)
 info.append(entry)
 
 #Create a FITS image of the stamp
 hdu = fits.PrimaryHDU()