Example #1
def db_add_spectrum(conn, file_cata, file_cube, ftype='reg'):

    cube = SpectralCube.read(file_cube)
    cube = cube.with_spectral_unit(u.km / u.s)
    cube = cube.spectral_slab(-200 * u.km / u.s, +200 * u.km / u.s)
    velo = cube.spectral_axis.value

    if ftype == 'reg':
        reg_list = regions.read_ds9(file_cata)
        spec_list = []
        for region in reg_list:
            gname = db.get_gname(region=region)
            try:
                sub_cube = cube.subcube_from_regions([region])
            except ValueError as err:
                print(region)
                print(err)
                continue
            spec = sub_cube.mean(axis=(1, 2))
            spec_list.append({'gname': gname, 'spec': spec, 'velo': velo})
        db.add_to_spectrum(conn, spec_list)

    if ftype == 'db':
        # TODO: implement the 'db' input type
        pass
    return 0
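For reference, a minimal sketch (with hypothetical file names `cube.fits` and `sources.reg`) of the core extraction step used above: read a cube with spectral-cube, load ds9 regions, and average the spectrum inside one region. It assumes the cube's spectral axis can be converted directly to velocity, as in the function above.

import astropy.units as u
import regions
from spectral_cube import SpectralCube

# Read the cube and convert its spectral axis to velocity.
cube = SpectralCube.read('cube.fits').with_spectral_unit(u.km / u.s)

# Load the ds9 catalogue and cut out the first region.
reg_list = regions.read_ds9('sources.reg')
sub_cube = cube.subcube_from_regions([reg_list[0]])

# Average over the two spatial axes to get one spectrum for the region.
spectrum = sub_cube.mean(axis=(1, 2))
velocity = sub_cube.spectral_axis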
Example #2
    def _configAxes(self, ax, title, regionFiles, regionsColors, wcs):

        if title:
            ax.set_title(title, fontsize='large')

        # interpolation = "gaussian",
        for idx, regionFile in enumerate(regionFiles):
            if regionFile is not None:
                regions = read_ds9(regionFile)
                for region in regions:
                    pixelRegion = region.to_pixel(wcs=wcs)
                    pixelRegion.plot(ax=ax, edgecolor=regionsColors[idx])

        if "GLON" in wcs.wcs.ctype[0]:
            ax.set_xlabel('Galactic Longitude')
            ax.set_ylabel('Galactic Latitude')

        elif "RA" in wcs.wcs.ctype[0]:
            ax.set_xlabel('Right ascension')
            ax.set_ylabel('Declination')

        else:
            self.logger.warning(
                f"wcs ctype does not contain GLON or RA but {wcs.wcs.ctype[0]}")

        ax.grid(visible=True, color='white', ls='solid')

        return ax
Example #3
def ds9_region(filename):
    reg = regions.read_ds9(filename)

    comp = Component(np.ones(len(reg), dtype='bool'))
    data = RegionData(label='Regions: {0}'.format(os.path.split(filename)[-1]),
                      regions=reg)

    return data
Example #4
 def _parseEdgeReg(self, filename):
     pixRegions = regions.read_ds9(filename)
     if isinstance(pixRegions[0], regions.RectanglePixelRegion):
         skyRegions = [r.to_sky(self.wcs) for r in pixRegions]
     else:
         skyRegions = pixRegions
     return skyRegions
Example #5
def make_custom_mask(fieldname,
                     imname,
                     almaimf_code_path,
                     band_id,
                     rootdir="",
                     suffix=""):

    regfn = os.path.join(
        almaimf_code_path,
        'clean_regions/{0}_{1}{2}.reg'.format(fieldname, band_id, suffix))
    regs = regions.read_ds9(regfn)

    logprint("Using region file {0} to create mask".format(regfn),
             origin='make_custom_mask')

    cube = SpectralCube.read(imname, format='casa_image')
    image = cube[0]
    if image.unit.is_equivalent(u.Jy / u.beam):
        image = image * u.beam
    else:
        assert image.unit.is_equivalent(u.Jy), "Image must be in Jy or Jy/beam"

    mask_array = np.zeros(image.shape, dtype='bool')

    for reg in regs:

        threshold = u.Quantity(reg.meta['label'])
        assert threshold.unit.is_equivalent(
            u.Jy), "Threshold must by in mJy or Jy"

        preg = reg.to_pixel(image.wcs)
        msk = preg.to_mask()

        mask_array[msk.bbox.slices] = (msk.multiply(image) >
                                       threshold) | mask_array[msk.bbox.slices]

    # CASA transposes arrays!!!!!
    mask_array = mask_array.T

    ia.open(imname)
    assert np.all(ia.shape() == mask_array[:, :, None, None].shape
                  ), "Failure: image shape doesn't match mask shape"
    cs = ia.coordsys()
    ia.close()

    maskname = ('{fieldname}_{band_id}{suffix}_mask.mask'.format(
        fieldname=fieldname, band_id=band_id, suffix=suffix))
    # add a root directory if there is one
    # (if rootdir == "", this just returns maskname)
    maskname = os.path.join(rootdir, maskname)

    assert ia.fromarray(outfile=maskname,
                        pixels=mask_array.astype('float')[:, :, None, None],
                        csys=cs.torecord(),
                        overwrite=True), "FAILURE in final mask creation step"
    ia.close()

    return maskname
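A hypothetical call with placeholder field, band, and path names; note that `make_custom_mask` only runs inside a CASA session where the `ia` image-analysis tool (and the project's `logprint`) are available.

mask_file = make_custom_mask('W51-E', 'W51-E_B3_continuum.image',
                             '/path/to/almaimf_code', 'B3',
                             rootdir='imaging_results')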
Example #6
def filter_with_region(evfile, regionfile, debug_plot=True, outfile=None):
    """Filter event file by specifying a fk5 region."""
    from regions import read_ds9
    from astropy.io import fits
    import astropy.units as u
    from astropy.coordinates import SkyCoord

    label = regionfile.replace('.reg', '')
    root, ext = splitext_improved(evfile)
    if outfile is None:
        outfile = root + f'_{label}' + ext

    if outfile == evfile:
        raise ValueError("Invalid output file")

    log.info(f"Opening file {evfile}")
    with fits.open(evfile) as hdul:
        wcs = get_wcs_from_bintable(hdul['EVENTS'], 'X', 'Y')
        data = hdul['EVENTS'].data
        coords = SkyCoord.from_pixel(data['X'], data['Y'], wcs, mode='wcs')

        log.info(f"Reading region {regionfile}")
        region = read_ds9(regionfile)
        mask = region[0].contains(coords, wcs)
        masked = coords[mask]
        coordsx, coordsy = coords.to_pixel(wcs)
        x, y = masked.to_pixel(wcs)
        hdul['EVENTS'].data = data[mask]
        hdul.writeto(outfile, overwrite=True)
        log.info(f"Saving to file {outfile}")

    if debug_plot:
        import matplotlib.pyplot as plt
        center = region[0].center

        ddec = 0.1
        dra = 0.1 / np.cos(center.dec)
        figurename = f"{root}.png"
        log.info(f"Plotting data in {figurename}")
        fig = plt.figure(figurename, figsize=(10, 10))
        plt.style.use('dark_background')
        noise_ra = np.random.normal(coords.ra.value, 1 / 60 / 60) * u.deg
        noise_dec = np.random.normal(coords.dec.value, 1 / 60 / 60) * u.deg
        log.info(f"Randomizing scatter points by 1'' for beauty")
        plt.subplot(projection=wcs)
        plt.scatter(noise_ra, noise_dec, s=1, alpha=0.05)
        noise_ra = np.random.normal(masked.ra.value, 1 / 60 / 60) * u.deg
        noise_dec = np.random.normal(masked.dec.value, 1 / 60 / 60) * u.deg
        plt.scatter(noise_ra, noise_dec, s=1, alpha=0.05)
        plt.xlim([(center.ra - dra * u.deg).value,
                  (center.ra + dra * u.deg).value])
        plt.ylim([(center.dec - ddec * u.deg).value,
                  (center.dec + ddec * u.deg).value])
        plt.xlabel("RA")
        plt.ylabel("Dec")
        plt.grid()
        plt.savefig(figurename)
        plt.close(fig)
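A hypothetical invocation with placeholder file names: events outside the first region in `src.reg` are dropped and the survivors written to a new event file.

filter_with_region('mission_evt.fits', 'src.reg', debug_plot=False,
                   outfile='mission_src_evt.fits')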
Example #7
def test_ds9region_255(regfile):
    # specific test for correctness
    cube, data = cube_and_raw('255.fits')

    shapelist = regions.read_ds9(path(regfile))

    subcube = cube.subcube_from_regions(shapelist)
    assert_array_equal(subcube[0, :, :].value,
                           np.array([11, 12, 16, 17]).reshape((2, 2)))
Example #8
def test_ds9region_255(regfile):
    # specific test for correctness
    cube, data = cube_and_raw('255.fits')

    shapelist = regions.read_ds9(path(regfile))

    subcube = cube.subcube_from_regions(shapelist)
    assert_array_equal(subcube[0, :, :].value,
                           np.array([11, 12, 16, 17]).reshape((2, 2)))
Example #9
def mask_image(hdu, reg_file):
    """
    Mask images using a ds9 region file

    Parameters
    ----------
    hdu : astropy hdu object
        An HDU with however many extensions you want

    reg_file : string
        filename of a ds9 region file (all circles)


    Returns
    -------
    hdu_mask : astropy hdu object
        the same HDU as the input, but with the pixels inside each region set to 0

    """
    
    print('')
    print('  ** masking HDU extensions')
    print('')

    # read in the ds9 file
    regions = read_ds9(reg_file)
    # get ra/dec/radius (all in degrees)
    reg_ra = np.array( [regions[i].center.ra.deg[0] for i in range(len(regions))] )
    reg_dec = np.array( [regions[i].center.dec.deg[0] for i in range(len(regions))] )
    reg_rad_deg = np.array( [regions[i].radius.value for i in range(len(regions))] )/3600

    # go through each extension and mask
    for h in hdu:
        
        wcs_h = wcs.WCS(h.header)

        # convert to x/y
        reg_x, reg_y = wcs_h.wcs_world2pix(reg_ra, reg_dec, 1)
        reg_rad_pix = reg_rad_deg / wcs.utils.proj_plane_pixel_scales(wcs_h)[0]

        # to save time, do coarse removal of regions outside of image
        outside = ((reg_x < -200) | (reg_x > h.data.shape[1] + 200) |
                   (reg_y < -200) | (reg_y > h.data.shape[0] + 200))
        reg_x = reg_x[~outside]
        reg_y = reg_y[~outside]
        reg_rad_pix = reg_rad_pix[~outside]

        # go through the regions and mask
        height, width = h.data.shape
        y_grid, x_grid = np.ogrid[:height, :width]

        for i in range(len(reg_x)):
            dist_from_center = np.sqrt((x_grid - reg_x[i])**2 + (y_grid-reg_y[i])**2)
            mask = dist_from_center <= reg_rad_pix[i]
            h.data[mask] = 0

    return hdu
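A minimal usage sketch with placeholder file names, assuming every extension of the input file carries a valid celestial WCS.

from astropy.io import fits

hdu = fits.open('mosaic.fits')
hdu_masked = mask_image(hdu, 'circles_to_mask.reg')  # zeroes pixels inside each circle
hdu_masked.writeto('mosaic_masked.fits', overwrite=True)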
Example #10
    def __init__(self, fits_dir, galax_dir, nongalax_dir):

        self.galaxy_files = os.listdir(galax_dir)
        self.nongalax_files = os.listdir(nongalax_dir)
        self.fitsfiles = os.listdir(fits_dir)

        self.galax_dir = galax_dir
        self.fits_dir = fits_dir
        self.nongalax_dir = nongalax_dir

        self.nongalax_files = sorted(self.nongalax_files)
        self.galaxy_files = sorted(self.galaxy_files)
        self.fitsfiles = sorted(self.fitsfiles)

        # INDEX GOES GALAXIES THEN STARS FOR EACH FILE
        self.pointCounts = []
        self.galaxies = []

        totalCount = 0
        for x in range(len(self.fitsfiles)):
            # print(self.nongalax_files[x])
            # print(self.galaxy_files[x])

            galaxies = read_ds9(galax_dir + '/' + self.galaxy_files[x])

            nongalaxies = read_ds9(nongalax_dir + '/' + self.nongalax_files[x])
            numGalax = len(galaxies)

            galaxies.extend(nongalaxies)
            # print(len(galaxies))

            self.galaxies.append(galaxies)

            self.pointCounts.append(
                (totalCount, totalCount + numGalax, totalCount +
                 len(galaxies)))  # tuple with (0, galaxy length, totalLength)

            totalCount += len(galaxies)
            # print(totalCount)
        self.totalCount = totalCount
        print("Total objects", self.totalCount)
Example #11
def load_catalog(file_reg):
    reg_list = regions.read_ds9(file_reg)
    source_list = []
    name_list = []
    for reg in reg_list:
        # source: {'gname': gname, 'glon': glon, 'glat': glat, 'radius': radius}
        source = fsp.db.region_to_source(reg)
        source_list.append(source)
        name_list.append(source['gname'])

    return name_list,source_list
Example #12
def parse(filename, interactive, parser, errors):
    readname = TEST_FILE_DIR / filename
    print('Reading {}'.format(readname))
    print('Using parser {}'.format(parser))
    if parser == 'regions':
        regions = read_ds9(str(readname), errors=errors)
    elif parser == 'pyregion':
        regions = pyregion.open(str(readname))
    print(regions)
    if interactive:
        import IPython
        IPython.embed()
Example #13
def parse(filename, interactive, parser, errors):
    readname = TEST_FILE_DIR / filename
    print('Reading {}'.format(readname))
    print('Using parser {}'.format(parser))
    if parser == 'regions':
        regions = read_ds9(str(readname), errors=errors)
    elif parser == 'pyregion':
        regions = pyregion.open(str(readname))
    print(regions)
    if interactive:
        import IPython
        IPython.embed()
Example #14
def test_ds9region_new(regfile, result, data_adv, use_dask):
    cube, data = cube_and_raw(data_adv, use_dask=use_dask)

    regionlist = regions.read_ds9(path(regfile))

    if isinstance(result, type) and issubclass(result, Exception):
        with pytest.raises(result):
            sc = cube.subcube_from_regions(regionlist)
    else:
        sc = cube.subcube_from_regions(regionlist)
        scsum = sc.sum()
        dsum = data[result].sum()
        assert_allclose(scsum, dsum)
Example #15
def db_add_source(conn, file_cata, ftype='reg'):

    if ftype == 'reg':
        reg_list = regions.read_ds9(file_cata)
        source_list = []
        for region in reg_list:
            source_list.append(db.region_to_source(region))
        db.add_to_source(conn, source_list)
    if ftype == 'db':
        # TODO: implement the 'db' input type
        pass

    return 0
Example #16
def test_ds9region_new(regfile, result):
    cube, data = cube_and_raw('adv.fits')

    regionlist = regions.read_ds9(path(regfile))

    if isinstance(result, type) and issubclass(result, Exception):
        with pytest.raises(result) as exc:
            sc = cube.subcube_from_regions(regionlist)
        # this assertion is redundant, I think...
        assert exc.errisinstance(result)
    else:
        sc = cube.subcube_from_regions(regionlist)
        scsum = sc.sum()
        dsum = data[result].sum()
        assert_allclose(scsum, dsum)
Example #17
def test_ds9region_new(regfile, result):
    cube, data = cube_and_raw('adv.fits')

    regionlist = regions.read_ds9(path(regfile))

    if isinstance(result, type) and issubclass(result, Exception):
        with pytest.raises(result) as exc:
            sc = cube.subcube_from_regions(regionlist)
        # this assertion is redundant, I think...
        assert exc.errisinstance(result)
    else:
        sc = cube.subcube_from_regions(regionlist)
        scsum = sc.sum()
        dsum = data[result].sum()
        assert_allclose(scsum, dsum)
Example #18
def aperture_disk_brightness(data, noisemap, regionFile, radius=3):
    """
	
	
	"""
    from regions import read_ds9, write_ds9
    from ttools import make_radii

    if 'randomseed' in regionFile:
        seed = int(regionFile.split('randomseed')[-1])
        np.random.seed(seed=seed)
        cens = np.array([70, 70
                         ]) + np.array([160, 160]) * np.random.rand(20, 2)
        # TEMP to output randomly generated positions
        # for cen in cens:
        # 	print('circle({},{},3.5967998)'.format(cen[1]+1, cen[0]+1))
    else:
        regions = read_ds9(regionFile)
        # Get centers in numpy coordinates.
        cens = [np.array([reg.center.y, reg.center.x]) for reg in regions]

    apMeans = []
    apSums = []
    noiseApMeans = []
    noiseApSums = []
    noiseApStds = []
    nPixAps = []
    for ii, cen in enumerate(cens):
        radii = make_radii(data, cen)
        apVals = data[radii <= radius]
        apMeans.append(np.nanmean(apVals))
        apSums.append(np.nansum(apVals))
        noiseApVals = noisemap[radii <= radius]
        noiseApMeans.append(np.nanmean(noiseApVals))
        noiseApSums.append(np.nansum(noiseApVals))
        noiseApStds.append(np.nanstd(noiseApVals))
        nPixAps.append(apVals.size)

    apMeans = np.array(apMeans)
    apSums = np.array(apSums)
    noiseApMeans = np.array(noiseApMeans)
    noiseApSums = np.array(noiseApSums)
    noiseApStds = np.array(noiseApStds)
    nPixAps = np.array(nPixAps)

    return apMeans, apSums, noiseApMeans, noiseApSums, noiseApStds, nPixAps, cens
Example #19
def make_mask_image(hdu, mask_file):
    """
    Make a mask file (foreground stars, background galaxies) that can be
    applied to each aperture image

    Parameters
    ----------
    hdu : astropy hdu object
        An HDU with a reference image

    mask_file : string
        path+name of ds9 region file with masks


    Returns
    -------
    mask_image : array
        an image of 1s and 0s, where 0s represent masked pixels

    """

    # make a 1s image
    mask_image = np.ones(hdu.data.shape)

    # WCS for the HDU
    im_wcs = wcs.WCS(hdu.header)

    # read in the ds9 file
    region_list = regions.read_ds9(mask_file)


    for i in range(len(region_list)):

        # turn it into a pixel region (rather than sky region)
        pix_reg = region_list[i].to_pixel(im_wcs)
        # make a mask object (can only be done with pixel regions, unfortunately)
        mask_obj = pix_reg.to_mask(mode='center')
        # make a region image (1 corresponds to where the region is)
        region_im = mask_obj.to_image(hdu.data.shape)
        # mask that region
        mask_image[region_im == 1] = 0


    # return final image
    return mask_image
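A minimal usage sketch (placeholder file names) showing how the returned image of 1s and 0s can be applied to the reference data.

from astropy.io import fits

hdu = fits.open('reference_image.fits')[0]
mask = make_mask_image(hdu, 'foreground_masks.reg')
clean_data = hdu.data * mask  # masked pixels become 0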
Example #20
def import_ds9_regions(ds9_file):
    """
    Convenience function to read a ds9 file containing regions and
    return a list of matching Ginga canvas objects.

    Parameters
    ----------
    ds9_file : str
        Path of a ds9 like regions file

    Returns
    -------
    objs : list
        Returns a list of Ginga canvas objects that can be added
        to a Ginga canvas
    """
    regs = regions.read_ds9(ds9_file)

    return [astropy_region_to_ginga_canvas_object(r) for r in regs]
Example #21
def mask_select(regfile, coords, opt=1):
    print('    - reading file...')
    regions = read_ds9(regfile)
    print('    - read file OK')

    reg_total = regions[0]
    # make composite
    if len(regions) > 1:
        for reg in regions[1:]:
            reg_total &= reg
    print('    - composite made OK')

    # find sources
    if opt == 1:  # is in
        col = reg_total.contains(coords, w)
    elif opt == 0:  # is outside of
        col = ~reg_total.contains(coords, w)
    print('    - column made OK')

    return col
Example #22
def read_arc_data_ds9(filename):
    """
    Return the sky coordinates of a star (single point of type
    `pt_star`) and arc (multiple points of type: `pt_arc`), which are
    read from the DS9 region file `filename`
    """
    regions = rg.read_ds9(filename)
    # Eliminate regions that are not points since they will cause errors
    regions = list(filter(is_pt_region, regions))

    points = [x for x in regions if x.visual["point"] == PT_ARC]
    stars = [x for x in regions if x.visual["point"] == PT_STAR]
    assert len(stars) > 0, f"At least one '{PT_STAR}' region is required"
    star = stars[0]
    if len(stars) > 1:
        print(
            f"WARNING: multiple '{PT_STAR}' regions found in {filename}",
            "- using first one",
        )
    return star, points
Example #23
def make_exclusion_mask(input, output, skydir, frame, binsize, width,
                        projection, overwrite):
    """Performs automated data reduction process."""
    exclusion_regions = read_ds9(input)
    log.info(f"Creating exclusion mask from {exclusion_regions}")

    skydir = SkyCoord(skydir, frame=frame)

    width = u.Quantity(width, unit="deg")
    binsize = u.Quantity(binsize, unit="deg")

    geom = WcsGeom.create(width=width,
                          binsz=binsize,
                          frame=frame,
                          skydir=skydir,
                          proj=projection)
    log.info(geom)

    mask = ~geom.region_mask(exclusion_regions)
    log.info(f"Writing exclusion mask in {output}.")
    mask.write(output, overwrite=overwrite)
Example #24
def load_regions(region_file):
    """
    Parameters
    ----------
    region_file : str
        Region File

    Returns
    -------
    list of CircleSkyRegions
    """

    # workaround: the regions parser chokes on an uppercase "FK5" frame name,
    # so lowercase the file before parsing
    with tempfile.NamedTemporaryFile() as tmpfile, open(region_file) as rf:
        tmpfile.write(rf.read().lower().encode())
        tmpfile.flush()
        include_regions = read_ds9(tmpfile.name)
        log.info("Read %s inclusion region(s) from %s", len(include_regions),
                 region_file)

        return include_regions
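Hypothetical usage with a placeholder file name; the region file may contain an uppercase "FK5" frame line, which the lowercasing workaround above handles before parsing.

include_regions = load_regions('on_regions.reg')
print(f"{len(include_regions)} inclusion region(s) loaded")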
Example #25
def validate_det1_region(regfile):
    """
    Code for making sure that region files that you're trying to use on the DET1
    analysis code are in "image" coordinates
    """
    err=-1
    from regions.io.ds9.read import DS9Parser
    from regions import read_ds9
    assert os.path.isfile(regfile), f'{regfile} does not exist!'
    
    with open(regfile) as fh: 
        region_string = fh.read()
    parser = DS9Parser(region_string)
    assert parser.coordsys == 'image', \
        f'Region coordinate system is {parser.coordsys}, not image!'

    # Check to make sure that the first region in the file is an "include" region
    reg = read_ds9(regfile)
    for ri in reg:
        assert ri.meta['include'] is True, \
            f'\n {regfile} has an exclusion region first! \n Put the source region first instead!'
        break
Example #26
    def _configAxes(self, ax, title, regionFiles, regionsColors, wcs):

        if title:
            ax.set_title(title, fontsize='large')

        if len(regionFiles) != len(regionsColors):
            raise ConfigurationsNotValidError(
                "lenght of regFiles does not match with length of regFileColors\n"
            )

        # interpolation = "gaussian",
        for idx, regionFile in enumerate(regionFiles):
            if regionFile is not None:
                regions = read_ds9(regionFile)
                for region in regions:
                    pixelRegion = region.to_pixel(wcs=wcs)
                    pixelRegion.plot(ax=ax, edgecolor=regionsColors[idx])

        if "GLON" in wcs.wcs.ctype[0]:
            ax.set_xlabel('Galactic Longitude' + " (" + str(wcs.wcs.cunit[0]) +
                          ")")
            ax.set_ylabel('Galactic Latitude' + " (" + str(wcs.wcs.cunit[1]) +
                          ")")

        elif "RA" in wcs.wcs.ctype[0]:
            ax.set_xlabel('Right ascension' + " (" + str(wcs.wcs.cunit[0]) +
                          ")")
            ax.set_ylabel('Declination' + " (" + str(wcs.wcs.cunit[1]) + ")")

        else:
            self.logger.warning(
                f"wcs ctype does not contain GLON or RA but {wcs.wcs.ctype[0]}")

        ax.grid(visible=True, color='white', ls='solid')

        return ax
Example #27
def colorize_region(
    fn,
    vlims=None,
):

    regs = regions.read_ds9(fn)

    if vlims is not None:
        vmin, vmax = vlims
    else:
        vels = [float(reg.meta['text'].strip("{}")) for reg in regs]
        vmin, vmax = min(vels), max(vels)

    cmap = pl.cm.Spectral_r
    norm = pl.matplotlib.colors.Normalize(vmin=vmin, vmax=vmax)

    for reg in regs:
        text = reg.meta['text'].strip('{}')
        velo = float(text)
        color = pl.matplotlib.colors.rgb2hex(cmap(norm(velo)))
        reg.meta['color'] = color

    return regs
Example #28
coord = coordinates.SkyCoord(83.81048816210084,
                             -5.3751716623649575,
                             frame='icrs',
                             unit=(u.deg, u.deg))

#diskycoord_list = pyregion.open(paths.rpath("{0}_disk_pvextract.reg"
#                                            .format(source)))[0].coord_list
#diskycoords = coordinates.SkyCoord(["{0} {1}".format(diskycoord_list[jj],
#                                                     diskycoord_list[jj+1])
#                                    for jj in range(0,
#                                                    len(diskycoord_list),
#                                                    2)], unit=(u.deg,
#                                                               u.deg),
#                                   frame='icrs')
#diskycoorddict[source] = diskycoords
diskycoord_list = regions.read_ds9(
    paths.rpath("{0}_disk_pvextract.reg".format(source)))
diskycoorddict[source] = coordinates.SkyCoord(
    [diskycoord_list[0].start, diskycoord_list[0].end])

for width in (0.01, 0.05, 0.1, 0.2, 0.3, 0.4):
    for name, cutoutname, source, vrange, vcen in (('sourceI', 'sourceI',
                                                    coord, (-30, 40),
                                                    assumed_vcen), ):

        diskycoords = diskycoorddict[name]

        for fnt in (
                #'/Volumes/external/orion/OrionSourceI_only.B6.robust0.5.spw{0}.maskedclarkclean10000.image.pbcor.fits',
                #'/Volumes/external/orion/OrionSourceI_only.B6.robust-2.spw{0}.maskedclarkclean10000.image.pbcor.fits',
                #'/Volumes/external/orion/OrionSourceI_only.B6.robust-2.longbaselines.spw{0}.maskedclarkclean10000.image.pbcor.fits',
                #'/Volumes/external/orion/OrionSourceI_only.B3.robust-2.spw{0}.clarkclean10000.image.pbcor.fits',
Example #29
def imstats(fn, reg=None):
    try:
        fh = fits.open(fn)
        data = fh[0].data
        ww = wcs.WCS(fh[0].header)
    except IsADirectoryError:
        cube = SpectralCube.read(fn, format='casa_image')
        data = cube[0].value
        ww = cube.wcs

    mad = mad_std(data, ignore_nan=True)
    peak = np.nanmax(data)
    imsum = np.nansum(data)
    sumgt5sig = np.nansum(data[data > 5 * mad])
    sumgt3sig = np.nansum(data[data > 3 * mad])

    pixscale = wcs.utils.proj_plane_pixel_area(ww) * u.deg**2

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning, append=True)
        warnings.filterwarnings('ignore', category=RuntimeWarning, append=True)

        if 'cube' in locals():
            try:
                bm = cube.beam
                ppbeam = (bm.sr / pixscale).decompose()
                assert ppbeam.unit.is_equivalent(u.dimensionless_unscaled)
                ppbeam = ppbeam.value
            except NoBeamError:
                ppbeam = np.nan
                bm = Beam(np.nan)
        else:
            try:
                bm = Beam.from_fits_header(fh[0].header)
                ppbeam = (bm.sr / pixscale).decompose()
                assert ppbeam.unit.is_equivalent(u.dimensionless_unscaled)
                ppbeam = ppbeam.value
            except NoBeamException:
                ppbeam = np.nan
                bm = Beam(np.nan)

        meta = {
            'beam': bm.to_header_keywords(),
            'bmaj': bm.major.to(u.arcsec).value,
            'bmin': bm.minor.to(u.arcsec).value,
            'bpa': bm.pa.value,
            'mad': mad,
            'peak': peak,
            'peak/mad': peak / mad,
            'ppbeam': ppbeam,
            'sum': imsum,
            'fluxsum': imsum / ppbeam,
            'sumgt5sig': sumgt5sig,
            'sumgt3sig': sumgt3sig,
        }

    if reg is not None:
        reglist = regions.read_ds9(reg)
        data = data.squeeze()
        composite_region = reduce(operator.or_, reglist)
        if hasattr(composite_region, 'to_mask'):
            msk = composite_region.to_mask()
        else:
            preg = composite_region.to_pixel(ww.celestial)
            msk = preg.to_mask()
        cutout_pixels = msk.cutout(data)[msk.data.astype('bool')]

        meta['mad_sample'] = mad_std(cutout_pixels, ignore_nan=True)
        meta['std_sample'] = np.nanstd(cutout_pixels)

    if fn.endswith('.image.tt0') or fn.endswith(
            '.image.tt0.fits') or fn.endswith(
                '.image.tt0.pbcor.fits') or fn.endswith('.image.tt0.pbcor'):
        psf_fn = fn.split(".image.tt0")[0] + ".psf.tt0"
    elif fn.endswith('.model.tt0') or fn.endswith(
            '.model.tt0.fits') or fn.endswith(
                '.model.tt0.pbcor.fits') or fn.endswith('.model.tt0.pbcor'):
        psf_fn = fn.split(".model.tt0")[0] + ".psf.tt0"
    elif fn.endswith('.image') or fn.endswith('.image.fits') or fn.endswith(
            '.image.pbcor.fits') or fn.endswith('.image.pbcor'):
        psf_fn = fn.split(".image") + ".psf"
    else:
        raise IOError("Wrong image type passed to imstats: {fn}".format(fn=fn))

    if os.path.exists(psf_fn):
        psf_secondpeak, psf_secondpeak_loc, psf_sidelobe1_fraction = get_psf_secondpeak(
            psf_fn)
        meta['psf_secondpeak'] = psf_secondpeak
        meta['psf_secondpeak_radius'] = psf_secondpeak_loc
        meta['psf_secondpeak_sidelobefraction'] = psf_sidelobe1_fraction
    else:
        meta['psf_secondpeak'] = np.nan
        meta['psf_secondpeak_radius'] = np.nan
        meta['psf_secondpeak_sidelobefraction'] = np.nan

    return meta
Example #30
import numpy as np
import paths
from astropy.io import fits
import pylab as pl
from astropy import wcs
import astropy.visualization
from astropy.nddata import Cutout2D
from astropy import coordinates
from astropy import units as u
import regions

noise_regions = regions.read_ds9(paths.rpath('noise_regions.reg'))

sc5tt = fits.open(
    paths.mergepath(
        'continuum/SgrB2_selfcal_full_TCTE7m_selfcal5_ampphase_taylorterms_multiscale_deeper_mask2.5mJy.image.tt0.pbcor.fits'
    ))
mywcs = wcs.WCS(sc5tt[0].header)

data = sc5tt[0].data
peak = np.nanmax(data)
print("Peak flux: {0} Jy".format(peak))

for reg in noise_regions:

    pixreg = reg.to_pixel(mywcs)
    mask = pixreg.to_mask()
    cutout = mask.cutout(data) * mask.data

    name = reg.meta['text'].strip("{}")
Example #31
    new_header = cutout.wcs.to_header()
    mywcs = cutout.wcs

    fits.PrimaryHDU(data=data,
                    header=new_header).writeto(
                        paths.dpath(
                            'contmodels/{0}_fitted_data.fits'
                            .format(band)),
                        overwrite=True)

    pixscale = wcs.utils.proj_plane_pixel_area(mywcs)**0.5 * u.deg

    # old version diskends = coordinates.SkyCoord(['5:35:14.5232 -5:22:30.73',
    # old version                                  '5:35:14.5132 -5:22:30.54'],
    # old version                                 frame='fk5', unit=(u.hour, u.deg))
    diskend_regs = regions.read_ds9(paths.rpath('diskends.reg'))
    diskends = coordinates.SkyCoord([reg.center for reg in diskend_regs])

    diskends_pix = np.array(mywcs.wcs_world2pix(diskends.ra.deg, diskends.dec.deg, 0))
    (x1,x2),(y1,y2) = diskends_pix

    observed_beam = radio_beam.Beam.from_fits_header(fh[0].header)
    beams[band] = observed_beam

    data_K = (data*u.Jy).to(u.K, observed_beam.jtok_equiv(freq))

    new_header_K = new_header.copy()
    new_header_K['BUNIT'] = 'K'
    fits.PrimaryHDU(data=data_K.value, header=new_header_K).writeto(
        paths.dpath('contmodels/{0}_data_K_cutout.fits'.format(band)),
        overwrite=True)
Example #32
 def _readCenterReg(self, filename):
     cntReg = regions.read_ds9(filename)
     return cntReg
Example #33
from pathlib import Path

import astropy.units as u
from astropy.io.fits import Header
from astropy.wcs import WCS
from regions import read_ds9, write_ds9

imgHeaderPath = Path('headers')
regFile = Path('pixCoords/xyreg.lis')
regDirec = Path('regionFiles/xyregs')
finalReg = Path('regionFiles/skyregs')

arcsec2deg = 1 / 3600
regRadius = 20. * arcsec2deg

fp = open(regFile, 'r')
ll = fp.readlines()
fp.close()

for name in range(len(ll)):
    #     print(regName)
    regName = Path(ll[name].strip())
    print(regName)
    imgName = regName.with_suffix('')
    print(imgName)
    imgName = imgName.with_suffix('')
    print(imgName)
    reg = read_ds9(regDirec.joinpath(regName).with_suffix('.reg'))
    hdr = Header.fromtextfile(
        str(imgHeaderPath.joinpath(imgName).with_suffix('.head')))
    w = WCS(hdr)
    for rr in range(len(reg)):
        reg[rr] = reg[rr].to_sky(wcs=w)

    write_ds9(reg, finalReg.joinpath(imgName).with_suffix('.sky.reg'))
Example #34
import regions
import radio_beam
from astropy import units as u
from astropy import log
from astropy import wcs
from astropy.stats import mad_std
from astropy.io import fits
from astropy.table import Table,Column

import paths
from core_photometry import photometry

lines = ["CFp", "CH3OH7m26-716", "H15NC", "H2CO615-616", "H2CS303-202",
         "H2CS313-212", "H2CS322-221", "H41a", "HC3N", "HCN", "HNC",]

if __name__ == "__main__":
    regs = regions.read_ds9(paths.rpath('sgrb2_cores_TE.reg'))

    all_results = {}

    for line in lines:
        fn = paths.mergepath("max/SgrB2_b3_7M_12M.{0}.image.pbcor_max_medsub.fits".format(line))
        ffile = fits.open(fn)
        data = ffile[0].data
        header = ffile[0].header
        beam = radio_beam.Beam.from_fits_header(header)
        mywcs = wcs.WCS(header)

        units = {'peak':u.Jy/u.beam,
                 'sum':u.Jy/u.beam,
                 'npix':u.dimensionless_unscaled,
                 'beam_area':u.sr,
Example #35
from astropy.utils.data import get_pkg_data_filename
from astropy.io import fits


from matplotlib import pyplot as plt

from regions import read_ds9

image_file = get_pkg_data_filename('tutorials/FITS-images/HorseHead.fits')
print(image_file)
image_data = fits.getdata(image_file, ext=0)

ax = plt.gca()
plt.imshow(image_data, cmap='gray')

print(ax.get_xlim(), ax.get_ylim())
ax.set_ylim([-0.5, 892.5])

regs = read_ds9('plot_image.reg')

for i, reg in enumerate(regs):
    reg.plot(ax=ax)

plt.show()
Example #36
p_region_count = re.compile(r"[^=\)]\(")

results = list()

for filename in TEST_FILE_DIR.glob('*.reg'):
    print('\n\n{}'.format(filename))

    # estimate total number of regions
    n_regions = 0
    with open(str(filename)) as origin_file:
        for line in origin_file:
            n_regions += len(p_region_count.findall(line))

    # regions
    try:
        region_regions = read_ds9(str(filename), errors='warn')
    except DS9RegionParserError:
        time_regions = -1
        compl_regions = 0
    else:
        time_regions = timeit.timeit(
            "read_ds9(str(filename), errors='warn')",
            setup='from regions import read_ds9',
            globals=globals(),
            number=REPETITIONS) / REPETITIONS

        compl_regions = np.divide(len(region_regions), n_regions)

    # pyregion
    try:
        pyregion_regions = pyregion.open(str(filename))