Example #1
def load_and_reduce(filename, add_noise=False, rms_noise=0.001,
                    nsig=3):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        from scipy.stats import norm
        cube += norm.rvs(0.0, rms_noise, cube.shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)
    reduc.make_moments()
    reduc.make_moment_errors()

    return reduc.to_dict()
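
A minimal usage sketch for load_and_reduce above, assuming a hypothetical FITS cube file sim_cube.fits and that the TurbuStat-era helpers it calls (getdata, SpectralCube, LazyMask, Mask_and_Moments) are importable:

# Hypothetical input file; any FITS spectral cube works here.
props = load_and_reduce("sim_cube.fits", add_noise=True,
                        rms_noise=0.005, nsig=3)

# to_dict() keys the moment maps by name, e.g. "moment0" for the
# integrated intensity (Example #9 below indexes dataset["moment0"]).
print(props.keys())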
Example #2
def reduce_and_save(filename, add_noise=False, rms_noise=0.001,
                    output_path="", cube_output=None,
                    nsig=3, slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        # Optionally scale noise by 1/10th of the 98th percentile in the cube
        if rms_noise == 'scaled':
            rms_noise = 0.1*np.percentile(cube[np.isfinite(cube)], 98)

        from scipy.stats import norm
        if not slicewise_noise:
            cube += norm.rvs(0.0, rms_noise, cube.shape)
        else:
            spec_shape = cube.shape[0]
            slice_shape = cube.shape[1:]
            for i in range(spec_shape):
                cube[i, :, :] += norm.rvs(0.0, rms_noise, slice_shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    # Remove .fits from filename
    save_name = os.path.splitext(os.path.basename(filename))[0]

    reduc.to_fits(output_path+save_name)

    # Save the noisy cube too
    if add_noise:
        save_name += ".fits"
        if cube_output is None:
            reduc.cube.hdu.writeto(output_path + save_name)
        else:
            reduc.cube.hdu.writeto(cube_output + save_name)
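
The slicewise_noise=True branch above draws an independent noise plane for every spectral channel instead of one realization for the whole cube. A self-contained sketch of that idea on a toy array, assuming only numpy and scipy:

import numpy as np
from scipy.stats import norm

cube = np.zeros((4, 8, 8))  # toy (spectral, y, x) cube
rms_noise = 0.001

# One independent Gaussian noise realization per spectral slice.
for i in range(cube.shape[0]):
    cube[i, :, :] += norm.rvs(0.0, rms_noise, cube.shape[1:])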
Example #3
def load_and_reduce(filename,
                    add_noise=False,
                    rms_noise=0.001,
                    nsig=3,
                    slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        # Optionally scale noise by 1/10th of the 98th percentile in the cube
        if rms_noise == 'scaled':
            rms_noise = 0.1 * np.percentile(cube[np.isfinite(cube)], 98)

        from scipy.stats import norm
        if not slicewise_noise:
            cube += norm.rvs(0.0, rms_noise, cube.shape)
        else:
            spec_shape = cube.shape[0]
            slice_shape = cube.shape[1:]
            for i in range(spec_shape):
                cube[i, :, :] += norm.rvs(0.0, rms_noise, slice_shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)
    reduc.make_moments()
    reduc.make_moment_errors()

    return reduc.to_dict()
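
The LazyMask(np.isfinite, sc) pattern used throughout these examples attaches a mask that is evaluated lazily, blanking NaN/inf voxels without touching the data up front. A minimal sketch, assuming spectral-cube is installed and a hypothetical file sim_cube.fits:

import numpy as np
from spectral_cube import SpectralCube, LazyMask

sc = SpectralCube.read("sim_cube.fits")  # hypothetical file

# The mask function is only applied when the data are actually accessed.
sc = sc.with_mask(LazyMask(np.isfinite, sc))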
Example #4
def load_and_reduce(filename, add_noise=False, rms_noise=0.001,
                    nsig=3, slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        # Optionally scale noise by 1/10th of the 98th percentile in the cube
        if rms_noise == 'scaled':
            rms_noise = 0.1*np.percentile(cube[np.isfinite(cube)], 98)

        from scipy.stats import norm
        if not slicewise_noise:
            cube += norm.rvs(0.0, rms_noise, cube.shape)
        else:
            spec_shape = cube.shape[0]
            slice_shape = cube.shape[1:]
            for i in range(spec_shape):
                cube[i, :, :] += norm.rvs(0.0, rms_noise, slice_shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)
    reduc.make_moments()
    reduc.make_moment_errors()

    return reduc.to_dict()
Example #5
    kernel = beam.as_tophat_kernel(pixscale)
    kernel_pix = (kernel.array > 0).sum()

    for i in ProgressBar(mask.shape[0]):
        mask[i] = nd.binary_opening(mask[i], kernel)
        mask[i] = nd.binary_closing(mask[i], kernel)
        mask[i] = mo.remove_small_objects(mask[i], min_size=kernel_pix,
                                          connectivity=2)
        mask[i] = mo.remove_small_holes(mask[i], min_size=kernel_pix,
                                        connectivity=2)

    # Each region must contain a point above the peak_snr
    labels, num = nd.label(mask, np.ones((3, 3, 3)))
    for n in range(1, num + 1):
        pts = np.where(labels == n)
        if np.nanmax(snr[pts]) < peak_snr:
            mask[pts] = False

    masked_cube = cube.with_mask(mask)

    # Save the cube
    masked_cube.write("{}.masked.fits".format(name))

    # Now make the moment arrays
    reduc = Mask_and_Moments(masked_cube, scale=noise.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    reduc.to_fits(os.path.join("moments/", name))
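
The per-channel loop above cleans the signal mask with morphological opening and closing, then discards features smaller than the beam. A toy 2D sketch of the same sequence, assuming scipy and scikit-image (newer scikit-image spells the remove_small_holes size argument area_threshold rather than the min_size used above):

import numpy as np
import scipy.ndimage as nd
import skimage.morphology as mo

mask = np.random.rand(64, 64) > 0.7    # toy boolean mask
struct = np.ones((3, 3), dtype=bool)   # stand-in for the beam kernel
kernel_pix = struct.sum()

# Opening removes isolated speckles; closing fills small gaps.
clean = nd.binary_opening(mask, struct)
clean = nd.binary_closing(clean, struct)
# Drop connected regions and holes smaller than the kernel footprint.
clean = mo.remove_small_objects(clean, min_size=kernel_pix, connectivity=2)
clean = mo.remove_small_holes(clean, area_threshold=kernel_pix, connectivity=2)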
Example #6
    for i in ProgressBar(mask.shape[0]):
        mask[i] = nd.binary_opening(mask[i], kernel)
        mask[i] = nd.binary_closing(mask[i], kernel)
        mask[i] = mo.remove_small_objects(mask[i],
                                          min_size=kernel_pix,
                                          connectivity=2)
        mask[i] = mo.remove_small_holes(mask[i],
                                        min_size=kernel_pix,
                                        connectivity=2)

    # Each region must contain a point above the peak_snr
    labels, num = nd.label(mask, np.ones((3, 3, 3)))
    for n in range(1, num + 1):
        pts = np.where(labels == n)
        if np.nanmax(snr[pts]) < peak_snr:
            mask[pts] = False

    masked_cube = cube.with_mask(mask)

    # Save the cube
    masked_cube.write("{}.masked.fits".format(name))

    # Now make the moment arrays
    reduc = Mask_and_Moments(masked_cube, scale=noise.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    reduc.to_fits(os.path.join("moments/", name))
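
The labelling step in Examples #5 and #6 keeps only regions containing at least one point above peak_snr. A 2D toy sketch of that filter, assuming scipy.ndimage:

import numpy as np
import scipy.ndimage as nd

snr = np.random.rand(32, 32) * 6   # toy signal-to-noise map
mask = snr > 2                     # initial low-significance cut
peak_snr = 5

# Label connected regions, then blank any region whose peak SNR
# never reaches peak_snr.
labels, num = nd.label(mask, np.ones((3, 3)))
for n in range(1, num + 1):
    pts = np.where(labels == n)
    if np.nanmax(snr[pts]) < peak_snr:
        mask[pts] = False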
Example #7
                 append_prefix=True, design_labels=[], verbose=False)

# Now the AMR cubes
fiducials_amr, _, _ = \
    files_sorter(path_to_amrdata, append_prefix=True, design_labels=[],
                 faces=faces, timesteps='last', verbose=False)

# If the AMR moments path doesn't exist, make the moment arrays and save.
if not os.path.exists(amrmoments_path):

    os.mkdir(amrmoments_path)

    for face in faces:
        for fid in fiducials_amr[face]:
            fid_name = fiducials_amr[face][fid]
            mask_mom = Mask_and_Moments(fid_name, scale=0.001 * u.K)
            mask_mom.make_moments()
            mask_mom.make_moment_errors()

            save_name = os.path.splitext(os.path.basename(fid_name))[0]
            mask_mom.to_fits(os.path.join(amrmoments_path, save_name))

# Now run the distances AMR vs. none.
statistics = copy(statistics_list)
statistics.append("DeltaVariance_Centroid_Curve")
statistics.append("DeltaVariance_Centroid_Slope")

print "Statistics to run: %s" % (statistics)
num_statistics = len(statistics)

for face in faces:
Example #8
def reduce_and_save(filename,
                    add_noise=False,
                    regrid_linewidth=False,
                    rms_noise=0.001 * u.K,
                    output_path="",
                    cube_output=None,
                    nsig=3,
                    slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise or regrid_linewidth:

        sc = SpectralCube.read(filename)

        if add_noise:
            if rms_noise is None:
                raise TypeError("Must specify value of rms noise.")

            cube = sc.filled_data[:].value

            # Optionally scale noise by 1/10th of the 98th percentile in the
            # cube
            if rms_noise == 'scaled':
                rms_noise = 0.1 * \
                    np.percentile(cube[np.isfinite(cube)], 98) * sc.unit

            from scipy.stats import norm
            if not slicewise_noise:
                cube += norm.rvs(0.0, rms_noise.value, cube.shape)
            else:
                spec_shape = cube.shape[0]
                slice_shape = cube.shape[1:]
                for i in range(spec_shape):
                    cube[i, :, :] += norm.rvs(0.0, rms_noise.value,
                                              slice_shape)

            sc = SpectralCube(data=cube * sc.unit,
                              wcs=sc.wcs,
                              meta={"BUNIT": "K"})

            mask = LazyMask(np.isfinite, sc)
            sc = sc.with_mask(mask)

        if regrid_linewidth:
            # Normalize the cubes to have the same linewidth
            # channels_per_sigma=20 scales to the largest mean line width in
            # SimSuite8 (~800 km/s; Design 22). So effectively everything is
            # "smoothed" to have this line width
            # Intensities are normalized by their 95% value.
            sc = preprocessor(sc,
                              min_intensity=nsig * rms_noise,
                              norm_intensity=True,
                              norm_percentile=95,
                              channels_per_sigma=20)

    else:
        sc = filename

    # Run the same signal masking procedure that was used for the
    # COMPLETE cubes
    if add_noise:
        # The default settings were set based on these cubes
        sc = make_signal_mask(sc)[0]

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    if not add_noise:
        reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    # Remove .fits from filename
    save_name = os.path.splitext(os.path.basename(filename))[0]

    reduc.to_fits(os.path.join(output_path, save_name))

    # Save the noisy cube too
    if add_noise or regrid_linewidth:
        save_name += ".fits"
        if cube_output is None:
            sc.hdu.writeto(os.path.join(output_path, save_name))
        else:
            sc.hdu.writeto(os.path.join(cube_output, save_name))
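
The rms_noise == 'scaled' branch above sets the noise level to a tenth of the cube's 98th-percentile brightness, carrying the cube's unit. A standalone sketch of that computation, assuming numpy and astropy.units:

import numpy as np
import astropy.units as u

cube = np.random.rand(8, 16, 16)  # toy data, nominally in K
unit = u.K

# 1/10th of the 98th percentile of the finite values, with units attached.
rms_noise = 0.1 * np.percentile(cube[np.isfinite(cube)], 98) * unit
print(rms_noise)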
Example #9
from turbustat.statistics import Wavelet_Distance, \
    DendroDistance, PDF_Distance
from turbustat.data_reduction import Mask_and_Moments
import os
import matplotlib.pyplot as p
import seaborn as sns
sns.set_style("white")
import astropy.units as u

# p.ioff()


path_to_data = "/media/eric/Data_3/Astrostat/SimSuite8/"
moments_path = os.path.join(path_to_data, "moments/")
figure_path = os.path.expanduser("~/Dropbox/My_Papers/Submitted/astrostat-paper2/method_figures/")

des2 = "lustrehomeerosSimSuite8Design2_flatrho_0029_00_radmc.fits"
dataset1 = Mask_and_Moments.from_fits(os.path.join(path_to_data, des2),
                                      moments_path=moments_path).to_dict()
# des19 = "lustrehomeerosSimSuite8Design19_flatrho_0030_00_radmc.fits"
des19 = "lustrehomeerosSimSuite7Fiducial1_flatrho_0029_00_radmc.fits"
dataset2 = Mask_and_Moments.from_fits(os.path.join(path_to_data, des19),
                                      moments_path=moments_path).to_dict()

label1 = "Design 2"
label2 = "Fiducial 1"
# label2 = "Design 19"

values = {}

# Wavelet Transform

wavelet_distance = \
    Wavelet_Distance(dataset1["moment0"],
                     dataset2["moment0"])
Example #10
# Now the 256 cubes
fiducials_256, _, _ = \
    files_sorter(path_to_256, append_prefix=True, design_labels=[],
                 faces=faces, fiducial_labels=[256], timesteps=1)

if run_moments:
    # Save moment arrays of the 256 cubes.

    moments_path = os.path.join(path_to_256, "moments")
    if not os.path.exists(moments_path):
        os.mkdir(moments_path)

    for face in fiducials_256:
        fid_name = fiducials_256[face][256][0]
        mask_mom = Mask_and_Moments(fid_name, scale=0.001 * u.K)
        mask_mom.make_moments()
        mask_mom.make_moment_errors()
        save_name = os.path.splitext(os.path.basename(fid_name))[0]

        mask_mom.to_fits(os.path.join(moments_path, save_name))

if run_distances:

    # Set which stats to run.
    statistics = copy(statistics_list)
    # statistics.remove("Dendrogram_Hist")
    # statistics.remove("Dendrogram_Num")
    statistics.append("DeltaVariance_Centroid_Curve")
    statistics.append("DeltaVariance_Centroid_Slope")
Example #12
fits1 = str(sys.argv[1])
fits2 = str(sys.argv[2])

# scale = float(sys.argv[3])

cube1 = SpectralCube.read(fits1)
cube2 = SpectralCube.read(fits2)

# Shorten the name for the plots
fits1 = os.path.basename(fits1)
fits2 = os.path.basename(fits2)

# Naive error estimation; useful only for testing out the methods.
scale1 = cube1.std().value
scale2 = cube2.std().value

set1 = Mask_and_Moments(cube1, scale=scale1)
# mask = cube1 > sigma * set1.scale
# set1.make_mask(mask=mask)
set1.make_moments()
set1.make_moment_errors()
dataset1 = set1.to_dict()

set2 = Mask_and_Moments(cube2, scale=scale2)
# mask = cube2 > sigma * set2.scale
# set2.make_mask(mask=mask)
set2.make_moments()
set2.make_moment_errors()
dataset2 = set2.to_dict()

# Wavelet Transform
Example #13
add_noise = False


data1, hdr1 = fits.getdata(fits1, header=True)
data2, hdr2 = fits.getdata(fits2, header=True)

if add_noise:
    data1 += np.random.normal(0.0, 0.788 / 10, data1.shape)
    data2 += np.random.normal(0.0, 0.788 / 10, data2.shape)

cube1 = SpectralCube(data=data1, wcs=WCS(hdr1))
cube1 = cube1.with_mask(LazyMask(np.isfinite, cube1))
cube2 = SpectralCube(data=data2, wcs=WCS(hdr2))
cube2 = cube2.with_mask(LazyMask(np.isfinite, cube2))

set1 = Mask_and_Moments(cube1)
mask = cube1 > 3*set1.scale
set1.make_mask(mask=mask)
set1.make_moments()
set1.make_moment_errors()
dataset1 = set1.to_dict()

set2 = Mask_and_Moments(cube2)
mask = cube2 > 3*set2.scale
set2.make_mask(mask=mask)
set2.make_moments()
set2.make_moment_errors()
dataset2 = set2.to_dict()


# Wavelet Transform
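
Examples #12 and #13 both cut off at the "# Wavelet Transform" comment. Based on Example #9, which feeds the two moment0 maps to Wavelet_Distance, a hedged sketch of what plausibly follows (the exact TurbuStat call signature varies between versions):

from turbustat.statistics import Wavelet_Distance

# Compare the zeroth-moment maps of the two datasets; distance_metric()
# runs the wavelet transform on both and computes the distance.
wavelet_distance = Wavelet_Distance(dataset1["moment0"],
                                    dataset2["moment0"])
wavelet_distance.distance_metric(verbose=True)
print(wavelet_distance.distance)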