Example #1
# Imports assumed from the surrounding example script (older TurbuStat API).
import numpy as np

from astropy.io.fits import getdata
from astropy.wcs import WCS
from spectral_cube import SpectralCube, LazyMask

from turbustat.data_reduction import Mask_and_Moments


def load_and_reduce(filename, add_noise=False, rms_noise=0.001,
                    nsig=3):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        from scipy.stats import norm
        cube += norm.rvs(0.0, rms_noise, cube.shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        # With no added noise, the input is passed straight through to
        # Mask_and_Moments below.
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)
    reduc.make_moments()
    reduc.make_moment_errors()

    return reduc.to_dict()
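A minimal call sketch (not part of the original script; the FITS path and keyword values below are placeholders): with add_noise=True the first argument must be a file on disk, while with add_noise=False whatever is passed in goes straight to Mask_and_Moments.

# Hypothetical usage of load_and_reduce; 'example_cube.fits' is a placeholder path.
props = load_and_reduce('example_cube.fits', add_noise=True,
                        rms_noise=0.001, nsig=3)
moment0 = props['moment0']  # e.g. the key used by Example #3 below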
Example #2
def load_and_reduce(filename,
                    add_noise=False,
                    rms_noise=0.001,
                    nsig=3,
                    slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.
    '''

    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        # Optionally scale noise by 1/10th of the 98th percentile in the cube
        if rms_noise == 'scaled':
            rms_noise = 0.1 * np.percentile(cube[np.isfinite(cube)], 98)

        from scipy.stats import norm
        if not slicewise_noise:
            cube += norm.rvs(0.0, rms_noise, cube.shape)
        else:
            spec_shape = cube.shape[0]
            slice_shape = cube.shape[1:]
            for i in range(spec_shape):
                cube[i, :, :] += norm.rvs(0.0, rms_noise, slice_shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)
    reduc.make_moments()
    reduc.make_moment_errors()

    return reduc.to_dict()
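The extra keywords in this variant can be combined; a hedged sketch (placeholder path again) that scales the injected noise to 1/10th of the cube's 98th percentile and adds it channel by channel:

props = load_and_reduce('example_cube.fits', add_noise=True,
                        rms_noise='scaled', slicewise_noise=True)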
Example #3
# cube1 is assumed to have been read above in the same way, e.g.
# cube1 = SpectralCube.read(fits1); fits1 and fits2 are the paths to the
# two FITS cubes being compared.
cube2 = SpectralCube.read(fits2)

# Shorten the name for the plots
fits1 = os.path.basename(fits1)
fits2 = os.path.basename(fits2)

# Naive error estimation. Useful only for testing out the methods.
scale1 = cube1.std().value
scale2 = cube2.std().value

set1 = Mask_and_Moments(cube1, scale=scale1)
# mask = cube1 > sigma * set1.scale
# set1.make_mask(mask=mask)
set1.make_moments()
set1.make_moment_errors()
dataset1 = set1.to_dict()

set2 = Mask_and_Moments(cube2, scale=scale2)
# mask = cube2 > sigma * set2.scale
# set2.make_mask(mask=mask)
set2.make_moments()
set2.make_moment_errors()
dataset2 = set2.to_dict()

# Wavelet Transform

from turbustat.statistics import Wavelet_Distance

wavelet_distance = Wavelet_Distance(
    dataset1["moment0"], dataset2["moment0"]).distance_metric(verbose=True,
                                                              label1=fits1,
                                                              label2=fits2)
Example #4
if add_noise:
    data1 += np.random.normal(0.0, 0.788 / 10, data1.shape)
    data2 += np.random.normal(0.0, 0.788 / 10, data2.shape)

cube1 = SpectralCube(data=data1, wcs=WCS(hdr1))
cube1 = cube1.with_mask(LazyMask(np.isfinite, cube1))
cube2 = SpectralCube(data=data2, wcs=WCS(hdr2))
cube2 = cube2.with_mask(LazyMask(np.isfinite, cube2))

set1 = Mask_and_Moments(cube1)
mask = cube1 > 3*set1.scale
set1.make_mask(mask=mask)
set1.make_moments()
set1.make_moment_errors()
dataset1 = set1.to_dict()

set2 = Mask_and_Moments(cube2)
mask = cube2 > 3*set2.scale
set2.make_mask(mask=mask)
set2.make_moments()
set2.make_moment_errors()
dataset2 = set2.to_dict()


# Wavelet Transform

from turbustat.statistics import Wavelet_Distance

wavelet_distance = Wavelet_Distance(dataset1["integrated_intensity"],
                                    dataset2["integrated_intensity"]).distance_metric(verbose=True)