Example #1
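# The snippets below appear to rely on a common set of imports along these
# lines (inferred from usage; the exact modules may differ per original script):
import numpy as np
import astropy.units as u
import matplotlib.pyplot as plt
from astropy.io import fits
from spectral_cube import SpectralCube
from turbustat.statistics import PowerSpectrum, VCA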
def do_vca(vcacube, array_save_loc, fig_save_loc):
    """This function greets to
    the person passed in as
    parameter"""

    vca_array = np.zeros(3)
    #arlen=int(len(vcacube.data[:,0,0]))/10

    #do full thickness mom0 SPS and add to array first
    #import data and compute moment 0
    moment0 = vcacube.moment(order=0)

    #compute SPS, add in distance at some point as parameter
    pspec = PowerSpectrum(moment0)
    pspec.run(verbose=False, xunit=u.pix**-1)
    vca_array = np.vstack(
        (vca_array, [pspec.slope,
                     len(vcacube[:, 0, 0]), pspec.slope_err]))

    #iterate VCA over fractions of the total width of the PPV vcacube
    for i in [128, 64, 32, 16, 8, 4, 2, 1]:
        vcacube.allow_huge_operations = True
        downsamp_vcacube = vcacube.downsample_axis(i, axis=0)
        downsamp_vcacube.allow_huge_operations = True
        vca = VCA(downsamp_vcacube)
        vca.run(verbose=False,
                beam_correct=correctbeam,
                save_name=fig_save_loc + '_thickness' + str(i) + '.png')
        vca_array = np.vstack((vca_array, [vca.slope, i, vca.slope_err]))
    vca_array = vca_array[1:, :]

    #save the array for future plotting without recomputing
    np.save(array_save_loc, vca_array)
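
# A hypothetical call (paths are illustrative); note that `correctbeam` must be
# defined in the enclosing scope before do_vca is called:
# cube = SpectralCube.read("my_cube.fits")
# do_vca(cube, "vca_results.npy", "figures/vca_run1")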
Example #2
def do_vca(inputs):
    # Split the single argument into its components (avoids shadowing the
    # built-in `input`)
    vcacube, chansamps, array_save_loc, fig_save_loc = inputs
    print('starting ' + str(vcacube))

    #load in the vcacube
    vcacube = SpectralCube.read(vcacube)

    #check whether the first slice of the cube contains only NaNs. Even though
    # we read the unmasked data, the FITS data may already be masked upstream,
    # and VCA will fail if the subcube holds only non-finite values

    finites = 0
    nonfinites = 0
    for checkx in np.arange(0, len(vcacube.unmasked_data[0, :, 0])):
        for checky in np.arange(0, len(vcacube.unmasked_data[0, 0, :])):
            if np.isfinite(vcacube.unmasked_data[0, checkx, checky]):
                finites += 1
            else:
                nonfinites += 1
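
    # A vectorized equivalent of the check above (assuming numpy broadcasting
    # over the full first slice):
    # first_slice = vcacube.unmasked_data[0, :, :]
    # finites = np.isfinite(first_slice).sum()
    # nonfinites = first_slice.size - finites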

    #do VCA, or skip if the first slice contains only NaNs/inf
    if finites < 1:
        return 'data is only NaNs/inf'
    else:
        #do full thickness mom0 SPS and add to array first
        #import data and compute moment 0
        moment0 = vcacube.moment(order=0)

        #compute SPS, add in distance at some point as parameter
        pspec = PowerSpectrum(moment0)
        pspec.run(verbose=False, xunit=u.pix**-1)
        vca_array = [pspec.slope, len(vcacube[:, 0, 0]), pspec.slope_err]

        #iterate VCA over fractions of the total width of the PPV vcacube
        #for i in [128,64,32,16,8,4,2,1]:
        for i in chansamps:
            vcacube.allow_huge_operations = True
            downsamp_vcacube = vcacube.downsample_axis(i, axis=0)
            downsamp_vcacube.allow_huge_operations = True
            vca = VCA(downsamp_vcacube)
            vca.run(verbose=True, save_name=f'{fig_save_loc}_thickness{i}.png')
            vca_array = np.vstack((vca_array, [vca.slope, i, vca.slope_err]))

        #save the array for future plotting without recomputing
        np.save(array_save_loc, vca_array)
        print('finished ' + str(vcacube))
        return vca_array
Example #3
def run_pspec(cube,
              distance=414 * u.pc,
              xunit=u.pix**-1,
              pickle_file=None,
              keep_data=False,
              **kwargs):
    cube = SpectralCube.read(cube)
    mom0_hdu = cube.moment0().hdu
    pspec = PowerSpectrum(mom0_hdu, distance=distance)
    pspec.run(xunit=xunit, **kwargs)
    if pickle_file:
        pspec.save_results(pickle_file, keep_data=keep_data)
    return pspec
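
# A hypothetical call (the cube path and pickle name are illustrative):
# pspec = run_pspec("my_cube.fits", distance=414 * u.pc,
#                   pickle_file="my_cube.pspec.pkl", verbose=False)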
Example #4
def test_pspec(plotname="pspec_rnoise_beamsmooth_apodizetukey.pdf",
               size=256,
               powerlaw=3.,
               run_kwargs={
                   'verbose': False,
                   'apodize_kernel': 'tukey'
               },
               plot_kwargs={'fit_color': 'black'},
               beam_smooth=True,
               pixel_scale=2 * u.arcsec,
               bmin=8.09 * u.arcsec,
               bmaj=10.01 * u.arcsec,
               bpa=-12.9 * u.deg,
               restfreq=1.4 * u.GHz,
               bunit=u.K):
    from spectral_cube import Projection
    from radio_beam import Beam

    rnoise_img = make_extended(size, powerlaw)
    # Create a FITS HDU
    rnoise_hdu = create_fits_hdu(rnoise_img, pixel_scale, pixel_scale,
                                 rnoise_img.shape, restfreq, bunit)

    pspec = PowerSpectrum(rnoise_hdu)

    if beam_smooth:
        pencil_beam = Beam(0 * u.deg)
        rnoise_proj = Projection.from_hdu(rnoise_hdu).with_beam(pencil_beam)
        new_beam = Beam(bmaj, bmin, bpa)
        rnoise_conv = rnoise_proj.convolve_to(new_beam)

        # hdr = fits.Header(header)
        # rnoise_hdu = fits.PrimaryHDU(rnoise_img, header=hdr)
        pspec = PowerSpectrum(rnoise_conv)

    pspec.run(**run_kwargs)
    pspec.plot_fit(save_name=plotname, **plot_kwargs)

    return pspec
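
# A hypothetical call using the defaults above (the plot name is illustrative):
# pspec = test_pspec(plotname="pspec_rnoise_test.pdf", size=256, powerlaw=3.)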
Example #5
def make_psf_beam_function(kern_fpath):
    # Load in the pspec and use a spline fit of the pspec for the beam
    # model
    kern_pspec = PowerSpectrum.load_results(kern_fpath)

    largest_val = kern_pspec.ps1D[0]
    smallest_freq = kern_pspec.freqs.value[0]

    spl = InterpolatedUnivariateSpline(kern_pspec.freqs.value, kern_pspec.ps1D)

    def beam_model(f):

        beam_vals = np.empty_like(f)
        # beam_vals = T.zeros_like(f)
        # if on scales larger than the kernel image, return
        # value on largest scale
        beam_vals[f < smallest_freq] = largest_val
        beam_vals[f >= smallest_freq] = spl(f[f >= smallest_freq])

        return beam_vals

    return beam_model
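
# A hypothetical usage sketch (the pickle path is illustrative): build the beam
# model from a saved kernel power spectrum and evaluate it on a frequency grid.
# beam_model = make_psf_beam_function("kernel.pspec.pkl")
# freqs = np.linspace(1e-3, 0.5, 200)
# response = beam_model(freqs)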
Example #6
def plot_pspec(pspec_file,
               save_fig="pspec.pdf",
               plot_fit_kwargs={
                   "fit_color": "black",
                   'show': False
               },
               refit=False,
               refit_kwargs={
                   "low_cut": 0.01 / u.pix,
                   "high_cut": 0.1 / u.pix,
                   'weighted_fit': False,
               }):
    # Load the saved power spectrum; let any read error propagate instead of
    # silently swallowing it (a bare except here would leave `pspec` undefined).
    pspec = PowerSpectrum.load_results(pspec_file)

    if refit:
        pspec.fit_pspec(**refit_kwargs)
        pspec.save_results(pspec_file, keep_data=True)
    pl = pspec.plot_fit(save_name=save_fig, **plot_fit_kwargs)
    plt.gcf().clear()
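
# A hypothetical call (file names are illustrative):
# plot_pspec("my_cube.pspec.pkl", save_fig="my_cube_pspec.pdf", refit=True)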
Example #7
def pspec_noise(cube="../subcubes/c18o_jrf_l1641n.fits",
                vel_low=12 * u.km / u.s,
                vel_hi=16 * u.km / u.s,
                run_kwargs={},
                pspec_file="pspec_noise_c18o_jrf_l1641n"):
    # Accept either a file name or an already-loaded cube.
    if not isinstance(cube, SpectralCube):
        cube = SpectralCube.read(cube)

    noise_hdu = cube.spectral_slab(vel_low, vel_hi).moment0().hdu
    noise_pspec = PowerSpectrum(noise_hdu)
    noise_pspec.run(**run_kwargs)

    noise_pspec.save_results(pspec_file)

    return noise_pspec
Example #8
        vca = VCA(cube_dsamp).run(low_cut=1 / (100 * u.pix),
                                  high_cut=(1 / 4.) / u.pix,
                                  fit_2D_kwargs={"fix_ellip_params": True},
                                  verbose=False,
                                  fit_2D=True)
        # plt.draw()
        # input((sl_pix, vca.slope, vca.slope2D, vca.ellip2D, vca.ellip2D_err))
        # plt.clf()
        vca_slopes.append(vca.slope)
        vca_slopes_2D.append(vca.slope2D)
        vel_size.append(np.abs(np.diff(cube_dsamp.spectral_axis.value))[:1])

    # pspec = PowerSpectrum(mom0).run(low_cut=1 / (64 * u.pix),
    #                                 high_cut=0.5 / u.pix, verbose=False)
    pspec = PowerSpectrum(mom0).run(low_cut=1 / (100 * u.pix),
                                    high_cut=(1 / 4.) / u.pix,
                                    fit_2D_kwargs={"fix_ellip_params": True},
                                    verbose=False)

    outputs['VCA'].append(vca_slopes)
    outputs['VCA_2D'].append(vca_slopes_2D)

    outputs['pspec'].append(pspec.slope)
    outputs['pspec_2D'].append(pspec.slope2D)

    vel_size.append(np.ptp(cube.spectral_axis.value))

    axes[0].semilogx(vel_size,
                     vca_slopes + [pspec.slope],
                     '-',
                     label='{}'.format(rep + 1),
                     marker=markers[rep])
Example #9
    rnoise_img = make_extended(256, powerlaw=3.)

    pixel_scale = 3 * u.arcsec
    beamfwhm = 3 * u.arcsec
    imshape = rnoise_img.shape
    restfreq = 1.4 * u.GHz
    bunit = u.K

    plaw_hdu = create_fits_hdu(rnoise_img, pixel_scale, beamfwhm, imshape,
                               restfreq, bunit)

    plt.imshow(plaw_hdu.data, cmap='viridis')
    plt.savefig(osjoin(fig_path, "rednoise_slope3_img.png"))
    plt.close()

    pspec = PowerSpectrum(plaw_hdu)
    pspec.run(verbose=True,
              radial_pspec_kwargs={'binsize': 1.0},
              fit_kwargs={'weighted_fit': False},
              fit_2D=False,
              low_cut=1. / (60 * u.pix),
              save_name=osjoin(fig_path, "rednoise_pspec_slope3.png"))

    pspec_partial = PowerSpectrum(rnoise_img[:128, :128],
                                  header=plaw_hdu.header)
    pspec_partial.run(verbose=False, fit_2D=False, low_cut=1 / (60. * u.pix))
    plt.imshow(np.log10(pspec_partial.ps2D))
    plt.savefig(osjoin(fig_path, "rednoise_pspec_slope3_2D_slicecross.png"))
    plt.close()

    pspec2 = PowerSpectrum(plaw_hdu)
Example #10
mvc.run()

mvc_val = mvc.ps1D
mvc_slope = mvc.slope
mvc_slope2D = mvc.slope2D

# Spatial Power Spectrum/ Bispectrum

from turbustat.statistics import (PSpec_Distance, Bispectrum_Distance,
                                  Bispectrum, PowerSpectrum)

pspec_distance = \
    PSpec_Distance(dataset1["moment0"],
                   dataset2["moment0"]).distance_metric()

pspec = PowerSpectrum(dataset1['moment0'])
pspec.run()

pspec_val = pspec.ps1D
pspec_slope = pspec.slope
pspec_slope2D = pspec.slope2D

bispec_distance = \
    Bispectrum_Distance(dataset1["moment0"],
                        dataset2["moment0"]).distance_metric()

bispec_val = bispec_distance.bispec1.bicoherence

azimuthal_slice = bispec_distance.bispec1.azimuthal_slice(16, 10,
                                                          value='bispectrum_logamp',
                                                          bin_width=5 * u.deg)
Example #11
slopes_wrap = []
slopes_wrap_err = []
slopes_pspec = []
slopes_pspec_err = []
slopes_wave = []
slopes_wave_err = []

size = 256

for slope in np.arange(0.5, 4.5, 0.5):
    test_img = fits.PrimaryHDU(make_extended(size, powerlaw=slope))
    # The power-law behaviour continues up to ~1/4 of the size
    delvar = DeltaVariance(test_img).run(xlow=3 * u.pix,
                                         xhigh=0.25 * size * u.pix,
                                         boundary='wrap')
    slopes_wrap.append(delvar.slope)
    slopes_wrap_err.append(delvar.slope_err)

    pspec = PowerSpectrum(test_img)
    pspec.run(fit_2D=False, radial_pspec_kwargs={'binsize': 2.0},
              fit_kwargs={'weighted_fit': False},
              low_cut=1 / (15 * u.pix),
              verbose=False)
    # plt.draw()
    # input("{0} {1}?".format(slope, - pspec.slope))
    # plt.clf()
    slopes_pspec.append(-pspec.slope)
    slopes_pspec_err.append(pspec.slope_err)

    wave = Wavelet(test_img).run(xhigh=0.15 * size * u.pix,
                                 xlow=0.02 * size * u.pix)
    slopes_wave.append(wave.slope)
    slopes_wave_err.append(wave.slope_err)
Example #12
import astropy.units as u

plt.rcParams['axes.unicode_minus'] = False

size = 512
slope = 3

test_img = fits.PrimaryHDU(
    make_extended(size,
                  powerlaw=slope,
                  ellip=0.4,
                  theta=(60 * u.deg).to(u.rad),
                  randomseed=345987))

# The power-law behaviour continues up to ~1/4 of the size
pspec = PowerSpectrum(test_img)
pspec.run(fit_2D=True,
          radial_pspec_kwargs={'binsize': 2.0},
          fit_kwargs={'weighted_fit': False},
          low_cut=1 / (15 * u.pix),
          verbose=False)

print("{0}+/-{1}".format(pspec.slope, pspec.slope_err))
print("{0}+/-{1}".format(pspec.slope2D, pspec.slope2D_err))
print("{0}+/-{1}".format(pspec.ellip2D, pspec.ellip2D_err))
print("{0}+/-{1}".format(pspec.theta2D, pspec.theta2D_err))

# pspec.plot_fit(show_2D=True)

width = 8.75
# fig_ratio = (4.4 / 6.4) / 2
Example #13
    pdf_mom0.run(verbose=True, model=stats.pareto, fit_type='mle', floc=False,
                 save_name=osjoin(fig_path, "pdf_design4_mom0_plaw.png"))

    cube = SpectralCube.read(osjoin(data_path, "Design4_flatrho_0021_00_radmc.fits"))
    pdf_cube = PDF(cube)
    pdf_cube.run(verbose=True, do_fit=False,
                 save_name=osjoin(fig_path, "pdf_design4.png"))

# PSpec
if run_pspec:

    from turbustat.statistics import PowerSpectrum

    moment0 = fits.open(osjoin(data_path, "Design4_flatrho_0021_00_radmc_moment0.fits"))[0]

    pspec = PowerSpectrum(moment0, distance=250 * u.pc)
    pspec.run(verbose=True, xunit=u.pix**-1,
              save_name=osjoin(fig_path, "design4_pspec.png"))

    pspec.run(verbose=True, xunit=u.pix**-1,
              low_cut=0.025 / u.pix, high_cut=0.1 / u.pix,
              save_name=osjoin(fig_path, "design4_pspec_limitedfreq.png"))

    print(pspec.slope2D, pspec.slope2D_err)
    print(pspec.ellip2D, pspec.ellip2D_err)
    print(pspec.theta2D, pspec.theta2D_err)

    # How about fitting a break?
    pspec = PowerSpectrum(moment0, distance=250 * u.pc)
    pspec.run(verbose=True, xunit=u.pc**-1,
              low_cut=0.025 / u.pix, high_cut=0.4 / u.pix, fit_2D=False,
Example #14
def generate_unitvals():

    import numpy as np
    import astropy.units as u

    # The machine producing these values should have emcee installed!
    try:
        import emcee
    except ImportError:
        raise ImportError("Install emcee to generate unit test data.")

    from turbustat.tests._testing_data import dataset1, dataset2

    # Wavelet Transform

    from turbustat.statistics import Wavelet_Distance, Wavelet

    wavelet_distance = \
        Wavelet_Distance(dataset1["moment0"],
                         dataset2["moment0"]).distance_metric()

    wavelet_val = wavelet_distance.wt1.values
    wavelet_slope = wavelet_distance.wt1.slope

    # Wavelet with a break
    wave_break = Wavelet(dataset1['moment0']).run(xhigh=7 * u.pix,
                                                  brk=5.5 * u.pix)

    wavelet_slope_wbrk = wave_break.slope
    wavelet_brk_wbrk = wave_break.brk.value

    # MVC

    from turbustat.statistics import MVC_Distance, MVC

    mvc_distance = MVC_Distance(dataset1, dataset2).distance_metric()

    mvc = MVC(dataset1["centroid"], dataset1["moment0"], dataset1["linewidth"])
    mvc.run()

    mvc_val = mvc.ps1D
    mvc_slope = mvc.slope
    mvc_slope2D = mvc.slope2D

    # Spatial Power Spectrum/ Bispectrum

    from turbustat.statistics import (PSpec_Distance, Bispectrum_Distance,
                                      Bispectrum, PowerSpectrum)

    pspec_distance = \
        PSpec_Distance(dataset1["moment0"],
                       dataset2["moment0"]).distance_metric()

    pspec = PowerSpectrum(dataset1['moment0'])
    pspec.run()

    pspec_val = pspec.ps1D
    pspec_slope = pspec.slope
    pspec_slope2D = pspec.slope2D

    bispec_distance = \
        Bispectrum_Distance(dataset1["moment0"],
                            dataset2["moment0"]).distance_metric()

    bispec_val = bispec_distance.bispec1.bicoherence

    azimuthal_slice = bispec_distance.bispec1.azimuthal_slice(
        16, 10, value='bispectrum_logamp', bin_width=5 * u.deg)
    bispec_azim_bins = azimuthal_slice[16][0]
    bispec_azim_vals = azimuthal_slice[16][1]
    bispec_azim_stds = azimuthal_slice[16][2]

    bispec_meansub = Bispectrum(dataset1['moment0'])
    bispec_meansub.run(mean_subtract=True)

    bispec_val_meansub = bispec_meansub.bicoherence

    # Genus

    from turbustat.statistics import GenusDistance, Genus

    smooth_scales = np.linspace(1.0, 0.1 * min(dataset1["moment0"][0].shape),
                                5)

    genus_distance = \
        GenusDistance(dataset1["moment0"],
                      dataset2["moment0"],
                      lowdens_percent=20,
                      genus_kwargs=dict(match_kernel=True)).distance_metric()

    # The distance method requires standardizing the data. Make a
    # separate version that isn't
    genus = Genus(dataset1['moment0'], smoothing_radii=smooth_scales)
    genus.run(match_kernel=True)

    genus_val = genus.genus_stats

    # Delta-Variance

    from turbustat.statistics import DeltaVariance_Distance, DeltaVariance

    delvar_distance = \
        DeltaVariance_Distance(dataset1["moment0"],
                               dataset2["moment0"],
                               weights1=dataset1["moment0_error"][0],
                               weights2=dataset2["moment0_error"][0],
                               delvar_kwargs=dict(xhigh=11 * u.pix))

    delvar_distance.distance_metric()

    delvar = DeltaVariance(dataset1["moment0"],
                           weights=dataset1['moment0_error'][0]).run(xhigh=11 *
                                                                     u.pix)

    delvar_val = delvar.delta_var
    delvar_slope = delvar.slope

    # Test with a break point
    delvar_wbrk = \
      DeltaVariance(dataset1["moment0"],
                    weights=dataset1['moment0_error'][0]).run(xhigh=11 * u.pix,
                                                              brk=6 * u.pix)

    delvar_slope_wbrk = delvar_wbrk.slope
    delvar_brk = delvar_wbrk.brk.value

    # Change boundary conditions

    delvar_fill = \
        DeltaVariance(dataset1["moment0"],
                      weights=dataset1['moment0_error'][0]).run(xhigh=11 * u.pix,
                                                                boundary='fill',
                                                                nan_treatment='interpolate')

    delvar_fill_val = delvar_fill.delta_var
    delvar_fill_slope = delvar_fill.slope

    # VCA/VCS

    from turbustat.statistics import VCA_Distance, VCS_Distance, VCA

    vcs_distance = VCS_Distance(dataset1["cube"],
                                dataset2["cube"],
                                fit_kwargs=dict(high_cut=0.3 / u.pix,
                                                low_cut=3e-2 / u.pix))
    vcs_distance.distance_metric()

    vcs_val = vcs_distance.vcs1.ps1D
    vcs_slopes = vcs_distance.vcs1.slope

    vca_distance = VCA_Distance(dataset1["cube"],
                                dataset2["cube"]).distance_metric()

    vca = VCA(dataset1['cube'])
    vca.run()

    vca_val = vca.ps1D
    vca_slope = vca.slope
    vca_slope2D = vca.slope2D

    # Tsallis

    from turbustat.statistics import Tsallis

    tsallis_kwargs = {"sigma_clip": 5, "num_bins": 100}

    tsallis = Tsallis(dataset1['moment0'], lags=[1, 2, 4, 8, 16] * u.pix)
    tsallis.run(periodic=True, **tsallis_kwargs)

    tsallis_val = tsallis.tsallis_params
    tsallis_stderrs = tsallis.tsallis_stderrs

    tsallis_noper = Tsallis(dataset1['moment0'], lags=[1, 2, 4, 8, 16] * u.pix)
    tsallis_noper.run(periodic=False, num_bins=100)

    tsallis_val_noper = tsallis_noper.tsallis_params

    # High-order stats

    from turbustat.statistics import StatMoments_Distance, StatMoments

    moment_distance = \
        StatMoments_Distance(dataset1["moment0"],
                             dataset2["moment0"]).distance_metric()

    kurtosis_val = moment_distance.moments1.kurtosis_hist[1]
    skewness_val = moment_distance.moments1.skewness_hist[1]

    # Save a few from the non-distance version
    tester = StatMoments(dataset1["moment0"])
    tester.run()

    kurtosis_nondist_val = tester.kurtosis_hist[1]
    skewness_nondist_val = tester.skewness_hist[1]

    # Non-periodic
    tester = StatMoments(dataset1["moment0"])
    tester.run(periodic=False)

    kurtosis_nonper_val = tester.kurtosis_hist[1]
    skewness_nonper_val = tester.skewness_hist[1]

    # PCA

    from turbustat.statistics import PCA_Distance, PCA
    pca_distance = PCA_Distance(dataset1["cube"],
                                dataset2["cube"]).distance_metric()

    pca = PCA(dataset1["cube"], distance=250 * u.pc)
    pca.run(mean_sub=True,
            eigen_cut_method='proportion',
            min_eigval=0.75,
            spatial_method='contour',
            spectral_method='walk-down',
            fit_method='odr',
            brunt_beamcorrect=False,
            spectral_output_unit=u.m / u.s)

    pca_val = pca.eigvals
    pca_spectral_widths = pca.spectral_width().value
    pca_spatial_widths = pca.spatial_width().value

    pca_fit_vals = {
        "index": pca.index,
        "gamma": pca.gamma,
        "intercept": pca.intercept().value,
        "sonic_length": pca.sonic_length()[0].value
    }

    # Now get those values using mcmc
    pca.run(mean_sub=True,
            eigen_cut_method='proportion',
            min_eigval=0.75,
            spatial_method='contour',
            spectral_method='walk-down',
            fit_method='bayes',
            brunt_beamcorrect=False,
            spectral_output_unit=u.m / u.s)

    pca_fit_vals["index_bayes"] = pca.index
    pca_fit_vals["gamma_bayes"] = pca.gamma
    pca_fit_vals["intercept_bayes"] = pca.intercept().value
    pca_fit_vals["sonic_length_bayes"] = pca.sonic_length()[0].value

    # Record the number of eigenvalues kept by the auto method
    pca.run(mean_sub=True,
            n_eigs='auto',
            min_eigval=0.001,
            eigen_cut_method='value',
            decomp_only=True)

    pca_fit_vals["n_eigs_value"] = pca.n_eigs

    # Now w/ the proportion of variance cut
    pca.run(mean_sub=True,
            n_eigs='auto',
            min_eigval=0.99,
            eigen_cut_method='proportion',
            decomp_only=True)

    pca_fit_vals["n_eigs_proportion"] = pca.n_eigs

    # SCF

    from turbustat.statistics import SCF_Distance, SCF

    scf_distance = SCF_Distance(dataset1["cube"], dataset2["cube"],
                                size=11).distance_metric()

    scf = SCF(dataset1['cube'], size=11).run()

    scf_val = scf.scf_surface
    scf_spectrum = scf.scf_spectrum
    scf_slope = scf.slope
    scf_slope2D = scf.slope2D

    # Now run the SCF when the boundaries aren't continuous
    scf_distance_cut_bound = SCF_Distance(dataset1["cube"],
                                          dataset2["cube"],
                                          size=11,
                                          boundary='cut').distance_metric()
    scf_val_noncon_bound = scf_distance_cut_bound.scf1.scf_surface

    scf_fitlims = SCF(dataset1['cube'], size=11)
    scf_fitlims.run(boundary='continuous', xlow=1.5 * u.pix, xhigh=4.5 * u.pix)

    scf_slope_wlimits = scf_fitlims.slope
    scf_slope_wlimits_2D = scf_fitlims.slope2D

    # Cramer Statistic

    from turbustat.statistics import Cramer_Distance

    cramer_distance = Cramer_Distance(
        dataset1["cube"], dataset2["cube"], noise_value1=0.1,
        noise_value2=0.1).distance_metric(normalize=False)

    cramer_val = cramer_distance.data_matrix1

    # Dendrograms

    from turbustat.statistics import Dendrogram_Distance, Dendrogram_Stats

    min_deltas = np.logspace(-1.5, 0.5, 40)

    dendro_distance = Dendrogram_Distance(
        dataset1["cube"], dataset2["cube"],
        min_deltas=min_deltas).distance_metric()

    dendrogram_val = dendro_distance.dendro1.numfeatures

    # With periodic boundaries
    dendro = Dendrogram_Stats(dataset1['cube'], min_deltas=min_deltas)
    dendro.run(periodic_bounds=True)

    dendrogram_periodic_val = dendro.numfeatures

    # PDF

    from turbustat.statistics import PDF_Distance

    pdf_distance = \
        PDF_Distance(dataset1["moment0"],
                     dataset2["moment0"],
                     min_val1=0.05,
                     min_val2=0.05,
                     weights1=dataset1["moment0_error"][0]**-2.,
                     weights2=dataset2["moment0_error"][0]**-2.,
                     do_fit=False,
                     normalization_type='standardize')

    pdf_distance.distance_metric()

    pdf_val = pdf_distance.PDF1.pdf
    pdf_ecdf = pdf_distance.PDF1.ecdf
    pdf_bins = pdf_distance.bins

    # Do a fitted version of the PDF pca
    pdf_fit_distance = \
        PDF_Distance(dataset1["moment0"],
                     dataset2["moment0"],
                     min_val1=0.05,
                     min_val2=0.05,
                     do_fit=True,
                     normalization_type=None)

    pdf_fit_distance.distance_metric()

    np.savez_compressed('checkVals',
                        wavelet_val=wavelet_val,
                        wavelet_slope=wavelet_slope,
                        wavelet_slope_wbrk=wavelet_slope_wbrk,
                        wavelet_brk_wbrk=wavelet_brk_wbrk,
                        mvc_val=mvc_val,
                        mvc_slope=mvc_slope,
                        mvc_slope2D=mvc_slope2D,
                        pspec_val=pspec_val,
                        pspec_slope=pspec_slope,
                        pspec_slope2D=pspec_slope2D,
                        bispec_val=bispec_val,
                        bispec_azim_bins=bispec_azim_bins,
                        bispec_azim_vals=bispec_azim_vals,
                        bispec_azim_stds=bispec_azim_stds,
                        bispec_val_meansub=bispec_val_meansub,
                        genus_val=genus_val,
                        delvar_val=delvar_val,
                        delvar_slope=delvar_slope,
                        delvar_slope_wbrk=delvar_slope_wbrk,
                        delvar_brk=delvar_brk,
                        delvar_fill_val=delvar_fill_val,
                        delvar_fill_slope=delvar_fill_slope,
                        vcs_val=vcs_val,
                        vcs_slopes=vcs_slopes,
                        vca_val=vca_val,
                        vca_slope=vca_slope,
                        vca_slope2D=vca_slope2D,
                        tsallis_val=tsallis_val,
                        tsallis_stderrs=tsallis_stderrs,
                        tsallis_val_noper=tsallis_val_noper,
                        kurtosis_val=kurtosis_val,
                        skewness_val=skewness_val,
                        kurtosis_nondist_val=kurtosis_nondist_val,
                        skewness_nondist_val=skewness_nondist_val,
                        kurtosis_nonper_val=kurtosis_nonper_val,
                        skewness_nonper_val=skewness_nonper_val,
                        pca_val=pca_val,
                        pca_fit_vals=pca_fit_vals,
                        pca_spectral_widths=pca_spectral_widths,
                        pca_spatial_widths=pca_spatial_widths,
                        scf_val=scf_val,
                        scf_slope_wlimits=scf_slope_wlimits,
                        scf_slope_wlimits_2D=scf_slope_wlimits_2D,
                        scf_val_noncon_bound=scf_val_noncon_bound,
                        scf_spectrum=scf_spectrum,
                        scf_slope=scf_slope,
                        scf_slope2D=scf_slope2D,
                        cramer_val=cramer_val,
                        dendrogram_val=dendrogram_val,
                        dendrogram_periodic_val=dendrogram_periodic_val,
                        pdf_val=pdf_val,
                        pdf_bins=pdf_bins,
                        pdf_ecdf=pdf_ecdf)

    np.savez_compressed(
        'computed_distances',
        mvc_distance=mvc_distance.distance,
        pca_distance=pca_distance.distance,
        vca_distance=vca_distance.distance,
        pspec_distance=pspec_distance.distance,
        scf_distance=scf_distance.distance,
        wavelet_distance=wavelet_distance.distance,
        delvar_curve_distance=delvar_distance.curve_distance,
        delvar_slope_distance=delvar_distance.slope_distance,
        # tsallis_distance=tsallis_distance.distance,
        kurtosis_distance=moment_distance.kurtosis_distance,
        skewness_distance=moment_distance.skewness_distance,
        cramer_distance=cramer_distance.distance,
        genus_distance=genus_distance.distance,
        vcs_distance=vcs_distance.distance,
        bispec_mean_distance=bispec_distance.mean_distance,
        bispec_surface_distance=bispec_distance.surface_distance,
        dendrohist_distance=dendro_distance.histogram_distance,
        dendronum_distance=dendro_distance.num_distance,
        pdf_hellinger_distance=pdf_distance.hellinger_distance,
        pdf_ks_distance=pdf_distance.ks_distance,
        pdf_lognorm_distance=pdf_fit_distance.lognormal_distance)
Example #15
    cube = SpectralCube.read(
        osjoin(data_path, "Design4_flatrho_0021_00_radmc.fits"))
    pdf_cube = PDF(cube)
    pdf_cube.run(verbose=True,
                 do_fit=False,
                 save_name=osjoin(fig_path, "pdf_design4.png"))

# PSpec
if run_pspec:

    from turbustat.statistics import PowerSpectrum

    moment0 = fits.open(
        osjoin(data_path, "Design4_flatrho_0021_00_radmc_moment0.fits"))[0]

    pspec = PowerSpectrum(moment0, distance=250 * u.pc)
    pspec.run(verbose=True,
              xunit=u.pix**-1,
              save_name=osjoin(fig_path, "design4_pspec.png"))

    pspec.run(verbose=True,
              xunit=u.pix**-1,
              low_cut=0.02 / u.pix,
              high_cut=0.1 / u.pix,
              save_name=osjoin(fig_path, "design4_pspec_limitedfreq.png"))

    print(pspec.slope2D, pspec.slope2D_err)
    print(pspec.ellip2D, pspec.ellip2D_err)
    print(pspec.theta2D, pspec.theta2D_err)

    # How about fitting a break?
Example #16
    cube = SpectralCube.read(
        osjoin(data_path, "Design4_flatrho_0021_00_radmc.fits"))
    pdf_cube = PDF(cube)
    pdf_cube.run(verbose=True,
                 do_fit=False,
                 save_name=osjoin(fig_path, "pdf_design4.png"))

# PSpec
if run_pspec:

    from turbustat.statistics import PowerSpectrum

    moment0 = fits.open(
        osjoin(data_path, "Design4_flatrho_0021_00_radmc_moment0.fits"))[0]

    pspec = PowerSpectrum(moment0, distance=250 * u.pc)
    pspec.run(verbose=True,
              xunit=u.pix**-1,
              save_name=osjoin(fig_path, "design4_pspec.png"))

    pspec.run(verbose=True,
              xunit=u.pix**-1,
              low_cut=0.025 / u.pix,
              high_cut=0.1 / u.pix,
              save_name=osjoin(fig_path, "design4_pspec_limitedfreq.png"))

    print(pspec.slope2D, pspec.slope2D_err)
    print(pspec.ellip2D, pspec.ellip2D_err)
    print(pspec.theta2D, pspec.theta2D_err)

    # How about fitting a break?
Example #17
    rnoise_img = make_extended(256, powerlaw=3.)

    pixel_scale = 3 * u.arcsec
    beamfwhm = 3 * u.arcsec
    imshape = rnoise_img.shape
    restfreq = 1.4 * u.GHz
    bunit = u.K

    plaw_hdu = create_fits_hdu(rnoise_img, pixel_scale, beamfwhm, imshape,
                               restfreq, bunit)

    plt.imshow(plaw_hdu.data, cmap='viridis')
    plt.savefig(osjoin(fig_path, "rednoise_slope3_img.png"))
    plt.close()

    pspec = PowerSpectrum(plaw_hdu)
    pspec.run(verbose=True, radial_pspec_kwargs={'binsize': 1.0},
              fit_kwargs={'weighted_fit': False}, fit_2D=False,
              low_cut=1. / (60 * u.pix),
              save_name=osjoin(fig_path, "rednoise_pspec_slope3.png"))

    pspec_partial = PowerSpectrum(rnoise_img[:128, :128], header=plaw_hdu.header)
    pspec_partial.run(verbose=False, fit_2D=False, low_cut=1 / (60. * u.pix))
    plt.imshow(np.log10(pspec_partial.ps2D))
    plt.savefig(osjoin(fig_path, "rednoise_pspec_slope3_2D_slicecross.png"))
    plt.close()

    pspec2 = PowerSpectrum(plaw_hdu)
    pspec2.run(verbose=False, radial_pspec_kwargs={'binsize': 1.0},
               fit_kwargs={'weighted_fit': False}, fit_2D=False,
               low_cut=1. / (60 * u.pix),
Example #18
do_fitpspec_doub_aco = False

do_makepspec_co = False
do_fitpspec_co = False

do_makepspec_dust_smooth = False
do_fitpspec_dust_smooth = True

if do_makepspec:

    pspec_name = osjoin(data_path, 'M31_CO', 'm33_hi_co_dustSD.pspec.pkl')

    gas_sd = hi_proj * hi_mass_conversion + co10_mass_conversion * co_proj
    gas_sd[np.isnan(hi_proj)] = np.NaN

    pspec = PowerSpectrum(gas_sd, distance=720 * u.kpc)

    pspec.run(verbose=False, fit_2D=False, high_cut=10**-1.3 / u.pix)
    # pspec.plot_fit()

    pspec.save_results(pspec_name)

# Fit the pspec.
if do_fitpspec:

    pspec = PowerSpectrum.load_results(pspec_name)
    pspec.load_beam()

    beam_size = pspec._beam.major.to(u.deg) / pspec._ang_size.to(u.deg)
    beam_size = beam_size.value
    beam_gauss_width = beam_size / np.sqrt(8 * np.log(2))
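    # The sqrt(8 ln 2) factor converts the beam FWHM to the equivalent Gaussian
    # width (sigma), both expressed as a fraction of the map's angular size.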
Example #19
data_path = os.path.expanduser("~/bigdata/ekoch/Utomo19_LGdust/")

hi_name = osjoin(data_path,
                 "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked.moment0_Kkms.fits")

hi_pspec_name = f"{hi_name.rstrip('fits')}pspec.pkl"
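# Note: str.rstrip('fits') strips any trailing 'f'/'i'/'t'/'s' characters rather
# than the literal suffix; it happens to give the intended name here. A more
# robust spelling (Python 3.9+) would be, e.g.:
# hi_pspec_name = hi_name.removesuffix('fits') + 'pspec.pkl'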


hi_pspec_name_conv = f"{hi_name.rstrip('fits')}conv.pspec.pkl"


if do_makepspec:

    hdu = fits.open(hi_name)

    pspec = PowerSpectrum(hdu[0], distance=840 * u.kpc)

    pspec.run(verbose=False, fit_2D=False)

    pspec.save_results(hi_pspec_name)

if do_fitpspec:

    # Fit the same as the dust column density model

    plot_folder = osjoin(data_path, "{}_plots".format(gal))
    if not os.path.exists(plot_folder):
        os.mkdir(plot_folder)

    nsamp = 6000
        #     input("{}".format(filename))
        #     plt.close()

        # if res_type == 'orig':
        #     save_name = "{0}_{1}_mjysr.pspec.pkl".format(gal.lower(), name)
        # else:
        #     save_name = "{0}_{1}_{2}_mjysr.pspec.pkl".format(gal.lower(), name, res_type)

        # For now skip already saved power-spectra
        # if os.path.exists(osjoin(data_path, 'raw', save_name)) and skip_check:
        #     print("Already saved pspec for {}. Skipping".format(filename))
        #     continue
        # else:
        #     os.system("rm -f {}".format(osjoin(data_path, 'raw', save_name)))

        pspec = PowerSpectrum(proj)  # , distance=dist)
        pspec.run(verbose=False, beam_correct=False, fit_2D=False,
                  high_cut=0.1 / u.pix,
                  use_pyfftw=False, threads=1,)
                  # apodize_kernel=fitinfo_dict[name]['apod_kern'])

        # Plot 2D spec, 1D pspec, img

        im0 = ax[0].imshow(np.log10(pspec.ps2D), origin='lower', cmap='viridis')
        fig.colorbar(im0, ax=ax[0])

        # Convert to angular units
        xunit = u.arcsec**-1
        ang_freqs = pspec._spatial_freq_unit_conversion(pspec.freqs, xunit).value

        ax[1].loglog(ang_freqs, pspec.ps1D)
Example #21
            continue
        else:
            os.system("rm {}".format(osjoin(data_path, gal, save_name)))

        # We also want to account for the shape of the masked data
        # (mostly for M31)
        # norm_mask = np.isfinite(proj_coldens).astype(np.float) / \
        #     np.isfinite(proj_coldens).sum()
        # pspec_mask = PowerSpectrum(fits.PrimaryHDU(norm_mask, proj_coldens.header),
        #                            distance=fitinfo_dict[gal]['distance'])
        # pspec_mask.run(verbose=False, beam_correct=False, fit_2D=False,
        #                high_cut=0.1 / u.pix,
        #                use_pyfftw=True, threads=ncores,
        #                apodize_kernel=fitinfo_dict[gal]['apod_kern'])

        pspec = PowerSpectrum(proj_coldens,
                              distance=fitinfo_dict[gal]['distance'])
        pspec.compute_pspec(use_pyfftw=True,
                            threads=ncores,
                            apodize_kernel=fitinfo_dict[gal]['apod_kern'])
        # Divide out the normalized mask pspec
        # pspec._ps2D /= pspec_mask.ps2D
        pspec.compute_radial_pspec()
        # pspec.fit_pspec()  # high_cut=0.1 / u.pix,)
        # pspec.run(verbose=False, beam_correct=False, fit_2D=False,
        #           high_cut=0.1 / u.pix,
        #           use_pyfftw=True, threads=ncores,
        #           apodize_kernel=fitinfo_dict[gal]['apod_kern'])

        if make_interactive:
            print(pspec.slope)
            pspec.plot_fit(show_residual=False, show_2D=True)
Example #22
            # Use broken plaw for MIPS 24 LMC

            if band == 'mips24' and gal == 'LMC':
                fit_params = df_bplaw.loc[
                    f"{gal.lower()}_{band}_{res_type}_mean"]
            else:
                fit_params = df.loc[f"{gal.lower()}_{band}_{res_type}"]

            if res_type == 'orig':
                filename = "{0}_{1}_mjysr.pspec.pkl".format(gal.lower(), band)
            else:
                filename = "{0}_{1}_{2}_mjysr.pspec.pkl".format(
                    gal.lower(), band, res_type)

            pspec = PowerSpectrum.load_results(osjoin(data_path, gal,
                                                      filename))
            pspec.load_beam()

            beam_size = pspec._beam.major.to(u.deg) / pspec._ang_size.to(u.deg)
            beam_size = beam_size.value
            beam_gauss_width = beam_size / np.sqrt(8 * np.log(2))

            if fitinfo_dict[gal][band]['high_cut'] is not None:
                high_cut = fitinfo_dict[gal][band]['high_cut']

            else:
                high_cut = (1 / (beam_gauss_width * 3.))

            # Fit on scales > 3 pixels to avoid flattening from pixelization
            # fit_mask = pspec.freqs.value < 1 / 3.
            # fit_mask = pspec.freqs.value < 0.1
Example #23
        # Now open the kernel file
        kernfits_name = names[name][0]
        kernfits_ext = names[name][1]

        kernel_filename = osjoin(kern_path, kernfits_name)

        kern_proj = Projection.from_hdu(
            fits.open(kernel_filename)[kernfits_ext])

        img_scale = np.abs(proj_plane_pixel_scales(proj.wcs))[0]
        kern_scale = np.abs(proj_plane_pixel_scales(kern_proj.wcs))[0]

        kernel = resize_psf(kern_proj.value, kern_scale, img_scale)

        # Normalize to make a kernel
        kernel /= kernel.sum()

        kern_pspec = PowerSpectrum((kernel, kern_proj.header))
        kern_pspec.run(verbose=False, fit_2D=False)

        save_name = "{0}_kernel_{1}.pspec.pkl".format(name, gal.lower())

        kern_pspec.save_results(osjoin(data_path, gal, save_name),
                                keep_data=True)

        # plt.draw()

        # input("?")

        # # plt.clf()
        # plt.close()
                save_name = "{0}_{1}_{2}_mjysr.pspec.pkl".format(gal.lower(),
                                                                 name, slice_name)
            else:
                save_name = "{0}_{1}_{2}_{3}_mjysr.pspec.pkl".format(gal.lower(),
                                                                     name,
                                                                     res_type,
                                                                     slice_name)

            # For now skip already saved power-spectra
            if os.path.exists(osjoin(data_path, gal, save_name)) and skip_check:
                print("Already saved pspec for {}. Skipping".format(filename))
                continue
            else:
                os.system("rm -f {}".format(osjoin(data_path, gal, save_name)))

            pspec = PowerSpectrum(proj[slicer], distance=dist)
            pspec.run(verbose=False, beam_correct=False, fit_2D=False,
                      high_cut=0.1 / u.pix,
                      use_pyfftw=True, threads=ncores,
                      apodize_kernel='tukey', alpha=0.3)

            pspec.save_results(osjoin(data_path, gal, save_name),
                               keep_data=False)


if do_fit_pspec:

    # Load model functions
    repo_path = os.path.expanduser("~/ownCloud/project_code/DustyPowerSpectra/")
    code_name = os.path.join(repo_path, "models.py")
    exec(compile(open(code_name, "rb").read(), code_name, 'exec'))
    if gal == 'M31':
        # Add 180 deg to the PA
        gal_obj.position_angle += 180 * u.deg

    filename = osjoin(data_path, gal, fitinfo_dict[gal]['filename'])

    hdu = fits.open(filename)

    proj = Projection.from_hdu(hdu)

    # Run on the original image
    filename = "{0}_{1}_{2}_mjysr.pspec.pkl".format(gal.lower(), 'spire500',
                                                    res_type)

    pspec = PowerSpectrum.load_results(osjoin(data_path, gal, filename))
    pspec.load_beam()

    # Deproject the image AND the beam

    deproj_img = deproject(proj, proj.header, gal_obj, inc_correction=True)

    # Cut this image down to the minimal size
    deproj_img = deproj_img[nd.find_objects(np.isfinite(deproj_img))[0]]

    # Just keep the original header. Doesn't matter until the
    # conversion to phys units
    deproj_img_hdu = fits.PrimaryHDU(deproj_img, proj.header)

    # The beam is symmetric so we only need to warp it to match the PSF shape
    # in the deprojected frame
Example #26
mvc.run()

mvc_val = mvc.ps1D
mvc_slope = mvc.slope
mvc_slope2D = mvc.slope2D

# Spatial Power Spectrum/ Bispectrum

from turbustat.statistics import (PSpec_Distance, BiSpectrum_Distance,
                                  BiSpectrum, PowerSpectrum)

pspec_distance = \
    PSpec_Distance(dataset1["moment0"],
                   dataset2["moment0"]).distance_metric()

pspec = PowerSpectrum(dataset1['moment0'])
pspec.run()

pspec_val = pspec.ps1D
pspec_slope = pspec.slope
pspec_slope2D = pspec.slope2D

bispec_distance = \
    BiSpectrum_Distance(dataset1["moment0"],
                        dataset2["moment0"]).distance_metric()

bispec_val = bispec_distance.bispec1.bicoherence

bispec_meansub = BiSpectrum(dataset1['moment0'])
bispec_meansub.run(mean_subtract=True)
Example #27
gals = {'M33': 840 * u.kpc}

df = pd.read_csv(osjoin(data_path, "pspec_coldens_fit_results.csv"),
                 index_col=0)
df_hi = pd.read_csv(osjoin(data_path, "pspec_hi_conv_m33_fit_results.csv"),
                    index_col=0)

# Open the pspec files
hi_name = osjoin(
    data_path,
    "M33_14B-088_HI.clean.image.GBT_feathered.pbcov_gt_0.5_masked.moment0_Kkms.fits"
)

hi_pspec_name = f"{hi_name.rstrip('fits')}conv.pspec.pkl"

hi_pspec = PowerSpectrum.load_results(hi_pspec_name)
hi_pspec.load_beam()

dust_pspec_name = osjoin(data_path, 'M33', "m33_coldens.pspec.pkl")

dust_pspec = PowerSpectrum.load_results(dust_pspec_name)
dust_pspec.load_beam()

dust_norm = dust_pspec.ps1D.max()
hi_norm = hi_pspec.ps1D.max()

fig = plt.figure()

ax = fig.add_subplot(111)

gal = 'M33'
Example #28
            mom0_cutout = mom0[cutout_slice].copy()

            if (np.isnan(mom0_cutout).sum() / float(mom0_cutout.size)) > 0.1:
                fill_nan(fit_results)
                continue

            # Check to see if the pspec file already exists:
            save_pspec_name = osjoin(
                pspec_posn_folder,
                f"{fitinfo_dict[gal]['filename'].rstrip('fits')}_y_{y}_x_{x}_pspec.pkl"
            )

            if not os.path.exists(save_pspec_name):

                pspec = PowerSpectrum(mom0_cutout.hdu,
                                      beam=mom0.beam,
                                      distance=fitinfo_dict[gal]['distance'])
                pspec.compute_pspec(
                    use_pyfftw=False,
                    threads=ncores,
                    apodize_kernel=fitinfo_dict[gal]['apod_kern'],
                    alpha=alpha)
                pspec.compute_radial_pspec()

                pspec.save_results(save_pspec_name)

            else:

                pspec = PowerSpectrum.load_results(save_pspec_name)
                pspec.load_beam()
Example #29
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import astropy.units as u

plt.rcParams['axes.unicode_minus'] = False

size = 512
slope = 3

test_img = fits.PrimaryHDU(make_extended(size, powerlaw=slope,
                                         ellip=0.4,
                                         theta=(60 * u.deg).to(u.rad),
                                         randomseed=345987))

# The power-law behaviour continues up to ~1/4 of the size
pspec = PowerSpectrum(test_img)
pspec.run(fit_2D=True, radial_pspec_kwargs={'binsize': 2.0},
          fit_kwargs={'weighted_fit': False},
          low_cut=1 / (15 * u.pix),
          verbose=False)

print("{0}+/-{1}".format(pspec.slope, pspec.slope_err))
print("{0}+/-{1}".format(pspec.slope2D, pspec.slope2D_err))
print("{0}+/-{1}".format(pspec.ellip2D, pspec.ellip2D_err))
print("{0}+/-{1}".format(pspec.theta2D, pspec.theta2D_err))

# pspec.plot_fit(show_2D=True)

width = 8.75
# fig_ratio = (4.4 / 6.4) / 2
height = 5.07
Example #30
    proj_coldens = hdu_coldens[0].data[0][nd.find_objects(coldens_mask)[0]]

    hi_satpt = 10.  # Msol / pc^2
    # hi_satpt = 15.  # Msol / pc^2

    # dense gas to dust conversion of 340 for LMC from Roman-Duval+2014

    gdr = fitinfo_dict[gal]['GDR']

    dust_satpt = (hi_satpt / gdr) / fitinfo_dict[gal]['cosinc'].value
    sat_mask = proj_coldens >= dust_satpt

    dust_sat = proj_coldens.copy()
    dust_sat[sat_mask] = dust_satpt

    pspec = PowerSpectrum(fits.PrimaryHDU(proj_coldens, hdr),
                          distance=fitinfo_dict[gal]['distance'])
    pspec.run(verbose=False, fit_2D=False)

    pspec_sat = PowerSpectrum(fits.PrimaryHDU(dust_sat, hdr),
                              distance=fitinfo_dict[gal]['distance'])
    pspec_sat.run(verbose=False, fit_2D=False)

    beam_size = pspec_sat._beam.major.to(u.deg) / pspec_sat._ang_size.to(u.deg)
    beam_size = beam_size.value
    beam_gauss_width = beam_size / np.sqrt(8 * np.log(2))

    high_cut = (1 / (beam_gauss_width * 3.))

    fit_mask = pspec_sat.freqs.value < high_cut

    # And cut out the largest scales due to expected deviations with
Example #31
    filename = osjoin(data_path, gal, fitinfo_dict[gal]['filename'])

    hdu_coldens = fits.open(filename)

    proj_coldens = Projection.from_hdu(
        fits.PrimaryHDU(hdu_coldens[0].data[0].squeeze(),
                        hdu_coldens[0].header))

    # Get minimal size
    proj_coldens = proj_coldens[nd.find_objects(np.isfinite(proj_coldens))[0]]

    proj_coldens = proj_coldens.with_beam(fitinfo_dict[gal]['beam'])

    # Run on the original image

    pspec = PowerSpectrum(proj_coldens, distance=fitinfo_dict[gal]['distance'])
    pspec.run(
        verbose=False,
        beam_correct=False,
        fit_2D=False,
        high_cut=0.1 / u.pix,
        use_pyfftw=True,
        threads=ncores,
        # radial_pspec_kwargs={"theta_0": gal_obj.position_angle + 90*u.deg, "delta_theta": 50 * u.deg},
        # radial_pspec_kwargs={"logspacing": True, 'binsize': 10.},
        apodize_kernel=fitinfo_dict[gal]['apod_kern'])

    # Deproject the image AND the beam

    deproj_img = deproject(proj_coldens,
                           proj_coldens.header,
Example #32
df = pd.read_csv(osjoin(data_path, "pspec_spire500_deproj_fit_results.csv"), index_col=0)

fig, axes = plt.subplots(1, 2)

for i, (gal, ax) in enumerate(zip(gals, axes.ravel())):

    for ch in ['orig', 'dep']:

        fit_params = df.loc[f"{gal.lower()}_{ch}"]

        if ch == 'orig':
            filename = "{0}_spire500_mod_mjysr.pspec.pkl".format(gal.lower())
        else:
            filename = "{0}_spire500_mod_mjysr_deproj.pspec.pkl".format(gal.lower())

        pspec = PowerSpectrum.load_results(osjoin(data_path, gal, filename))
        pspec.load_beam()

        beam_size = pspec._beam.major.to(u.deg) / pspec._ang_size.to(u.deg)
        beam_size = beam_size.value
        beam_gauss_width = beam_size / np.sqrt(8 * np.log(2))

        high_cut = (1 / (beam_gauss_width * 1.5))
        fit_mask = pspec.freqs.value < high_cut

        # And cut out the largest scales due to expected deviations with
        # small stddev
        fit_mask[:2] = False

        freqs = pspec.freqs.value[fit_mask]
        beam_freqs = pspec.freqs.value[fit_mask]
Example #33
            out_filename = "{}_cutout.fits".format(filename.rstrip(".fits"))

            if not os.path.exists(osjoin(data_path, gal, out_filename)):
                proj.write(osjoin(data_path, gal, out_filename))

            save_name = f"{out_filename.rstrip('.fits')}.pspec.pkl"

            # For now skip already saved power-spectra
            if os.path.exists(osjoin(data_path, gal,
                                     save_name)) and skip_check:
                print("Already saved pspec for {}. Skipping".format(filename))
                continue
            else:
                os.system("rm -f {}".format(osjoin(data_path, gal, save_name)))

            pspec = PowerSpectrum(proj, distance=dist)
            pspec.run(verbose=False,
                      beam_correct=False,
                      fit_2D=False,
                      high_cut=0.1 / u.pix,
                      use_pyfftw=True,
                      threads=ncores,
                      apodize_kernel=fitinfo_dict[gal][name]['apod_kern'])

            pspec.save_results(osjoin(data_path, gal, save_name),
                               keep_data=False)

            del pspec, proj, hdu

if run_fits:
Example #34
plt.subplot(324, sharex=ax1)
_ = plt.hist(diff[np.isfinite(diff)])
plt.title("Diff in feathers")

plt.subplot(325, sharex=ax1)
_ = plt.hist(diff_orig_uvc[np.isfinite(diff_orig_uvc)])
plt.title("Orig - uvcombine feather")

plt.subplot(326, sharex=ax1)
_ = plt.hist(diff_orig_casa[np.isfinite(diff_orig_casa)])
plt.title("Orig - CASA feather")

plt.figure()

orig_pspec = PowerSpectrum(input_proj).run(verbose=False, fit_2D=False)
casafeather_pspec = PowerSpectrum(casa_feather_proj).run(verbose=False,
                                                         fit_2D=False)
uvcombfeather_pspec = PowerSpectrum(uvcomb_feather_proj).run(verbose=False,
                                                             fit_2D=False)

sd_pspec = PowerSpectrum(sd_proj).run(verbose=False, fit_2D=False)
sd_pspec_beamcorr = PowerSpectrum(sd_proj).run(verbose=False,
                                               fit_2D=False,
                                               beam_correct=True)
intf_pspec = PowerSpectrum(intf_proj).run(verbose=False, fit_2D=False)

plt.loglog(orig_pspec.freqs.value, orig_pspec.ps1D, label='Original')
plt.loglog(casafeather_pspec.freqs.value,
           casafeather_pspec.ps1D,
           label='CASA feath')