Example #1
def main():
    import pyfits as fits
    from myimage_analysis import bin_image
    import matplotlib.pyplot as plt
    import numpy as np

    av_dir = '/d/bip3/ezbc/perseus/data/av/'

    # Load the Planck Av map and mask values above 1
    av_data, av_header = fits.getdata(av_dir +
                                      'perseus_av_planck_5arcmin.fits',
                                      header=True)

    av_data[av_data > 1] = np.nan

    # Bin the image in 11 x 11 pixel blocks, ignoring NaNs
    av_data_binned, av_header_binned = bin_image(av_data,
                                                 binsize=(11, 11),
                                                 statistic=np.nanmean,
                                                 header=av_header)

    print(av_data.shape, av_data_binned.shape)

    fits.writeto(av_dir + 'test.fits', av_data_binned, av_header_binned,
                 clobber=True)

    if 1:
        plt.imshow(av_data, origin='lower')
        plt.show()
        plt.imshow(av_data_binned, origin='lower')
        plt.show()

    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'

    # Load the GALFA HI cube and mask values above 10
    hi_data, hi_header = fits.getdata(hi_dir +
                                      'perseus_hi_galfa_cube_regrid_planckres.fits',
                                      header=True)

    hi_data[hi_data > 10] = np.nan

    # Bin only the two spatial axes of the cube; keep the spectral axis intact
    hi_data_binned, hi_header_binned = bin_image(hi_data,
                                                 binsize=(1, 11, 11),
                                                 statistic=np.nanmean,
                                                 header=hi_header)

    fits.writeto(hi_dir + 'test.fits', hi_data_binned, hi_header_binned,
                 clobber=True)

    # Compare one spectral channel before and after binning
    plt.imshow(hi_data[500], origin='lower')
    plt.colorbar()
    plt.show()
    plt.imshow(hi_data_binned[500], origin='lower')
    plt.colorbar()
    plt.show()
Example #2
def plot_co_spectra(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file
    import numpy as np

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = check_file(co_filename, clobber=False)

    if not exists:
        co_data, co_header = fits.getdata(co_filename, header=True)
        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                     cloud.co_data,
                     cloud.co_header,
                     )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename, header=True)

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
Example #3
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import mystats
    import myio
    import pickle
    import os

    # Location of models
    #MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_cmode1_3inc/'
    MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_highres_2/'

    # If true, deletes files to be written
    CLOBBER = 0

    os.chdir(MODEL_DIR + 'models')

    # Delete GIPSY files, leaving only FITS files
    filename_delete_list = os.listdir('./')
    for filename in filename_delete_list:
        if '.image' in filename or '.descr' in filename:
            os.system('rm -rf ' + filename)


    # get leftover fits files
    filename_init_list = os.listdir('./')

    filename_list = []
    for filename in filename_init_list:
        if 'model' in filename and 'regrid' not in filename:
            filename_list.append(filename)

    #filename_list = filename_list[:5]

    stats = {
            'logL': np.empty(len(filename_list)),
            'model_names': [],
            'std': np.empty(len(filename_list)),
            'mean_abs_resid': np.empty(len(filename_list)),
            'sum_abs_resid': np.empty(len(filename_list)),
            }

    cube_name = '749237_rebin_cube_regrid.fits'
    unbinned_cube_name = '749237_rebin_cube.fits'
    cube_error_name = '749237_rebin_cube_error_regrid.fits'
    data, header = fits.getdata(MODEL_DIR + cube_name, header=True)
    error = fits.getdata(MODEL_DIR + cube_error_name)

    data_sum = np.nansum(data)
    #binsize = 6

    _, unbinned_header = fits.getdata(MODEL_DIR + '../' + \
                                      unbinned_cube_name,
                                      header=True)
    # Set the bin size to one beam width in pixels
    beamsize = unbinned_header['BMAJ']
    cdelt = np.abs(unbinned_header['CDELT1'])
    binsize = int(beamsize / cdelt)

    mask = (np.isnan(data) | np.isnan(error))

    # Bin each model cube and compare it to the data
    for i, model_name in enumerate(filename_list):

        model_bin_name = model_name.replace('.FITS', '_regrid.FITS')

        exists = myio.check_file(model_bin_name, clobber=CLOBBER)
        if exists:
            print('Loading cube:\n' + model_bin_name)
            model_bin = fits.getdata(model_bin_name)
        else:
            print('Binning cube:\n' + model_name)

            model = fits.getdata(model_name)

            print('\tBinsize = ' + str(binsize))

            # bin the model
            model_bin = bin_image(model,
                                  binsize=(1, binsize, binsize),
                                  statistic=np.nanmean,
                                  quick_bin=True
                                  )

            # normalize the model to have same sum as data
            model_bin = model_bin / np.nansum(model_bin) * data_sum
            #assert np.nansum(model_bin) == data_sum

            # write the model to a file
            fits.writeto(model_bin_name,
                         model_bin,
                         header,
                         clobber=CLOBBER)

        residuals = model_bin[~mask] - data[~mask]
        stats['model_names'].append(model_bin_name)
        stats['logL'][i] = mystats.calc_logL(model_bin[~mask],
                                             data[~mask],
                                             data_error=error[~mask])
        stats['std'][i] = np.nanstd(residuals)
        stats['mean_abs_resid'][i] = np.nanmean(np.abs(residuals))
        stats['sum_abs_resid'][i] = np.nansum(np.abs(residuals))

    with open(MODEL_DIR + 'statistics.pickle', 'wb') as f:
        pickle.dump(stats, f)

    with open(MODEL_DIR + 'statistics.pickle', 'rb') as f:
        stats = pickle.load(f)
Example #4
def test_bin_image():

    import numpy as np
    from numpy.testing import assert_array_almost_equal
    from myimage_analysis import bin_image

    unbinned = np.arange(0, 12, 1).reshape((3, 4))
    #array([[ 0,  1,  2,  3],
    #       [ 4,  5,  6,  7],
    #       [ 8,  9, 10, 11]])

    binned = bin_image(unbinned, binsize=(2, 2), statistic=np.nansum)

    answer = np.array([[10, 18],
                       ])

    assert_array_almost_equal(binned, answer)

    # bin whole axis
    binned = bin_image(unbinned, binsize=(3, 1), statistic=np.nansum)

    answer = np.array([[12, 15, 18, 21],
                       ])

    assert_array_almost_equal(binned, answer)

    # Bin along the second axis only
    binned = bin_image(unbinned, binsize=(1, 2), statistic=np.nansum)

    answer = np.array([[1, 5],
                       [9, 13],
                       [17, 21]])

    assert_array_almost_equal(binned, answer)

    # Test binsize being a float
    binned = bin_image(unbinned, binsize=(1.1, 2.0), statistic=np.nansum)

    answer = np.array([[1, 5],
                       [9, 13],
                       [17, 21]])

    assert_array_almost_equal(binned, answer)

    # Disabled: example using a user-defined statistic function
    if 0:
        def statistic(image, axis=None):
            return np.nansum(image, axis=axis)

        binned = bin_image(unbinned, binsize=(1, 2), statistic=statistic)

        answer = np.array([[1, 13],
                           [41, 85],
                           [145, 221]])

        assert_array_almost_equal(binned, answer)

    # Default statistic on a 10 x 10 image; the expected answer below
    # corresponds to summing each 2 x 2 block.
    m = np.arange(0, 100, 1).reshape((10, 10))
    n = bin_image(m, binsize=(2, 2))

    answer = np.array([[ 22,  30,  38,  46,  54],
                       [102, 110, 118, 126, 134],
                       [182, 190, 198, 206, 214],
                       [262, 270, 278, 286, 294],
                       [342, 350, 358, 366, 374]])
    assert_array_almost_equal(n, answer)
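
For reference, a minimal NumPy sketch that reproduces the 2-D expected outputs in this test (the name bin_image_sketch is hypothetical; this is not the actual myimage_analysis implementation, which also supports 3-D cubes and header updates):

import numpy as np

def bin_image_sketch(image, binsize=(2, 2), statistic=np.nansum):
    # Drop trailing rows/columns that do not fill a complete bin,
    # then apply `statistic` once over each binsize block.
    by, bx = int(binsize[0]), int(binsize[1])
    ny, nx = image.shape[0] // by, image.shape[1] // bx
    trimmed = image[:ny * by, :nx * bx]
    blocks = trimmed.reshape(ny, by, nx, bx).transpose(0, 2, 1, 3)
    return statistic(blocks.reshape(ny, nx, by * bx), axis=2)

# Reproduces the first case above:
# bin_image_sketch(np.arange(12).reshape((3, 4)))  ->  [[10, 18]]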
Example #5
def plot_hi_spectrum(cloud_results, plot_co=1):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file
    import numpy as np

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = check_file(co_filename, clobber=False)

        if not exists:
            co_data, co_header = fits.getdata(co_filename, header=True)
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                         cloud.co_data,
                         cloud.co_header,
                         )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename, header=True)

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
        # Assumed unbinned HI filename attribute; without it hi_filename
        # would be undefined below.
        hi_filename = cloud.hi_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    import matplotlib.pyplot as plt
    plt.close()
    plt.clf()
    # Quick-look map of one CO channel with the region mask applied
    co = np.copy(cloud.co_data[30, :, :])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    assert all((cloud.hi_data.shape, cloud.co_data.shape,
                cloud.region_mask.shape))

    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
Example #6
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import os

    os.chdir('/d/bip3/ezbc/shield/749237_lowres/')

    # If true, deletes files to be written
    clobber = 1

    # Cubes to bin
    in_images = ('749237_rebin_cube.fits',)

    # Bin each cube and write the binned data, error, and N(HI) maps
    out_images = []
    for in_image in in_images:

        print('Binning cube:\n' + in_image)

        cube, header = fits.getdata(in_image, header=True)

        # Set the HI rest frequency and velocity axis type in the header
        header['FREQ0'] = 1.4204058E+09
        header['RESTFREQ'] = 1.4204058E+09
        header['CTYPE3'] = 'VELO'

        # Set the bin size to one beam width in pixels
        beamsize = header['BMAJ']
        cdelt = np.abs(header['CDELT1'])
        binsize = int(beamsize / cdelt)

        print('\tBinsize = ' + str(binsize))


        if 1:
            # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
            # cube flux calibration error = 10%
            # add errors quadratically
            cube_std = np.nanstd(cube[0, :, :])
            cube_error = ((0.1 * cube)**2 + cube_std**2)**0.5

        cube_bin, header_bin = bin_image(cube,
                                         binsize=(1, binsize, binsize),
                                         header=header,
                                         statistic=np.nanmean,
                                         )

        # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
        # cube flux calibration error = 10%
        # add errors quadratically
        cube_bin_std = np.nanstd(cube_bin[0, :, :])
        cube_error_bin = ((0.1 * cube_bin)**2 + cube_bin_std**2)**0.5

        if 0:
            noise_func = lambda x: (1 / np.nansum(x**-2))**0.5
            cube_error_bin = bin_image(cube_error,
                                       binsize=(1, binsize, binsize),
                                       statistic=noise_func,
                                       )

        fits.writeto(in_image,
                     cube,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error'),
                     cube_error,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube.fits', 'cube_regrid.fits'),
                     cube_bin,
                     header_bin,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error_regrid'),
                     cube_error_bin,
                     header_bin,
                     clobber=clobber)
        #else:
        #    cube_bin, header_bin = \
        #        fits.getdata(in_image.replace('cube.fits', 'cube_regrid.fits'),
        #                     clobber=clobber, header=True)

        # make nhi_image
        velocity_axis = make_velocity_axis(header_bin)

        # convert to T_B
        cube_bin_tb = 1.36 * 21**2 * cube_bin * 1000.0 / \
                      (header_bin['BMAJ'] * 3600.) / \
                      (3600. * header_bin['BMIN'])

        cube_tb = 1.36 * 21**2 * cube * 1000.0 / \
                      (header['BMAJ'] * 3600.) / \
                      (3600. * header['BMIN'])

        # convert moment zero images to column density units.
        #	Recall:  1 K = (7.354E-8)*[Bmaj(")*Bmin(")/lamda^2(m)] Jy/Bm

        #	Here, units of images are Jy/Bm m/s; cellsize = 2";
        #	    lambda = 0.211061140507 m

        #	Thus, for the 21 cm line of Hydrogen, we have:

        #	    1 K = Bmaj(")*Bmin(")/(6.057493205E5) Jy/Bm
        #			---- OR ----
        #	    1 Jy/Bm = (6.057493205E5)/[Bmaj(")*Bmin(")]

        #	Now, recall that: N_HI = (1.8224E18 cm^-2)*[T_b (K)]*int(dv)
        #		-- For moment maps in K km/sec, just input the values
        #		& multiply by coefficient.
        #	   -- Assure that units are Jy/Bm km/sec (i.e., divide by 1000)
        #	   Leave in units of 1E20 cm^-2 by dividing by 1E20:

        #	   For a x beam:
        #               N_HI (cm^-2) = (image) *
        #		[(6.057493205E5)/(*)] * (1/1000) * (1.8224E18 cm^-2) *
        #		(1/1E20)
        #		N_HI (cm^-2) = (image)*

        # Write N(HI) maps derived from the binned and unbinned cubes
        nhi_image = calculate_nhi(cube_bin_tb,
                                  velocity_axis=velocity_axis,
                                  header=header_bin,
                                  fits_filename=in_image.replace('cube.fits',
                                                                 'nhi_regrid.fits'))
        nhi_image = calculate_nhi(cube_tb,
                                  velocity_axis=velocity_axis,
                                  header=header,
                                  fits_filename=in_image.replace('cube.fits',
                                                                 'nhi.fits'))
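
The inline Jy/beam to brightness-temperature conversion above could be factored into a small helper. A minimal sketch (the function name is hypothetical), mirroring the expression used in the code, i.e. T_B [K] ~ 1.36 * lambda[cm]^2 * S[mJy/beam] / (Bmaj["] * Bmin["]):

def jybeam_to_tb(cube_jybeam, bmaj_deg, bmin_deg, wavelength_cm=21.0):
    # Convert a Jy/beam cube to brightness temperature in K.
    # Beam axes come from the FITS header in degrees; the factor of 1000
    # converts Jy/beam to mJy/beam, as in the inline expression above.
    bmaj_arcsec = bmaj_deg * 3600.0
    bmin_arcsec = bmin_deg * 3600.0
    return (1.36 * wavelength_cm**2 * cube_jybeam * 1000.0
            / (bmaj_arcsec * bmin_arcsec))

# e.g. cube_bin_tb = jybeam_to_tb(cube_bin, header_bin['BMAJ'], header_bin['BMIN'])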
Example #7
def plot_co_spectra(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file
    import numpy as np

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = check_file(co_filename, clobber=False)

    if not exists:
        co_data, co_header = fits.getdata(
            co_filename,
            header=True,
        )
        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(
            cloud.co_filename.replace('.fits', '_bin.fits'),
            cloud.co_data,
            cloud.co_header,
        )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename, header=True)

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
Example #8
def plot_hi_spectrum(cloud_results, plot_co=1):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file
    import numpy as np

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = check_file(co_filename, clobber=False)

        if not exists:
            co_data, co_header = fits.getdata(
                co_filename,
                header=True,
            )
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(
                cloud.co_filename.replace('.fits', '_bin.fits'),
                cloud.co_data,
                cloud.co_header,
            )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename, header=True)

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
        # Assumed unbinned HI filename attribute; without it hi_filename
        # would be undefined below.
        hi_filename = cloud.hi_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    import matplotlib.pyplot as plt
    plt.close()
    plt.clf()
    co = np.copy(cloud.co_data[30, :, :])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    assert all(
        (cloud.hi_data.shape, cloud.co_data.shape, cloud.region_mask.shape))

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
Example #9
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import mystats
    import myio
    import pickle
    import os

    # Location of models
    #MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_cmode1_3inc/'
    MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_highres_2/'

    # If true, deletes files to be written
    CLOBBER = 0

    os.chdir(MODEL_DIR + 'models')

    # Delete GIPSY files, leaving only FITS files
    filename_delete_list = os.listdir('./')
    for filename in filename_delete_list:
        if '.image' in filename or '.descr' in filename:
            os.system('rm -rf ' + filename)

    # get leftover fits files
    filename_init_list = os.listdir('./')

    filename_list = []
    for filename in filename_init_list:
        if 'model' in filename and 'regrid' not in filename:
            filename_list.append(filename)

    #filename_list = filename_list[:5]

    stats = {
        'logL': np.empty(len(filename_list)),
        'model_names': [],
        'std': np.empty(len(filename_list)),
        'mean_abs_resid': np.empty(len(filename_list)),
        'sum_abs_resid': np.empty(len(filename_list)),
    }

    cube_name = '749237_rebin_cube_regrid.fits'
    unbinned_cube_name = '749237_rebin_cube.fits'
    cube_error_name = '749237_rebin_cube_error_regrid.fits'
    data, header = fits.getdata(MODEL_DIR + cube_name, header=True)
    error = fits.getdata(MODEL_DIR + cube_error_name)

    data_sum = np.nansum(data)
    #binsize = 6

    _, unbinned_header = fits.getdata(MODEL_DIR + '../' + \
                                      unbinned_cube_name,
                                      header=True)
    # Set the bin size to one beam width in pixels
    beamsize = unbinned_header['BMAJ']
    cdelt = np.abs(unbinned_header['CDELT1'])
    binsize = int(beamsize / cdelt)

    mask = (np.isnan(data) | np.isnan(error))

    # Bin each model cube and compare it to the data
    for i, model_name in enumerate(filename_list):

        model_bin_name = model_name.replace('.FITS', '_regrid.FITS')

        exists = myio.check_file(model_bin_name, clobber=CLOBBER)
        if exists:
            print('Loading cube:\n' + model_bin_name)
            model_bin = fits.getdata(model_bin_name)
        else:
            print('Binning cube:\n' + model_name)

            model = fits.getdata(model_name)

            print('\tBinsize = ' + str(binsize))

            # bin the model
            model_bin = bin_image(model,
                                  binsize=(1, binsize, binsize),
                                  statistic=np.nanmean,
                                  quick_bin=True)

            # normalize the model to have same sum as data
            model_bin = model_bin / np.nansum(model_bin) * data_sum
            #assert np.nansum(model_bin) == data_sum

            # write the model to a file
            fits.writeto(model_bin_name, model_bin, header, clobber=CLOBBER)

        residuals = model_bin[~mask] - data[~mask]
        stats['model_names'].append(model_bin_name)
        stats['logL'][i] = mystats.calc_logL(model_bin[~mask],
                                             data[~mask],
                                             data_error=error[~mask])
        stats['std'][i] = np.nanstd(residuals)
        stats['mean_abs_resid'][i] = np.nanmean(np.abs(residuals))
        stats['sum_abs_resid'][i] = np.nansum(np.abs(residuals))

    with open(MODEL_DIR + 'statistics.pickle', 'wb') as f:
        pickle.dump(stats, f)

    with open(MODEL_DIR + 'statistics.pickle', 'rb') as f:
        stats = pickle.load(f)