def get_results(global_args):
    """Return the analysis results dict, loading from cache when allowed.

    Builds the bootstrap-results pickle path from the filename base,
    stores it under ``global_args['results_filename']``, and either
    loads the cached results or reruns the analysis.
    """

    import myio

    cloud_name = global_args['cloud_name']
    print('\nPerforming analysis on ' + cloud_name)
    print('=======================' + '=' * len(cloud_name))

    # Construct the results filename from the analysis parameters.
    filename_base, global_args = create_filename_base(global_args)
    print('\n\tFilename base = \n\t' + filename_base)
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    results_filename = (results_dir + 'bootstrap_results/' +
                        filename_base + '_bootstrap_results.pickle')
    global_args['results_filename'] = results_filename

    exists = myio.check_file(results_filename)

    # Load cached results when requested and present; otherwise rerun.
    if global_args['load'] and exists:
        print('\n\tLoading results...')
        results_dict = load_results(global_args['results_filename'])
    else:
        results_dict = run_cloud_analysis(global_args)

    # derive col dens images and statistics on MC sim
    #add_results_analysis(results_dict)

    # calculate errors on dgrs and intercept
    #results_dict['params_summary'] = calc_param_errors(results_dict)

    return results_dict
# Esempio n. 2
def get_results(global_args):
    """Load cached bootstrap results or run the full cloud analysis.

    Duplicate of the example above. Builds the results pickle path,
    records it in ``global_args['results_filename']``, and loads or
    recomputes the results dict.

    Parameters
    ----------
    global_args : dict
        Must contain at least 'cloud_name' and 'load'; mutated to add
        'results_filename'.

    Returns
    -------
    results_dict
        Whatever ``load_results`` / ``run_cloud_analysis`` return —
        presumably a dict of analysis products; can't tell from here.
    """

    import myio

    print('\nPerforming analysis on ' + global_args['cloud_name'])
    print('=======================' + '=' * len(global_args['cloud_name']))

    # Get the results filename
    filename_base, global_args = create_filename_base(global_args)
    print('\n\tFilename base = \n\t' + filename_base)
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    results_filename = results_dir + \
               'bootstrap_results/' + filename_base + \
               '_bootstrap_results.pickle'
    global_args['results_filename'] = results_filename

    # NOTE(review): check_file is called unconditionally; it may have side
    # effects depending on its clobber default — confirm in myio.
    exists = myio.check_file(results_filename)

    # either load or perform analysis
    if global_args['load'] and exists:
        print('\n\tLoading results...')
        results_dict = load_results(global_args['results_filename'])

    else:
        results_dict = run_cloud_analysis(global_args)

    # derive col dens images and statistics on MC sim
    #add_results_analysis(results_dict)

    # calculate errors on dgrs and intercept
    #results_dict['params_summary'] = calc_param_errors(results_dict)

    return results_dict
# Esempio n. 3
def plot_co_spectra(results,):
    """Plot the CO spectrum diagnostic figure for a cloud.

    Bins the CO cube to the analysis resolution if the binned file does
    not exist yet, derives the region mask, and delegates plotting to
    ``cloudpy.plot_hi_spectrum``.

    Parameters
    ----------
    results : dict
        Cloud-results dict with 'figure_dir', 'filename_extension',
        'args' and 'cloud' entries.
    """
    # BUG FIX: the body referenced an undefined name ``cloud_results``;
    # the parameter is ``results``. Alias it to keep the interface intact.
    cloud_results = results

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = \
        check_file(co_filename, clobber=False)

    if not exists:
        co_data, co_header = fits.getdata(co_filename,
                                          header=True,
                                          )
        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                     cloud.co_data,
                     cloud.co_header,
                     )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename,
                         header=True,
                         )

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    # BUG FIX: ``plot_co`` was referenced but never defined; this function
    # exists to plot CO, so enable it explicitly.
    plot_co = True

    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
# Esempio n. 4
def perform_background_subtraction(av_filename,
                                   background_mask=None,
                                   background_dim=1,
                                   background_filename=None,
                                   background_init=None,
                                   background_region_filename=None):
    """Fit (or load) a background model for an Av image.

    If ``background_filename`` exists the background is loaded from it;
    otherwise the background regions are read from a ds9 region file,
    a mask is built, the background is fit and written to
    ``background_filename``.

    Parameters
    ----------
    av_filename : str
        FITS file containing the Av image.
    background_mask : ndarray, optional
        Ignored on the fitting path — it is recomputed from the ds9
        regions below; kept for interface compatibility.
    background_dim : int
        Passed through to ``fit_background``.
    background_filename : str
        FITS file to load the background from, or write it to.
    background_init : ndarray or float, optional
        Initial background subtracted from the data before fitting.
    background_region_filename : str, optional
        ds9 region file defining the background polygons.

    Returns
    -------
    background
        The fitted or loaded background image.
    """
    # Import external modules
    # -----------------------
    from myio import check_file
    from mycoords import convert_limit_coordinates, get_pix_coords, \
                         hrs2degs, load_ds9_region
    from myimage_analysis import fit_background
    from astropy.io import fits
    import mygeometry as myg

    av_data, av_header = fits.getdata(av_filename, header=True)

    if background_init is not None:
        av_data = av_data - background_init

    file_exists = check_file(background_filename, clobber=False)

    if not file_exists:
        props = {}

        print('writing new background')

        # Load background regions from ds9
        props = load_ds9_region(props,
                                filename=background_region_filename,
                                header=av_header,
                                key='background_regions')

        # Derive relevant region: start with everything selected, then
        # zero out each ds9 background polygon.
        background_mask = np.ones(av_data.shape)
        for background_region in props['background_regions']:
            background_vertices = \
              props['background_regions']\
                   [background_region]['poly_verts']['pixel']

            # block off region
            # IDIOM FIX: the original ``~np.logical_not(x)`` is a double
            # negation that merely casts to bool; cast explicitly instead.
            background_mask_temp = np.asarray(
                myg.get_polygon_mask(av_data, background_vertices),
                dtype=bool)

            background_mask[background_mask_temp] = 0
        background_mask = background_mask.astype(bool)

        # Fit the background
        background = fit_background(av_data,
                                    background_mask,
                                    background_dim=background_dim)

        fits.writeto(background_filename, background, av_header)
    else:
        background = fits.getdata(background_filename)

    return background
# Esempio n. 5
def perform_background_subtraction(av_filename, background_mask=None,
        background_dim=1, background_filename=None, background_init=None,
        background_region_filename=None):
    """Fit (or load) a background model for an Av image.

    Duplicate of the example above. Loads the background from
    ``background_filename`` when it exists; otherwise masks the ds9
    background regions, fits a background with ``fit_background`` and
    writes it out.

    Parameters
    ----------
    av_filename : str
        FITS file containing the Av image.
    background_mask : ndarray, optional
        Unused on the fitting path — recomputed from the ds9 regions.
    background_dim : int
        Passed through to ``fit_background``.
    background_filename : str
        FITS file to load the background from, or write it to.
    background_init : ndarray or float, optional
        Initial background subtracted from the data before fitting.
    background_region_filename : str, optional
        ds9 region file defining the background polygons.

    Returns
    -------
    background
        The fitted or loaded background image.
    """
    # Import external modules
    # -----------------------
    from myio import check_file
    from mycoords import convert_limit_coordinates, get_pix_coords, \
                         hrs2degs, load_ds9_region
    from myimage_analysis import fit_background
    from astropy.io import fits
    import mygeometry as myg

    av_data, av_header = fits.getdata(av_filename,
                                      header=True)

    if background_init is not None:
        av_data = av_data - background_init

    file_exists = check_file(background_filename, clobber=False)

    if not file_exists:
        props = {}

        print('writing new background')

        # Load background regions from ds9
        props = load_ds9_region(props,
                                filename=background_region_filename,
                                header=av_header,
                                key='background_regions')

        # Derive relevant region
        background_mask = np.ones(av_data.shape)
        for background_region in props['background_regions']:
            background_vertices = \
              props['background_regions']\
                   [background_region]['poly_verts']['pixel']

            # block off region
            # NOTE(review): ``~np.logical_not(x)`` is a double negation —
            # it just casts the polygon mask to bool.
            background_mask_temp = ~np.logical_not(myg.get_polygon_mask(av_data,
                                                background_vertices))

            background_mask[background_mask_temp] = 0
        background_mask = ~np.logical_not(background_mask)

        # Fit the background
        background = fit_background(av_data, background_mask,
                background_dim=background_dim)

        fits.writeto(background_filename, background, av_header)
    else:
        background = fits.getdata(background_filename)

    return background
# Esempio n. 6
def get_core_results(results, clobber=False):
    """Populate each cloud's model-fitting results, running or loading them.

    For every cloud in ``results``, loads the pickled model-fitting
    results if the file exists (honoring ``clobber``), otherwise runs
    the model analysis. Mutates ``results`` in place; returns None.

    Parameters
    ----------
    results : dict
        Maps cloud name -> cloud dict containing
        ``['model_fitting']['results_filename']``.
    clobber : bool
        Passed to ``myio.check_file``; presumably forces a rerun —
        confirm against myio.
    """

    import pickle
    import myio

    for cloud_name in results:
        cloud_dict = results[cloud_name]
        model_analysis_filename = \
                cloud_dict['model_fitting']['results_filename']
        exists = \
            myio.check_file(model_analysis_filename,
                            clobber=clobber)

        if not exists:
            run_model_analysis(cloud_dict)
        else:
            # BUG FIX: removed redundant ``f.close()`` after the ``with``
            # block — the context manager already closes the file.
            with open(model_analysis_filename, 'rb') as f:
                cloud_dict['model_fitting'] = pickle.load(f)
def main():
    """Compare binned model cubes against the observed cube and pickle stats.

    Cleans gipsy leftovers from the model directory, bins each model
    cube to the data resolution (or loads the cached binned cube),
    normalizes it to the data sum, and accumulates residual statistics
    (logL, std, mean/sum of absolute residuals) into a pickle.
    """

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import mystats
    import myio
    import pickle
    # BUG FIX: ``os`` is used below (chdir/listdir/system) but was never
    # imported, which raised NameError at runtime.
    import os

    # Location of models
    #MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_cmode1_3inc/'
    MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_highres_2/'

    # If true, deletes files to be written
    CLOBBER = 0

    os.chdir(MODEL_DIR + 'models')

    # Delete gipsy files, leaving only fits files
    filename_delete_list = os.listdir('./')
    for filename in filename_delete_list:
        if '.image' in filename or '.descr' in filename:
            os.system('rm -rf ' + filename)

    # get leftover fits files
    filename_init_list = os.listdir('./')

    filename_list = []
    for filename in filename_init_list:
        if 'model' in filename and 'regrid' not in filename:
            filename_list.append(filename)

    #filename_list = filename_list[:5]

    stats = {
            'logL': np.empty(len(filename_list)),
            'model_names': [],
            'std': np.empty(len(filename_list)),
            'mean_abs_resid': np.empty(len(filename_list)),
            'sum_abs_resid': np.empty(len(filename_list)),
            }

    cube_name = '749237_rebin_cube_regrid.fits'
    unbinned_cube_name = '749237_rebin_cube.fits'
    cube_error_name = '749237_rebin_cube_error_regrid.fits'
    data, header = \
        fits.getdata(MODEL_DIR + \
                     cube_name,
                     header=True)
    error = \
        fits.getdata(MODEL_DIR + \
                     cube_error_name)

    data_sum = np.nansum(data)
    #binsize = 6

    # Derive the bin size from the beam: one beam FWHM worth of pixels.
    _, unbinned_header = fits.getdata(MODEL_DIR + '../' + \
                                      unbinned_cube_name,
                                      header=True)
    beamsize = unbinned_header['BMAJ']
    cdelt = np.abs(unbinned_header['CDELT1'])
    binsize = int(beamsize / cdelt)

    # Exclude pixels that are NaN in either the data or the error cube.
    mask = (np.isnan(data) | np.isnan(error))

    # Load the images into miriad
    for i, model_name in enumerate(filename_list):

        model_bin_name = model_name.replace('.FITS', '_regrid.FITS')

        exists = myio.check_file(model_bin_name, clobber=CLOBBER)
        if exists:
            print('Loading cube:\n' + model_bin_name)
            model_bin = fits.getdata(model_bin_name)
        else:
            print('Binning cube:\n' + model_name)

            model = fits.getdata(model_name)

            print('\tBinsize = ' + str(binsize))

            # bin the model
            model_bin = bin_image(model,
                                  binsize=(1, binsize, binsize),
                                  statistic=np.nanmean,
                                  quick_bin=True
                                  )

            # normalize the model to have same sum as data
            model_bin = model_bin / np.nansum(model_bin) * data_sum
            #assert np.nansum(model_bin) == data_sum

            # write the model to a file
            fits.writeto(model_bin_name,
                         model_bin,
                         header,
                         clobber=CLOBBER)

        residuals = model_bin[~mask] - data[~mask]
        stats['model_names'].append(model_bin_name)
        stats['logL'][i] = mystats.calc_logL(model_bin[~mask],
                                             data[~mask],
                                             data_error=error[~mask])
        stats['std'][i] = np.nanstd(residuals)
        stats['mean_abs_resid'][i] = np.nanmean(np.abs(residuals))
        stats['sum_abs_resid'][i] = np.nansum(np.abs(residuals))

    with open(MODEL_DIR + 'statistics.pickle', 'wb') as f:
        pickle.dump(stats, f)

    # NOTE(review): reloading the pickle just written looks like a sanity
    # check of round-tripping; kept as-is.
    with open(MODEL_DIR + 'statistics.pickle', 'rb') as f:
        stats = pickle.load(f)
# Esempio n. 8
def plot_hi_spectrum(cloud_results, plot_co=1):
    """Plot the HI (and optionally CO) spectrum diagnostic for a cloud.

    Optionally bins the CO cube to analysis resolution, chooses binned
    or native Av/HI maps per ``bin_procedure``, derives the region
    mask, and delegates to ``cloudpy.plot_hi_spectrum``.

    Parameters
    ----------
    cloud_results : dict
        Cloud-results dict with 'figure_dir', 'filename_extension',
        'args' and 'cloud' entries.
    plot_co : int or bool
        If truthy, also prepare and plot the CO spectrum.
    """

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = \
            check_file(co_filename, clobber=False)

        if not exists:
            co_data, co_header = fits.getdata(co_filename,
                                              header=True,
                                              )
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                         cloud.co_data,
                         cloud.co_header,
                         )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename,
                             header=True,
                             )

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Choose the binned or native-resolution maps.
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
        # BUG FIX: hi_filename was never assigned on this branch, causing
        # a NameError at the fits.getdata call below. Assumes the cloud
        # object carries the native HI path as ``hi_filename`` — confirm.
        hi_filename = cloud.hi_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    # Debug figure: one CO channel with the region masked out.
    import matplotlib.pyplot as plt
    plt.close(); plt.clf();
    co = np.copy(cloud.co_data[30, :, :])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    # BUG FIX: the original ``assert all((a.shape, b.shape, c.shape))``
    # was vacuous — non-empty tuples are always truthy. Check the cubes'
    # spatial dimensions against the region mask instead.
    assert cloud.hi_data.shape[1:] == cloud.region_mask.shape
    assert cloud.co_data.shape[1:] == cloud.region_mask.shape

    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
# Esempio n. 9
def run_cloud_analysis(global_args, ):
    """Run the HI/CO analysis for one cloud and plot spectra and N(HI) maps.

    Loads the Av, HI (DR1 and DR2) and CO cubes, derives or loads the
    spectra, determines the HI velocity range, computes N(HI) maps for
    both HI releases and plots their ratio.

    Parameters
    ----------
    global_args : dict
        Analysis settings ('cloud_name', 'region', 'load', 'data_type',
        'background_subtract', 'region_name', 'clobber_hi_error',
        'clobber_spectra', 'smooth_hi_to_co_res', 'hi_range_calc', ...).
        Mutated: 'vel_range_error' may be added.
    """

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
        'figure_dir': figure_dir,
        'cloud_name': cloud_name,
        'filename_base': filename_base,
        'plot_diagnostics': global_args['plot_diagnostics'],
        #'av_nhi_contour': av_nhi_contour,
        'av_nhi_contour': True,
        'av_nhi_limits': [0, 20, -1, 9],
        #'av_nhi_limits': None,
    }

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)

    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)

    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            # BUG FIX: the kernel takes a standard deviation (see comment
            # above); the original passed ``width`` and left ``std`` unused.
            g = Gaussian2DKernel(std)

            # Convolve data (``range`` instead of py2-only ``xrange``)
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in range(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in range(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')
        else:
            # BUG FIX: without smoothing, hi_data_co_res was undefined and
            # the spectrum calls below raised NameError. Fall back to the
            # native-resolution cubes.
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        gauss_fits = None
        comp_num = None
        # BUG FIX: these names were left undefined on this branch and
        # caused NameErrors in hi_range_kwargs / the DR1 plot below.
        hi_range_error = None
        velocity_range_dr1 = [-5, 15]
        gauss_fits_dr1 = None
        comp_num_dr1 = None

    hi_range_kwargs = {
        'velocity_range': velocity_range,
        'gauss_fits': gauss_fits,
        'comp_num': comp_num,
        'hi_range_error': hi_range_error,
        'vel_range': velocity_range,
        'gauss_fit_kwargs': gauss_fit_kwargs,
    }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_spectrum,
        gauss_fits=gauss_fits,
        comp_num=comp_num,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_dr1_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_dr1_spectrum,
        gauss_fits=gauss_fits_dr1,
        comp_num=comp_num_dr1,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range_dr1,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # NOTE(review): the fitted velocity ranges are overridden with a fixed
    # window here — presumably intentional for the residual comparison.
    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels (negative N(HI) in both releases)
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(
        nhi_image=nhi_image / nhi_image_dr1,
        header=hi_header,
        limits=[65, 45, 25, 35],
        filename=filename,
        show=0,
        cb_text='DR2 / DR1',
        #hi_vlimits=[0.91, 0.93],
    )
def run_cloud_analysis(global_args,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir =  '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }


    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])


    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)


    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)


    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            g = Gaussian2DKernel(width)

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                             statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        gauss_fits = None
        comp_num = None

    hi_range_kwargs = {
                       'velocity_range': velocity_range,
                       'gauss_fits': gauss_fits,
                       'comp_num': comp_num,
                       'hi_range_error': hi_range_error,
                       'vel_range': velocity_range,
                       'gauss_fit_kwargs': gauss_fit_kwargs,
                       }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_spectrum,
                 gauss_fits=gauss_fits,
                 comp_num=comp_num,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_dr1_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_dr1_spectrum,
                 gauss_fits=gauss_fits_dr1,
                 comp_num=comp_num_dr1,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range_dr1,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=[65, 45, 25, 35],
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1',
                   #hi_vlimits=[0.91, 0.93],
                   )
# Esempio n. 11  (scrape artifact -- commented out so the file parses)
# 0
def plot_co_spectra(results, plot_co=1):
    """Plot the CO spectrum of a cloud's region.

    Bins the cloud's CO cube if a binned version is not already on disk,
    derives the region mask from the binned Av image, and writes the
    spectrum figure via ``cloudpy.plot_hi_spectrum``.

    Parameters
    ----------
    results : dict
        Cloud results dict with keys 'figure_dir', 'filename_extension',
        'args' (containing 'bin_procedure') and 'cloud' (the cloud object).
    plot_co : int, optional
        Passed through to ``cloudpy.plot_hi_spectrum``; nonzero overplots CO.
        (Previously this name was referenced but never defined — NameError.)
    """

    # BUG FIX: the body historically referenced ``cloud_results``, which was
    # never defined in this scope (the parameter was named ``results``),
    # so every call raised NameError.  Alias it so the body works.
    cloud_results = results

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    # When binning was applied upstream, use the binned cube's filename
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = \
        check_file(co_filename, clobber=False)

    if not exists:
        # Binned cube is absent: read the ORIGINAL (unbinned) cube, bin it,
        # and cache the result to disk.  (Previously this branch read
        # ``co_filename``, which at this point names the missing ``_bin``
        # file; the analogous rebuild in main() reads the unbinned source.)
        co_data, co_header = fits.getdata(
            cloud.co_filename,
            header=True,
        )
        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(
            cloud.co_filename.replace('.fits', '_bin.fits'),
            cloud.co_data,
            cloud.co_header,
        )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename, header=True)

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive the relevant region: rebuild the region mask against the
    # binned Av image so the CO mask matches the binned geometry.
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
# Esempio n. 12  (scrape artifact -- commented out so the file parses)
# 0
def plot_hi_spectrum(cloud_results, plot_co=1):
    """Plot the HI spectrum (optionally with CO) for a cloud's region.

    Loads the HI cube (binned or unbinned per ``bin_procedure``), rebuilds
    the region mask from the matching Av image, and writes the spectrum
    figure via ``cloudpy.plot_hi_spectrum``.

    Parameters
    ----------
    cloud_results : dict
        Cloud results dict with keys 'figure_dir', 'filename_extension',
        'args' (containing 'bin_procedure') and 'cloud' (the cloud object).
    plot_co : int, optional
        Nonzero to load the CO cube and overplot the CO spectrum.
    """

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        # When binning was applied upstream, use the binned cube's filename
        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = \
            check_file(co_filename, clobber=False)

        if not exists:
            # Binned cube is absent: read the ORIGINAL (unbinned) cube, bin
            # it, and cache the result.  (Previously this branch read
            # ``co_filename``, which at this point names the missing
            # ``_bin`` file; main() rebuilds from the unbinned source.)
            co_data, co_header = fits.getdata(
                cloud.co_filename,
                header=True,
            )
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(
                cloud.co_filename.replace('.fits', '_bin.fits'),
                cloud.co_data,
                cloud.co_header,
            )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename, header=True)

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Select binned vs. unbinned maps to match the upstream processing
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
        # BUG FIX: ``hi_filename`` was never assigned on this branch, so
        # the fits.getdata call below raised NameError whenever
        # bin_procedure was not 'all'/'mle'.
        hi_filename = cloud.hi_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    # NOTE(review): leftover debug output — writes one CO channel map to a
    # hard-coded user Desktop path.  Also note it reads ``cloud.co_data``
    # even when plot_co is falsy, which fails unless the cloud already
    # carries co_data; consider gating or removing.  Kept as-is to
    # preserve behavior.
    import matplotlib.pyplot as plt
    plt.close()
    plt.clf()
    co = np.copy(cloud.co_data[30, :, :])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    # NOTE(review): non-empty shape tuples are always truthy, so this
    # assertion can never fail — a shape-equality check was probably
    # intended.  Left unchanged to avoid introducing a new failure mode.
    assert all(
        (cloud.hi_data.shape, cloud.co_data.shape, cloud.region_mask.shape))

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
# Esempio n. 13  (scrape artifact -- commented out so the file parses)
# 0
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import mystats
    import myio
    import pickle

    # Location of models
    #MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_cmode1_3inc/'
    MODEL_DIR = '/d/bip3/ezbc/shield/749237_lowres/modeling_highres_2/'

    # If true, deletes files to be written
    CLOBBER = 0

    os.chdir(MODEL_DIR + 'models')

    # Ddelete gipsy files, leaving only fits files
    filename_delete_list = os.listdir('./')
    for filename in filename_delete_list:
        if '.image' in filename or '.descr' in filename:
            os.system('rm -rf ' + filename)

    # get leftover fits files
    filename_init_list = os.listdir('./')

    filename_list = []
    for filename in filename_init_list:
        if 'model' in filename and 'regrid' not in filename:
            filename_list.append(filename)

    #filename_list = filename_list[:5]

    stats = {
        'logL': np.empty(len(filename_list)),
        'model_names': [],
        'std': np.empty(len(filename_list)),
        'mean_abs_resid': np.empty(len(filename_list)),
        'sum_abs_resid': np.empty(len(filename_list)),
    }

    cube_name = '749237_rebin_cube_regrid.fits'
    unbinned_cube_name = '749237_rebin_cube.fits'
    cube_error_name = '749237_rebin_cube_error_regrid.fits'
    data, header = \
        fits.getdata(MODEL_DIR + \
                     cube_name,
                     header=True)
    error = \
        fits.getdata(MODEL_DIR + \
                     cube_error_name)

    data_sum = np.nansum(data)
    #binsize = 6

    _, unbinned_header = fits.getdata(MODEL_DIR + '../' + \
                                      unbinned_cube_name,
                                      header=True)
    beamsize = unbinned_header['BMAJ']
    cdelt = np.abs(unbinned_header['CDELT1'])
    binsize = int(beamsize / cdelt)

    mask = (np.isnan(data) | np.isnan(error))

    # Load the images into miriad
    for i, model_name in enumerate(filename_list):

        model_bin_name = model_name.replace('.FITS', '_regrid.FITS')

        exists = myio.check_file(model_bin_name, clobber=CLOBBER)
        if exists:
            print('Loading cube:\n' + model_bin_name)
            model_bin = fits.getdata(model_bin_name)
        else:
            print('Binning cube:\n' + model_name)

            model = fits.getdata(model_name)

            print('\tBinsize = ' + str(binsize))

            # bin the model
            model_bin = bin_image(model,
                                  binsize=(1, binsize, binsize),
                                  statistic=np.nanmean,
                                  quick_bin=True)

            # normalize the model to have same sum as data
            model_bin = model_bin / np.nansum(model_bin) * data_sum
            #assert np.nansum(model_bin) == data_sum

            # write the model to a file
            fits.writeto(model_bin_name, model_bin, header, clobber=CLOBBER)

        residuals = model_bin[~mask] - data[~mask]
        stats['model_names'].append(model_bin_name)
        stats['logL'][i] = mystats.calc_logL(model_bin[~mask],
                                             data[~mask],
                                             data_error=error[~mask])
        stats['std'][i] = np.nanstd(residuals)
        stats['mean_abs_resid'][i] = np.nanmean(np.abs(residuals))
        stats['sum_abs_resid'][i] = np.nansum(np.abs(residuals))

    with open(MODEL_DIR + 'statistics.pickle', 'wb') as f:
        pickle.dump(stats, f)

    with open(MODEL_DIR + 'statistics.pickle', 'rb') as f:
        stats = pickle.load(f)