Example #1
def plot_nhi_map(cloud_results,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import numpy as np

    filename = \
            cloud_results['figure_dir'] + 'diagnostics/maps/' + \
            cloud_results['filename_extension'] + '_nhi_map.png'

    cloud = cloud_results['cloud']
    props = cloud.props

    hi_data = fits.getdata(cloud.hi_filename)

    nhi_image = calculate_nhi(cube=hi_data,
                        velocity_axis=cloud.hi_vel_axis,
                        velocity_range=props['hi_velocity_range_max']['value'],
                        )

    # mask where N(HI) < 17 x 10^20 cm^-2 (nhi_image is in units of 10^20 cm^-2)
    mask = np.array(nhi_image < 17, dtype=bool)

    cloudpy.plot_nhi_map(cloud_results['cloud'],
                      nhi_image=nhi_image,
                      filename=filename,
                      mask=mask,
                      )
Example #2
def plot_nhi_map(cloud_results, ):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import numpy as np

    filename = \
            cloud_results['figure_dir'] + 'diagnostics/maps/' + \
            cloud_results['filename_extension'] + '_nhi_map.png'

    cloud = cloud_results['cloud']
    props = cloud.props

    hi_data = fits.getdata(cloud.hi_filename)

    nhi_image = calculate_nhi(
        cube=hi_data,
        velocity_axis=cloud.hi_vel_axis,
        velocity_range=props['hi_velocity_range_max']['value'],
    )

    # mask where N(HI) < 17 x 10^20 cm^-2 (nhi_image is in units of 10^20 cm^-2)
    mask = np.array(nhi_image < 17, dtype=bool)

    cloudpy.plot_nhi_map(
        cloud_results['cloud'],
        nhi_image=nhi_image,
        filename=filename,
        mask=mask,
    )
Example #3
def calc_model_chisq(params, av_image=None, av_image_error=None, hi_cube=None,
        hi_velocity_axis=None, hi_noise_cube=None, dgr=None):

    from myimage_analysis import calculate_nhi
    import numpy as np

    velocity_range = params['low_vel'].value, params['high_vel'].value

    nhi_image_temp, nhi_image_error = calculate_nhi(cube=hi_cube,
            velocity_axis=hi_velocity_axis,
            velocity_range=velocity_range,
            noise_cube=hi_noise_cube)

    # Select pixels with Av > 1.0 mag and Av_SNR > 5.0.
    # Av > 1.0 mag is used to avoid too low Av.
    # 1.0 mag corresponds to SNR = 1 / 0.2 ~ 5
    # (see Table 2 of Ridge et al. 2006).
    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                       (av_image == av_image) & \
                       (av_image_error == av_image_error))

    nhi_image_corr = nhi_image_temp[indices]
    nhi_image_error_corr = nhi_image_error[indices]
    av_image_data = av_image[indices]
    av_image_error_data = av_image_error[indices]

    # Create model image
    av_image_model = nhi_image_corr * dgr
    av_image_error_model = nhi_image_error_corr * dgr

    chisq = np.sum((av_image_data - av_image_model)**2 / \
                   (av_image_error_data**2))

    return chisq
Example #4
def calc_model_chisq(params, av_image=None, av_image_error=None, hi_cube=None,
        hi_velocity_axis=None, hi_noise_cube=None, dgr=None):

    from myimage_analysis import calculate_nhi
    import numpy as np

    velocity_range = params['low_vel'].value, params['high_vel'].value

    nhi_image_temp, nhi_image_error = calculate_nhi(cube=hi_cube,
            velocity_axis=hi_velocity_axis,
            velocity_range=velocity_range,
            noise_cube=hi_noise_cube)

    # Select pixels with Av > 1.0 mag and Av_SNR > 5.0.
    # Av > 1.0 mag is used to avoid too low Av.
    # 1.0 mag corresponds to SNR = 1 / 0.2 ~ 5
    # (see Table 2 of Ridge et al. 2006).
    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                       (av_image == av_image) & \
                       (av_image_error == av_image_error))

    nhi_image_likelihood = nhi_image_temp[indices]
    nhi_image_error_likelihood = nhi_image_error[indices]
    av_image_data = av_image[indices]
    av_image_error_data = av_image_error[indices]

    # Create model image
    av_image_model = nhi_image_likelihood * dgr
    av_image_error_model = nhi_image_error_likelihood * dgr

    chisq = np.sum((av_image_data - av_image_model)**2 / \
                   (av_image_error_data**2))

    return chisq
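For reference, the value returned by calc_model_chisq above is the (unreduced) chi-squared between the extinction data and the model A_V = DGR x N(HI), summed over the finite pixels selected by the NaN mask:

    \chi^2 = \sum_i \frac{\big(A_{V,i} - \mathrm{DGR}\, N_i(\mathrm{HI})\big)^2}{\sigma_{A_V,i}^2}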
Example #5
def calc_logL(theta):

    '''
    Calculates log likelihood

    http://www.physics.utah.edu/~detar/phys6720/handouts/curve_fit/curve_fit/node2.html

    '''

    from myimage_analysis import calculate_nhi
    import numpy as np

    # Unpack parameters
    vel_width, dgr, av_thres = theta


    # Define velocity range
    vel_range = (_velocity_center - vel_width / 2.,
                 _velocity_center + vel_width / 2.)

    # Derive N(HI) maps
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=_hi_cube,
                velocity_axis=_hi_velocity_axis,
                velocity_range=vel_range,
                noise_cube=_hi_noise_cube)

    # Avoid NaNs, and mask images with Av threshold
    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                       (_av_image == _av_image) & \
                       (_av_image <= av_thres))

    nhi_image_sub = nhi_image_temp[indices]
    nhi_image_sub_error = nhi_image_error[indices]
    av_image_sub = _av_image[indices]
    av_image_sub_error = np.average(_av_image_error[indices])

    # Create model of Av with N(HI) and DGR
    av_image_model = nhi_image_sub * dgr

    data = av_image_sub
    model = av_image_model
    error = av_image_sub_error
    N = av_image_sub.size

    if error > 0 and N > 0:
        #logL = -(np.sum((av_image_sub - av_image_model)**2 / \
        #         (2 * av_image_sub_error**2)) - np.log(av_image_sub.size))
        logL = - np.sum((data - model)**2 / (2 * error**2)) \
               - 0.5 * N * np.log(np.sum(2 * np.pi * error**2))
    else:
        logL = -np.inf

    print(theta, np.median(av_image_model) - np.median(av_image_sub), logL)

    if np.isnan(logL):
        return -np.inf

    return logL
Example #6
def calc_logL(theta):
    '''
    Calculates log likelihood

    http://www.physics.utah.edu/~detar/phys6720/handouts/curve_fit/curve_fit/node2.html

    '''

    from myimage_analysis import calculate_nhi
    import numpy as np

    # Unpack parameters
    vel_width, dgr, av_thres = theta

    # Define velocity range
    vel_range = (_velocity_center - vel_width / 2.,
                 _velocity_center + vel_width / 2.)

    # Derive N(HI) maps
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=_hi_cube,
                velocity_axis=_hi_velocity_axis,
                velocity_range=vel_range,
                noise_cube=_hi_noise_cube)

    # Avoid NaNs, and mask images with Av threshold
    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                       (_av_image == _av_image) & \
                       (_av_image <= av_thres))

    nhi_image_sub = nhi_image_temp[indices]
    nhi_image_sub_error = nhi_image_error[indices]
    av_image_sub = _av_image[indices]
    av_image_sub_error = np.average(_av_image_error[indices])

    # Create model of Av with N(HI) and DGR
    av_image_model = nhi_image_sub * dgr

    data = av_image_sub
    model = av_image_model
    error = av_image_sub_error
    N = av_image_sub.size

    if error > 0 and N > 0:
        #logL = -(np.sum((av_image_sub - av_image_model)**2 / \
        #         (2 * av_image_sub_error**2)) - np.log(av_image_sub.size))
        logL = - np.sum((data - model)**2 / (2 * error**2)) \
               - 0.5 * N * np.log(np.sum(2 * np.pi * error**2))
    else:
        logL = -np.inf

    print(theta, np.median(av_image_model) - np.median(av_image_sub), logL)

    if np.isnan(logL):
        return -np.inf

    return logL
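Because error is the scalar average of the masked A_V uncertainties, the np.sum in the normalization term acts on a scalar, and the expression implemented in calc_logL reduces to the Gaussian log-likelihood with a single uncertainty sigma for all N pixels:

    \ln L = -\sum_i \frac{(d_i - m_i)^2}{2\sigma^2} - \frac{N}{2}\ln\left(2\pi\sigma^2\right)

where d_i is the observed A_V and m_i = DGR x N_i(HI) is the model.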
Example #7
def plot_nh2_vs_nhi(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_nh2_vs_nhi'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
        'dgr': props['dust2gas_ratio_max']['value'],
        'intercept': props['intercept_max']['value']
    }

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import numpy as np

    av_data, av_header = fits.getdata(cloud.av_filename, header=True)
    if cloud.av_error_filename is not None:
        av_error_data = fits.getdata(cloud.av_error_filename)
    else:
        av_error_data = np.ones(av_data.shape) * cloud.av_error
    hi_data = fits.getdata(cloud.hi_filename)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    region_mask = cloud.region_mask

    # Create data
    nhi_image = calculate_nhi(
        cube=hi_data,
        velocity_axis=cloud.hi_vel_axis,
        velocity_range=props['hi_velocity_range_max']['value'],
    )

    nh2_image = 0.5 * ((av_data - fit_params['intercept']) / \
                        fit_params['dgr'] - nhi_image)

    mask = (region_mask) | (nhi_image < 0)

    nhi_image[mask] = np.nan
    nh2_image[mask] = np.nan
    #levels = np.logspace(np.log10(0.999), np.log10(0.5), 10)
    levels = 7

    cloudpy.plot_nh2_vs_nhi(
        nhi_image,
        nh2_image,
        filename=filename_base + '.png',
        title=cloud_results['args']['data_type'] + ', unmasked',
        fit_params=fit_params,
        levels=levels,
        #limits=[3,20, 0, 3],
        contour_plot=1,
    )
Example #8
def plot_nh2_vs_nhi(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_nh2_vs_nhi'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
                  'dgr': props['dust2gas_ratio_max']['value'],
                  'intercept': props['intercept_max']['value']}

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import numpy as np

    av_data, av_header = fits.getdata(cloud.av_filename, header=True)
    if cloud.av_error_filename is not None:
        av_error_data = fits.getdata(cloud.av_error_filename)
    else:
        av_error_data = np.ones(av_data.shape) * cloud.av_error
    hi_data = fits.getdata(cloud.hi_filename)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    region_mask = cloud.region_mask

    # Create data
    nhi_image = calculate_nhi(cube=hi_data,
                        velocity_axis=cloud.hi_vel_axis,
                        velocity_range=props['hi_velocity_range_max']['value'],
                        )

    nh2_image = 0.5 * ((av_data - fit_params['intercept']) / \
                        fit_params['dgr'] - nhi_image)

    mask = (region_mask) | (nhi_image < 0)

    nhi_image[mask] = np.nan
    nh2_image[mask] = np.nan
    #levels = np.logspace(np.log10(0.999), np.log10(0.5), 10)
    levels = 7

    cloudpy.plot_nh2_vs_nhi(nhi_image,
                      nh2_image,
                      filename=filename_base + '.png',
                      title=cloud_results['args']['data_type'] + ', unmasked',
                      fit_params=fit_params,
                      levels=levels,
                      #limits=[3,20, 0, 3],
                      contour_plot=1,
                      )
Example #9
def search_likelihoods(mesh):

    from myimage_analysis import calculate_nhi
    import numpy as np

    # calculate the likelihood for each velocity range

    # Progress bar parameters
    #total = float(likelihoods.size)
    #count = 0

    try:
        velocity_center = mesh[0]
        velocity_width = mesh[1]
        dgr = mesh[2]

        velocity_range = (velocity_center - velocity_width / 2.,
                          velocity_center + velocity_width / 2.)

        nhi_image_temp, nhi_image_error = \
                calculate_nhi(cube=hi_cube,
                    velocity_axis=hi_velocity_axis,
                    velocity_range=velocity_range,
                    noise_cube=hi_noise_cube)

        # Avoid NaNs
        indices = np.where((nhi_image_temp == nhi_image_temp) & \
                           (av_image == av_image) & \
                           (nhi_image_temp > 0))

        nhi_image_likelihood = nhi_image_temp[indices]
        nhi_image_error_likelihood = nhi_image_error[indices]
        av_image_likelihood = av_image[indices]
        if type(av_image_error) != float:
            av_image_error_likelihood = np.median(av_image_error[indices])
        else:
            av_image_error_likelihood = np.median(av_image_error)

        # Create model of Av with N(HI) and DGR
        av_image_model = nhi_image_likelihood * dgr

        logL = calc_logL(av_image_model,
                         av_image_likelihood,
                         data_error=av_image_error_likelihood)

        likelihood = logL

        return likelihood
    except KeyboardInterrupt:
        raise KeyboardInterruptError  # custom exception assumed to be defined at the module level
Example #10
            def test_mle_derivation(self, ):

                from numpy.testing import assert_array_almost_equal
                from numpy.testing import assert_almost_equal
                from myimage_analysis import calculate_nhi

                dgr = 0.1  # cm^2 10^-20 mag
                intercept = 1  # mag
                width = 20  # km/s

                vel_range = (self.cloud.vel_center - width / 2.0,
                             self.cloud.vel_center + width / 2.0)

                nhi_image = calculate_nhi(cube=self.cloud.hi_data,
                                          velocity_axis=self.cloud.hi_vel_axis,
                                          velocity_range=vel_range)

                # Create mock Av_data
                if 0:
                    av_data_mock = dgr * nhi_image + intercept

                    self.cloud.av_data = av_data_mock

                    self.cloud.run_analysis(
                        region_filename=self.region_filename,
                        region=self.region)

                    print('\nSaving cloud...')
                    cloudpy.save(self.cloud, self.cloud_filename)
                else:
                    self.cloud = cloudpy.load(self.cloud_filename)

                dgr_mle = self.cloud.props['dust2gas_ratio_max']['value']
                intercept_mle = self.cloud.props['intercept_max']['value']
                width_mle = self.cloud.props['hi_velocity_width_max']['value']

                assert_almost_equal(dgr_mle, dgr, decimal=1)
                assert_almost_equal(intercept_mle, intercept, decimal=1)
                assert_almost_equal(width_mle, width, decimal=-1)
Example #11
            def test_mle_derivation(self,):

                from numpy.testing import assert_array_almost_equal
                from numpy.testing import assert_almost_equal
                from myimage_analysis import calculate_nhi

                dgr = 0.1 # cm^2 10^-20 mag
                intercept = 1 # mag
                width = 20 # km/s

                vel_range = (self.cloud.vel_center - width / 2.0,
                             self.cloud.vel_center + width / 2.0)

                nhi_image = calculate_nhi(cube=self.cloud.hi_data,
                                          velocity_axis=self.cloud.hi_vel_axis,
                                          velocity_range=vel_range)

                # Create mock Av_data
                if 0:
                    av_data_mock = dgr * nhi_image + intercept

                    self.cloud.av_data = av_data_mock

                    self.cloud.run_analysis(region_filename=self.region_filename,
                                            region=self.region)

                    print('\nSaving cloud...')
                    cloudpy.save(self.cloud, self.cloud_filename)
                else:
                    self.cloud = cloudpy.load(self.cloud_filename)

                dgr_mle = self.cloud.props['dust2gas_ratio_max']['value']
                intercept_mle = self.cloud.props['intercept_max']['value']
                width_mle = self.cloud.props['hi_velocity_width_max']['value']

                assert_almost_equal(dgr_mle, dgr, decimal=1)
                assert_almost_equal(intercept_mle, intercept, decimal=1)
                assert_almost_equal(width_mle, width, decimal=-1)
Example #12
def plot_nhi_maps(results_dict, limits=None, cube_data=None, header=None,
        load_synthetic_cube=False, show=False, velocity_range=[0, 500],
        save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    import numpy as np
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG_BASE = DIR_FIG + 'nhi_map_data_synth'

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(cube=cube_data,
                                  velocity_axis=velocity_axis,
                                  velocity_range=velocity_range,
                                  )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_dict['velocity_axis']
    if not load_synthetic_cube:
        print('\nCreating synthetic cube...')
        cube_synthetic = create_synthetic_cube(results_dict, cube_data)

        np.save(FILENAME_CUBE_SYNTH, cube_synthetic)
    else:
        print('\nLoading synthetic cube...')
        cube_synthetic = np.load(FILENAME_CUBE_SYNTH)

    # Create N(HI) synthetic
    nhi_synthetic = myia.calculate_nhi(cube=cube_synthetic,
                                       velocity_axis=velocity_axis,
                                       velocity_range=velocity_range,
                                       )

    v_limits = [0, np.max(nhi_data)]
    v_limits = [-1, 41]

    if 0:
        import matplotlib.pyplot as plt
        plt.close(); plt.clf()
        fig, axes = plt.subplots(2,1)
        axes[0].imshow(nhi_data, origin='lower')
        axes[1].imshow(nhi_synthetic, origin='lower')
        plt.show()

    if save_pdf:
        ext = '.pdf'
    else:
        ext = '.png'
    filename_fig = FILENAME_FIG_BASE + ext
    print('\nPlotting N(HI) maps...')
    print(filename_fig)
    # Plot the maps together
    plot_nhi_maps(nhi_data,
                  nhi_synthetic,
                  header=header,
                  #limits=[278, -37, 282, -35],
                  limits=limits,
                  filename=filename_fig,
                  nhi_1_vlimits=v_limits,
                  nhi_2_vlimits=v_limits,
                  show=show,
                  vscale='linear',
                  )
Example #13
def main():

    import grid
    import numpy as np
    from myimage_analysis import calculate_nhi
    from mycoords import make_velocity_axis
    import pyfits as pf
    import mygeometry as myg
    import json

    # parameters used in script
    # -------------------------
    # Regions
    # Options are 'ds9' or 'av_gradient'
    box_method = 'av_gradient'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/dgr/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/cfa/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'

    av_data_planck, planck_header = pf.getdata(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            header=True)
    av_data_error_planck, planck_header = pf.getdata(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            header=True)

    # load GALFA HI
    hi_data, hi_header = pf.getdata(hi_dir + \
            'perseus_hi_galfa_cube_regrid_planckres.fits',
            header=True)
    velocity_axis = make_velocity_axis(hi_header)

    noise_cube, noise_header = pf.getdata(hi_dir + \
            'perseus_hi_galfa_cube_regrid_planckres_noise.fits', header=True)

    # define core properties
    with open(core_dir + 'perseus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    cores = convert_core_coordinates(cores, planck_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'perseus_av_boxes_',
            header = planck_header)

    # Initialize lists
    av_images = []
    av_error_images = []
    nhi_images = []
    nhi_error_images = []

    for core in cores:
        print('\nCalculating for core %s' % core)
        if box_method == 'ds9':
            # Grab the mask from the DS9 regions
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask = myg.get_rectangular_mask(av_data_planck,
                    xy[0], xy[1],
                    width = box_width,
                    height = box_height,
                    angle = box_angle)
        elif box_method == 'av_gradient':
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])
        else:
            raise ValueError('Method for boxes is either ds9 or av_gradient')

        indices = mask == 1

        # Get only the relevant pixels to decrease computation time
        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_planck_sub = np.copy(av_data_planck[indices])
        av_data_error_planck_sub = np.copy(av_data_error_planck[indices])

        # Derive N(HI) image
        nhi_image, nhi_image_error = calculate_nhi(cube=hi_data_sub,
                velocity_axis=velocity_axis,
                noise_cube=noise_cube_sub,
                velocity_range=cores[core]['hi_velocity_range'])

        nhi_images.append(nhi_image)
        nhi_error_images.append(nhi_image_error)
        av_images.append(av_data_planck_sub)
        av_error_images.append(av_data_error_planck_sub)

    plot_av_vs_nhi_grid(nhi_images,
                        av_images,
                        av_error_images=av_error_images,
                        nhi_error_images=nhi_error_images,
                        #limits=[0,14, 0,10],
                        scale=['linear', 'log'],
                        savedir=figure_dir,
                        plot_type='scatter',
                        filename='perseus_av_vs_nhi_panels.png',
                        color_scale='linear')

    # Derive N(HI) image
    nhi_image, nhi_image_error = calculate_nhi(cube=hi_data,
            velocity_axis=velocity_axis,
            noise_cube=noise_cube,
            velocity_range=cores[core]['hi_velocity_range'])

    # Plot correlation, similar to Figure 3 of Paradis et al. (2012)
    plot_av_vs_nhi(nhi_image,
            av_data_planck,
            savedir=figure_dir,
            scale=['log', 'linear'],
            filename='perseus_av_vs_nhi_global.png',
            color_scale='linear')
Example #14
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    likelihood_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)

    # Name of HI noise cube
    noise_cube_filename = 'taurus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.0005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 2.0

    # Name of property files results are written to
    global_property_file = 'taurus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-10, 10)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''
    noise_cube_filename += bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'taurus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'taurus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)
    #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
    #av_data_error *= 10.0

    hi_data, hi_header = fits.getdata(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename,
            header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             plot_progress=False
                             )

    # Combine region mask with new mask
    mask += np.logical_not(region_mask)

    # Derive center velocity from hi
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.1f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calculate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    plt.clf(); plt.close()
    nhi_image_copy = np.copy(nhi_image)
    nhi_image_copy[mask] = np.nan
    av_image_copy = np.copy(av_data)
    resid_image = av_image_copy - nhi_image_copy * dgr
    plt.imshow(resid_image, origin='lower')
    plt.title(r'$A_V$ Data - Model')
    plt.colorbar()
    plt.show()
Example #15
def correlate_hi_av(hi_cube=None, hi_velocity_axis=None, hi_noise_cube=None,
        av_image=None, av_image_error=None, velocity_centers=None,
        velocity_widths=None, return_correlations=True, dgr=None,
        plot_results=True, results_filename='', likelihood_filename=None,
        clobber=False, hi_vel_range_conf=0.68):

    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of HI velocity range in km/s which provides the
        best correlated N(HI) distribution with Av.
    correlations : array-like, optional
        Array of Pearson correlation coefficients corresponding to each
        permutation through the velocity centers and velocity widths.

    '''

    import numpy as np
    from scipy.stats import pearsonr
    from scipy.stats import kendalltau
    from myimage_analysis import calculate_nhi
    from scipy import signal
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the velocity ranges given a set of centers and widths
        velocity_ranges = np.zeros(shape=[len(velocity_centers) * \
                len(velocity_widths),2])
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                velocity_ranges[count, 0] = center - width/2.
                velocity_ranges[count, 1] = center + width/2.
                count += 1

        # calculate the correlation coefficient for each velocity range
        correlations = np.zeros(velocity_ranges.shape[0])
        pvalues = np.zeros(velocity_ranges.shape[0])

        for i, velocity_range in enumerate(velocity_ranges):
            nhi_image_temp, nhi_image_error = calculate_nhi(cube=hi_cube,
                    velocity_axis=hi_velocity_axis,
                    velocity_range=velocity_range,
                    noise_cube=hi_noise_cube)

            nhi_image = np.ma.array(nhi_image_temp,
                                    mask=np.isnan(nhi_image_temp))

            # Avoid NaNs
            indices = np.where((nhi_image_temp == nhi_image_temp) & \
                               (av_image == av_image))

            nhi_image_corr = nhi_image_temp[indices]
            nhi_image_error_corr = nhi_image_error[indices]
            av_image_corr = av_image[indices]
            if type(av_image_error) != float:
                av_image_error_corr = av_image_error[indices]
            else:
                av_image_error_corr = av_image_error


            # Create model of Av with N(HI) and DGR
            av_image_model = nhi_image_corr * dgr
            av_image_model_error = nhi_image_error_corr * dgr

            print('median of model and data (mag)')
            print(np.median(av_image_model), np.median(av_image_corr))

            logL = calc_logL(av_image_model,
                             av_image_corr,
                             data_error=av_image_error_corr)

            correlations[i] = -logL

            # Show progress every 10%
            total = float(correlations.shape[0])
            abs_step = int((total * 1)/10) or 10
            if i and not i % abs_step:
                print("\t{0:.0%} processed".format(i / total))

        # Normalize the log likelihoods
        correlations -= correlations.max()

        # Convert to likelihoods
        correlations = np.exp(correlations)

        # Normalize the likelihoods
        correlations = correlations / np.sum(correlations)

        # Avoid nans
        correlations = np.ma.array(correlations,
                mask=(correlations != correlations))

        # Reshape array
        correlations_image = np.empty((velocity_centers.shape[0],
                                       velocity_widths.shape[0]))
        correlations_image[:,:] = np.NaN
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                correlations_image[i,j] = correlations[count]
                count += 1

        # Write out fits file of likelihoods
        if write_mle:
            print('Writing likelihood grid to file:')
            print(likelihood_filename)
            header = fits.Header()
            header['NAXIS'] = 2
            header['CTYPE1'] = 'CENTERS'
            header['CTYPE2'] = 'WIDTHS'
            header['CRPIX1'] = 0
            header['CRPIX2'] = 0
            header['CRVAL1'] = velocity_centers[0]
            header['CRVAL2'] = velocity_widths[0]
            header['CDELT1'] = velocity_centers[1] - velocity_centers[0]
            header['CDELT2'] = velocity_widths[1] - velocity_widths[0]

            hdu = fits.PrimaryHDU(correlations_image, header=header)

            hdu.writeto(likelihood_filename, clobber=clobber)

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        correlations_image = hdu[0].data

        if len(velocity_centers) != correlations_image.shape[0] or \
            len(velocity_widths) != correlations_image.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

    # Define parameter resolutions
    delta_center = velocity_centers[1] - velocity_centers[0]
    delta_width = velocity_widths[1] - velocity_widths[0]

    # Derive marginal distributions of both centers and widths
    center_corr = np.sum(correlations_image, axis=1) / \
            np.sum(correlations_image)
    width_corr = np.sum(correlations_image, axis=0) / \
            np.sum(correlations_image)

    # Derive confidence intervals of parameters
    center_confint = threshold_area(velocity_centers,
                                    center_corr,
                                    area_fraction=hi_vel_range_conf)
    width_confint = threshold_area(velocity_widths,
                                   width_corr,
                                   area_fraction=hi_vel_range_conf)

    print('Velocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('Velocity centers = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(center_confint[0],
                                                    center_confint[2],
                                                    np.abs(center_confint[1])))

    # Write PDF
    center = center_confint[0]
    upper_lim = (center_confint[0] + width_confint[0]/2.)
    lower_lim = (center_confint[0] - width_confint[0]/2.)
    upper_lim_error = (center_confint[2]**2 + width_confint[2]**2)**0.5
    lower_lim_error = (center_confint[1]**2 + width_confint[1]**2)**0.5

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error,
            upper_lim_error)

    if plot_results:
        plot_correlations(correlations_image,
                          velocity_centers,
                          velocity_widths,
                          show=0,
                          returnimage=False,
                          filename=results_filename)
        plot_correlations_hist(correlations_image,
                              velocity_centers,
                              velocity_widths,
                              center_pdf=center_corr,
                              width_pdf=width_corr,
                              center_confint=center_confint,
                              width_confint=width_confint,
                              show=0,
                              returnimage=False,
                              filename=results_filename)

    if not return_correlations:
        return vel_range_confint
    else:
        return vel_range_confint, correlations_image, center_corr, width_corr
Example #16
    def nhi_test():

        '''
        operating on following cube:
        [[[  0.   1.]
          [  2.   3.]
          [  4.   5.]]

         [[  6.   7.]
          [  8.   9.]
          [ 10.  11.]]

         [[ 12.  13.]
          [ 14.  15.]
          [ 16.  17.]]

         [[ 18.  19.]
          [ 20.  21.]
          [ 22.  23.]]]

        and velocity axis as
        [0 0.5 1.0 1.5]


        '''

        import numpy as np
        from numpy.testing import assert_almost_equal
        from myimage_analysis import calculate_nhi

        # Create cube
        cube = np.empty((4,3,2))
        count = 0
        for index, element in np.ndenumerate(cube):
            cube[index] = count
            count += 1

        # create velocity axis in km/s
        delta_v = 0.5
        vel_axis = np.arange(0, cube.shape[0]*delta_v, delta_v)

        # Test with cube in 3 dimensions
        # No error returned
        # One vel range
        # --------------------------------------------------------------------------
        nhi_calc = calculate_nhi(cube=cube,
                                 velocity_axis=vel_axis,
                                 velocity_range=(0, 1.0))

        nhi_answer = np.array([[0 + 6 + 12, 1 + 7 + 13],
                               [2 + 8 + 14, 3 + 9 + 15],
                               [4 + 10 + 16, 5 + 11 + 17]],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)

        # Test with cube in 3 dimensions
        # Error returned
        # One vel range
        # --------------------------------------------------------------------------

        print('3D, Error, 1 vel range')

        noise_cube = 0.1 * np.copy(cube)

        nhi_calc, nhi_error_calc = calculate_nhi(cube=cube,
                                                 velocity_axis=vel_axis,
                                                 velocity_range=(0, 1.0),
                                                 noise_cube=noise_cube,
                                                 return_nhi_error=True)

        nhi_answer = np.array([[0 + 6 + 12, 1 + 7 + 13],
                               [2 + 8 + 14, 3 + 9 + 15],
                               [4 + 10 + 16, 5 + 11 + 17]],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        nhi_error_answer = np.array([[0**2 + 0.6**2 + 0.12**2,
                                      0.1**2 + 0.7**2 + 0.13**2],
                                     [0.2**2 + 0.8**2 + 0.14**2,
                                      0.3**2 + 0.9**2 + 0.15**2],
                                     [0.4**2 + 0.10**2 + 0.16**2,
                                      0.5**2 + 0.11**2 + 0.17**2]],
                              dtype=float)**0.5

        nhi_error_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)
        #assert_almost_equal(nhi_error_calc, nhi_error_answer)

        # Test with cube in 3 dimensions
        # No error returned
        # Image of vel ranges
        # --------------------------------------------------------------------------
        print('3D, no error, multiple vel range')

        velocity_range = np.array([[[0, 0],
                                    [0, 0],
                                    [0.5, 0.5]],
                                   [[1.0, 1.0],
                                    [1.0, 1.0],
                                    [1.5, 1.5]]])

        nhi_calc = calculate_nhi(cube=cube,
                                 velocity_axis=vel_axis,
                                 velocity_range=velocity_range)

        nhi_answer = np.array([[0 + 6 + 12, 1 + 7 + 13],
                               [2 + 8 + 14, 3 + 9 + 15],
                               [10 + 16 + 22, 11 + 17 + 23]],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)

        # Test with cube in 3 dimensions
        # Error returned
        # Image of vel ranges
        # --------------------------------------------------------------------------
        print('3D, error, multiple vel range')
        velocity_range = np.array([[[0, 0],
                                    [0, 0],
                                    [0.5, 0.5]],
                                   [[1.0, 1.0],
                                    [1.0, 1.0],
                                    [1.5, 1.5]]])

        noise_cube = 0.1 * np.copy(cube)

        nhi_calc, nhi_error = calculate_nhi(cube=cube,
                                 velocity_axis=vel_axis,
                                 velocity_range=velocity_range,
                                 noise_cube=noise_cube,
                                 return_nhi_error=True)

        nhi_answer = np.array([[0 + 6 + 12, 1 + 7 + 13],
                               [2 + 8 + 14, 3 + 9 + 15],
                               [10 + 16 + 22, 11 + 17 + 23]],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)


        # Test with 2 dimension
        # ==========================================================================
        print('2D, no error, 1 vel range')

        # Create cube
        cube = np.empty((4,6))
        count = 0
        for index, element in np.ndenumerate(cube):
            cube[index] = count
            count += 1

        # create velocity axis in km/s
        delta_v = 0.5
        vel_axis = np.arange(0, cube.shape[0]*delta_v, delta_v)

        # Test with cube in 3 dimensions
        # No error returned
        # One vel range
        # --------------------------------------------------------------------------
        nhi_calc = calculate_nhi(cube=cube,
                                 velocity_axis=vel_axis,
                                 velocity_range=(0, 1.0))

        nhi_answer = np.array([0 + 6 + 12, 1 + 7 + 13,
                               2 + 8 + 14, 3 + 9 + 15,
                               4 + 10 + 16, 5 + 11 + 17],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)

        # Test with cube in 2 dimensions
        # No error returned
        # Image of vel ranges
        # --------------------------------------------------------------------------
        print('2D, no error, multiple vel range')

        velocity_range = np.array([[0, 0,
                                    0, 0,
                                    0.5, 0.5],
                                   [1.0, 1.0,
                                    1.0, 1.0,
                                    1.5, 1.5]])

        print(velocity_range.shape, cube.shape)

        nhi_calc = calculate_nhi(cube=cube,
                                 velocity_axis=vel_axis,
                                 velocity_range=velocity_range)

        nhi_answer = np.array([0 + 6 + 12, 1 + 7 + 13,
                               2 + 8 + 14, 3 + 9 + 15,
                               10 + 16 + 22, 11 + 17 + 23],
                              dtype=float)

        nhi_answer *= 1.823e-2 * delta_v

        assert_almost_equal(nhi_calc, nhi_answer)
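The expected values in nhi_test follow the optically thin HI column density relation, N(HI) = 1.823 x 10^18 cm^-2 (K km/s)^-1 \int T_B dv. With the cube in K, the velocity axis in km/s, and N(HI) expressed in units of 10^20 cm^-2, this reduces to

    N_{20}(\mathrm{HI}) = 1.823\times10^{-2} \sum_v T_B(v)\,\Delta v

which is why each expected map above is a channel sum multiplied by 1.823e-2 * delta_v.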
Example #17
def plot_hi_width_correlation(cloud_results,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import mygeometry as myg
    import mycoords
    from scipy.stats import pearsonr
    import numpy as np

    filename = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_width_correlations.png'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
                  'dgr': props['dust2gas_ratio_max']['value'],
                  'intercept': props['intercept_max']['value']}

    if 1:
        if cloud_results['args']['data_type'] == 'lee12':
            av_filename = cloud.av_filename.replace('iris', '2mass')
        else:
            av_filename = cloud.av_filename

        av_data_2mass, av_header = fits.getdata(av_filename, header=True)


        av_filename = av_filename.replace('lee12_2mass_regrid_planckres',
                                          'planck_tau353_5arcmin')
        av_data_planck, av_header = fits.getdata(av_filename, header=True)
        hi_data = fits.getdata(cloud.hi_filename)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data_2mass)
    region_mask = cloud.region_mask

    widths = np.arange(2, 80, 2)
    vel_center = 5
    correlations = np.empty(widths.shape)
    correlations_masked_2mass = np.empty(widths.shape)
    correlations_masked_planck = np.empty(widths.shape)

    for i, width in enumerate(widths):

        vel_range = (vel_center - width/2.0, vel_center + width/2.0)

        nhi_image = calculate_nhi(cube=hi_data,
                            velocity_axis=cloud.hi_vel_axis,
                            velocity_range=vel_range,
                            )

        #print av_data.shape, region_mask.shape, nhi_image.shape
        nan_mask = (nhi_image < 0) | (np.isnan(av_data_2mass)) | \
                   (np.isnan(nhi_image))

        mask = np.copy(nan_mask)
        mask[(region_mask) | (cloud.mask)] = True

        lee12_mask = np.copy(nan_mask)
        lee12_mask[av_data_2mass < 5 * 0.20] = True

        # mask
        nhi_image_masked = nhi_image[~mask]
        av_data_2mass_masked = av_data_2mass[~mask]
        av_data_planck_masked = av_data_planck[~mask]

        # derive correlations for each mask
        correlations_masked_2mass[i] = \
                pearsonr(nhi_image_masked, av_data_2mass_masked)[0]
        correlations_masked_planck[i] = \
                pearsonr(nhi_image_masked, av_data_planck_masked)[0]
        correlations[i] = pearsonr(nhi_image[~lee12_mask],
                                   av_data_2mass[~lee12_mask])[0]

    import matplotlib.pyplot as plt
    plt.close(); plt.clf()
    av_masked = np.copy(av_data_2mass)
    av_masked[mask] = np.nan
    plt.imshow(av_masked, interpolation='nearest', origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/av_lee12map.png')

    cloudpy.plot_hi_width_correlation(widths,
                                      correlations,
                                      correlations_masked_2mass=\
                                              correlations_masked_2mass,
                                      correlations_masked_planck=\
                                              correlations_masked_planck,
                                      filename=filename,
                                      #limits=[0, 80, 0, 0.4]
                                      )
Example #18
def plot_av_vs_nhi(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_av_vs_nhi'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
                  'dgr': props['dust2gas_ratio_max']['value'],
                  'intercept': props['intercept_max']['value'],
                  'dgr_error': props['dust2gas_ratio_error']['value'],
                  'intercept_error': props['intercept_error']['value'],
                  }

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import mygeometry as myg
    import mycoords
    import numpy as np

    if 0:
        av_data = fits.getdata(cloud.av_filename)
        if cloud.av_error_filename is not None:
            av_error_data = fits.getdata(cloud.av_error_filename)
        else:
            av_error_data = np.ones(av_data.shape) * cloud.av_error
        hi_data = fits.getdata(cloud.hi_filename)

    av_data = fits.getdata(cloud.av_filename_bin)
    if cloud.av_error_filename_bin is not None:
        av_error_data = fits.getdata(cloud.av_error_filename_bin)
    else:
        av_error_data = np.ones(av_data.shape) * cloud.av_error
    hi_data = fits.getdata(cloud.hi_filename_bin)

    print(np.nansum(hi_data))

    hi_data = cloud._subtract_comps(hi_data=hi_data)

    print(np.nansum(hi_data))

    nhi_image = calculate_nhi(cube=hi_data,
                        velocity_axis=cloud.hi_vel_axis,
                        velocity_range=props['hi_velocity_range_max']['value'],
                        )


    nhi_image[nhi_image < 0] = np.nan

    if cloud.av_background is not None:
        av_data = av_data - cloud.av_background

    if cloud_results['args']['bin_image']:
        contour_plot = False
    else:
        contour_plot = True

    #contour_plot = 1

    levels = np.logspace(np.log10(0.999), np.log10(0.5), 10)
    levels = 7

    cloudpy.plot_av_vs_nhi(nhi_image[~cloud.mask],
                      av_data[~cloud.mask],
                      av_error=av_error_data[~cloud.mask],
                      filename=filename_base + '_masked.png',
                      fit_params=fit_params,
                      #limits=[3,20, 0, 3],
                      title=cloud_results['args']['data_type'] + ', masked',
                      levels=levels,
                      contour_plot=contour_plot,
                      #limits=[10, 20, -1, 1],
                      )
    if 1:
        av_data, av_header = fits.getdata(cloud.av_filename, header=True)
        if cloud.av_error_filename is not None:
            av_error_data = fits.getdata(cloud.av_error_filename)
        else:
            av_error_data = np.ones(av_data.shape) * cloud.av_error
        hi_data = fits.getdata(cloud.hi_filename)

    hi_data = cloud._subtract_comps(hi_data=hi_data)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    region_mask = cloud.region_mask

    nhi_image = calculate_nhi(cube=hi_data,
                        velocity_axis=cloud.hi_vel_axis,
                        velocity_range=props['hi_velocity_range_max']['value'],
                        )

    mask = (region_mask) | (nhi_image < 0)

    nhi_image[mask] = np.nan
    av_data[mask] = np.nan

    cloudpy.plot_av_vs_nhi(nhi_image,
                      av_data,
                      av_error=av_error_data,
                      filename=filename_base + '.png',
                      fit_params=fit_params,
                      gridsize=(10,10),
                      #limits=[1,20, 0, 4],
                      title=cloud_results['args']['data_type'] + \
                            ', unmasked',
                      std=0.22,
                      contour_plot=True,
                      plot_median=True,
                      #limits=[10, 20, -1, 1],
                      )
Example #19
    def test_calc_likelihoods_2():
        from numpy.testing import assert_array_almost_equal
        from numpy.testing import assert_almost_equal
        from myimage_analysis import calculate_nhi
        import numpy as np
        import matplotlib.pyplot as plt
        from matplotlib import cm
        from mpl_toolkits.axes_grid1 import ImageGrid

        av_image = np.array([[0, 0, 0, 0, 0],
                             [0, 1, 1, 1, 0],
                             [0, 1, 2, 1, 0],
                             [np.nan, 1, 1, 1, 0],
                             [0, 0, 0, 0, 0]])

        #av_image_error = np.random.normal(0.1, size=av_image.shape)
        av_image_error = 0.1 * np.ones(av_image.shape)

        #nhi_image = av_image + np.random.normal(0.1, size=av_image.shape)
        hi_cube = np.zeros((5, 5, 5))

        # make inner channels correlated with av
        hi_cube[:, :, :] = np.array(
            [
             [[  1., 0., 0., 0., 0.],
              [  np.nan, 0., 0., 0., 0.],
              [  0., 0., 0., 0., 0.],
              [  0., 0., 0., 0., 0.],
              [  1., 0., 0., 0., 10.],],

             [[  0., 0., 0., 0., 0.],
              [  0., 0., 2., 0., 0.],
              [  0., 0., 4., 0., 0.],
              [  0., 0., 2., 0., 0.],
              [  0., 0., 0., 0., 0.],],

             [[  0., 0., 0., 0., 0.],
              [  0., 0., 0., 2., 0.],
              [  0., 0., 0., 2., 0.],
              [  0., 0., 0., 2., np.nan],
              [  0., 0., 0., 0., 0.],],

             [[  0., 0., 0., 0., 0.],
              [  0., 2., 0., 0., 0.],
              [  0., 2., 0., 0., 0.],
              [  0., 2., 0., 0., 0.],
              [  0., 0., 0., 0., 0.],],

             [[  0., 0., 0., 0., 0.],
              [  0., 0., 0., 0., np.nan],
              [  0., 0., 0., 0., 0.],
              [  0., 0., 0., 0., 0.],
              [  1., 0., 0., 0., 0.2],],
             ]
             )

        if 1:
            fig = plt.figure(figsize=(4,4))
            imagegrid = ImageGrid(fig, (1,1,1),
                         nrows_ncols=(1,5),
                         ngrids=5,
                         cbar_mode="single",
                         cbar_location='top',
                         cbar_pad="2%",
                         cbar_size='3%',
                         axes_pad=0.1,
                         aspect=True,
                         label_mode='L',
                         share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            for i in xrange(5):
                im = imagegrid[i].imshow(hi_cube[i, :, :],
                                         origin='lower',
                                         #aspect='auto',
                                         cmap=cmap,
                                         interpolation='none',
                                         vmin=0,
                                         vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/hi_cube.png')

        # make edge channels totally uncorrelated
        #hi_cube[(0, 4), :, :] = np.arange(0, 25).reshape(5,5)
        #hi_cube[(0, 4), :, :] = - np.ones((5,5))

        hi_vel_axis = np.arange(0, 5, 1)

        # add intercept
        intercept_answer = 0.9
        av_image = av_image + intercept_answer

        if 1:
            fig = plt.figure(figsize=(4,4))
            params = {
              'figure.figsize': (1, 1),
              #'figure.titlesize': font_scale,
             }
            plt.rcParams.update(params)
            imagegrid = ImageGrid(fig, (1,1,1),
                         nrows_ncols=(1,1),
                         ngrids=1,
                         cbar_mode="single",
                         cbar_location='top',
                         cbar_pad="2%",
                         cbar_size='3%',
                         axes_pad=0.1,
                         aspect=True,
                         label_mode='L',
                         share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(av_image,
                                         origin='lower',
                                         #aspect='auto',
                                         cmap=cmap,
                                         interpolation='none',
                                         vmin=0,
                                         vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/av.png')

        width_grid = np.arange(0, 5, 1)
        dgr_grid = np.arange(0, 1, 0.1)
        intercept_grid = np.arange(-1, 1, 0.1)
        vel_center = 2

        results = \
            cloudpy._calc_likelihoods(
                              hi_cube=hi_cube / 1.823e-2,
                              hi_vel_axis=hi_vel_axis,
                              vel_center=vel_center,
                              av_image=av_image,
                              av_image_error=av_image_error,
                              width_grid=width_grid,
                              dgr_grid=dgr_grid,
                              intercept_grid=intercept_grid,
                              )

        dgr_answer = 1/2.0
        width_answer = 2
        width = results['width_max']
        dgr = results['dgr_max']
        intercept = results['intercept_max']
        print(width)

        if 0:
            width = width_answer
            intercept = intercept_answer
            dgr = dgr_answer

        vel_range = (vel_center - width / 2.0, vel_center + width / 2.0)

        nhi_image = calculate_nhi(cube=hi_cube,
                                  velocity_axis=hi_vel_axis,
                                  velocity_range=vel_range) / 1.823e-2
        if 1:
            fig = plt.figure(figsize=(4,4))
            imagegrid = ImageGrid(fig, (1,1,1),
                         nrows_ncols=(1,1),
                         ngrids=1,
                         cbar_mode="single",
                         cbar_location='top',
                         cbar_pad="2%",
                         cbar_size='3%',
                         axes_pad=0.1,
                         aspect=True,
                         label_mode='L',
                         share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(nhi_image,
                                     origin='lower',
                                     #aspect='auto',
                                     cmap=cmap,
                                     interpolation='none',
                                     vmin=0,
                                     vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/nhi.png')
        if 1:
            fig = plt.figure(figsize=(4,4))
            imagegrid = ImageGrid(fig, (1,1,1),
                         nrows_ncols=(1,1),
                         ngrids=1,
                         cbar_mode="single",
                         cbar_location='top',
                         cbar_pad="2%",
                         cbar_size='3%',
                         axes_pad=0.1,
                         aspect=True,
                         label_mode='L',
                         share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(nhi_image * dgr + intercept,
                                         origin='lower',
                                         #aspect='auto',
                                         cmap=cmap,
                                         interpolation='none',
                                         vmin=0,
                                         vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/av_model.png')

        print('residuals = ')
        print(av_image - (nhi_image * dgr + intercept))
        print('dgr', dgr)
        print('intercept', intercept)
        print('width', width)

        assert_almost_equal(results['intercept_max'], intercept_answer)
        assert_almost_equal(results['dgr_max'], dgr_answer)
        assert_almost_equal(results['width_max'], width_answer)
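The test above, and the likelihood grids computed in the functions that follow, all convert log-likelihoods into a normalized likelihood the same way: subtract the maximum, exponentiate, and divide by the sum. A minimal stand-alone sketch of that pattern (the function name normalize_loglikelihoods is hypothetical):

import numpy as np

def normalize_loglikelihoods(logL):
    """Convert a grid of log-likelihoods into a normalized likelihood grid,
    subtracting the maximum first for numerical stability."""
    logL = np.asarray(logL, dtype=float)
    likelihoods = np.exp(logL - np.nanmax(logL))
    return likelihoods / np.nansum(likelihoods)

# example: three grid points with log-likelihoods -1002, -1000, -1001
print(normalize_loglikelihoods([-1002.0, -1000.0, -1001.0]))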
def correlate_hi_av(hi_cube=None,
                    hi_velocity_axis=None,
                    hi_noise_cube=None,
                    av_image=None,
                    av_image_error=None,
                    velocity_centers=None,
                    velocity_widths=None,
                    return_correlations=True,
                    dgr=None,
                    plot_results=True,
                    results_filename='',
                    likelihood_filename=None,
                    clobber=False,
                    hi_vel_range_conf=0.68):
    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of HI velocity range in km/s which provides the
        best correlated N(HI) distribution with Av.
    correlations : array-like, optional
        Array of Pearson correlation coefficients corresponding to each
        permutation through the velocity centers and velocity widths.

    '''

    import numpy as np
    from scipy.stats import pearsonr
    from scipy.stats import kendalltau
    from myimage_analysis import calculate_nhi
    from scipy import signal
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the velocity ranges given a set of centers and widths
        velocity_ranges = np.zeros(shape=[len(velocity_centers) * \
                len(velocity_widths),2])
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                velocity_ranges[count, 0] = center - width / 2.
                velocity_ranges[count, 1] = center + width / 2.
                count += 1

        # calculate the correlation coefficient for each velocity range
        correlations = np.zeros(velocity_ranges.shape[0])
        pvalues = np.zeros(velocity_ranges.shape[0])

        for i, velocity_range in enumerate(velocity_ranges):
            nhi_image_temp, nhi_image_error = calculate_nhi(
                cube=hi_cube,
                velocity_axis=hi_velocity_axis,
                velocity_range=velocity_range,
                noise_cube=hi_noise_cube)

            nhi_image = np.ma.array(nhi_image_temp,
                                    mask=np.isnan(nhi_image_temp))

            # Avoid NaNs
            indices = np.where((nhi_image_temp == nhi_image_temp) & \
                               (av_image == av_image))

            nhi_image_corr = nhi_image_temp[indices]
            nhi_image_error_corr = nhi_image_error[indices]
            av_image_corr = av_image[indices]
            if type(av_image_error) != float:
                av_image_error_corr = av_image_error[indices]
            else:
                av_image_error_corr = av_image_error

            # Create model of Av with N(HI) and DGR
            av_image_model = nhi_image_corr * dgr
            av_image_model_error = nhi_image_error_corr * dgr

            print('median of model and data (mag)')
            print(np.median(av_image_model), np.median(av_image_corr))

            logL = calc_logL(av_image_model,
                             av_image_corr,
                             data_error=av_image_error_corr)

            correlations[i] = -logL

            # Show progress every 10%
            total = float(correlations.shape[0])
            abs_step = int(total / 10) or 10
            if i and not i % abs_step:
                print("\t{0:.0%} processed".format(i / total))

        # Normalize the log likelihoods
        correlations -= correlations.max()

        # Convert to likelihoods
        correlations = np.exp(correlations)

        # Normalize the likelihoods
        correlations = correlations / np.sum(correlations)

        # Avoid nans
        correlations = np.ma.array(correlations,
                                   mask=(correlations != correlations))

        # Reshape array
        correlations_image = np.empty(
            (velocity_centers.shape[0], velocity_widths.shape[0]))
        correlations_image[:, :] = np.NaN
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                correlations_image[i, j] = correlations[count]
                count += 1

        # Write out fits file of likelihoods
        if write_mle:
            print('Writing likelihood grid to file:')
            print(likelihood_filename)
            header = fits.Header()
            header['NAXIS'] = 2
            header['CTYPE1'] = 'CENTERS'
            header['CTYPE2'] = 'WIDTHS'
            header['CRPIX1'] = 0
            header['CRPIX2'] = 0
            header['CRVAL1'] = velocity_centers[0]
            header['CRVAL2'] = velocity_widths[0]
            header['CDELT1'] = velocity_centers[1] - velocity_centers[0]
            header['CDELT2'] = velocity_widths[1] - velocity_widths[0]

            hdu = fits.PrimaryHDU(correlations_image, header=header)

            hdu.writeto(likelihood_filename, clobber=clobber)

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        correlations_image = hdu[0].data

        if len(velocity_centers) != correlations_image.shape[0] or \
            len(velocity_widths) != correlations_image.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

    # Define parameter resolutions
    delta_center = velocity_centers[1] - velocity_centers[0]
    delta_width = velocity_widths[1] - velocity_widths[0]

    # Derive marginal distributions of both centers and widths
    center_corr = np.sum(correlations_image, axis=1) / \
            np.sum(correlations_image)
    width_corr = np.sum(correlations_image, axis=0) / \
            np.sum(correlations_image)

    # Derive confidence intervals of parameters
    center_confint = threshold_area(velocity_centers,
                                    center_corr,
                                    area_fraction=hi_vel_range_conf)
    width_confint = threshold_area(velocity_widths,
                                   width_corr,
                                   area_fraction=hi_vel_range_conf)

    print('Velocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('Velocity centers = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(center_confint[0],
                                                    center_confint[2],
                                                    np.abs(center_confint[1])))

    # Write PDF
    center = center_confint[0]
    upper_lim = (center_confint[0] + width_confint[0] / 2.)
    lower_lim = (center_confint[0] - width_confint[0] / 2.)
    upper_lim_error = (center_confint[2]**2 + width_confint[2]**2)**0.5
    lower_lim_error = (center_confint[1]**2 + width_confint[1]**2)**0.5

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error,
                         upper_lim_error)

    if plot_results:
        plot_correlations(correlations_image,
                          velocity_centers,
                          velocity_widths,
                          show=0,
                          returnimage=False,
                          filename=results_filename)
        plot_correlations_hist(correlations_image,
                               velocity_centers,
                               velocity_widths,
                               center_pdf=center_corr,
                               width_pdf=width_corr,
                               center_confint=center_confint,
                               width_confint=width_confint,
                               show=0,
                               returnimage=False,
                               filename=results_filename)

    if not return_correlations:
        return vel_range_confint
    else:
        return vel_range_confint, correlations_image, center_corr, width_corr
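threshold_area is called above to turn the marginal distributions into confidence intervals, but it is not listed in this example. The sketch below only assumes it returns (best value, negative error, positive error) for a gridded 1-D PDF; the helper name confint_from_pdf is hypothetical and uses an equal-tailed interval, which may differ from whatever threshold the real routine applies.

import numpy as np

def confint_from_pdf(x, pdf, area_fraction=0.68):
    """Return (best, lower_error, upper_error) from a gridded 1-D PDF."""
    x = np.asarray(x, dtype=float)
    pdf = np.asarray(pdf, dtype=float)
    pdf = pdf / pdf.sum()                     # normalize to unit area
    best = x[np.argmax(pdf)]                  # maximum-likelihood value
    cdf = np.cumsum(pdf)
    lower = np.interp((1.0 - area_fraction) / 2.0, cdf, x)
    upper = np.interp(1.0 - (1.0 - area_fraction) / 2.0, cdf, x)
    return best, lower - best, upper - best

# toy Gaussian-shaped marginal over velocity width
widths = np.linspace(0.0, 20.0, 201)
pdf = np.exp(-0.5 * ((widths - 8.0) / 2.0)**2)
print(confint_from_pdf(widths, pdf))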
Example #21
0
def main(dgr=None,
         vel_range=(-5, 15),
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Which cores to include in analysis?
    cores_to_keep = [  # taur
        'L1495',
        'L1495A',
        'B213',
        'L1498',
        'B215',
        'B18',
        'B217',
        'B220-1',
        'B220-2',
        'L1521',
        'L1524',
        'L1527-1',
        'L1527-2',
        # Calif
        'L1536',
        'L1483-1',
        'L1483-2',
        'L1482-1',
        'L1482-2',
        'L1478-1',
        'L1478-2',
        'L1456',
        'NGC1579',
        #'L1545',
        #'L1517',
        #'L1512',
        #'L1523',
        #'L1512',
        # Pers
        'B5',
        'IC348',
        'B1E',
        'B1',
        'NGC1333',
        'B4',
        'B3',
        'L1455',
        'L1448',
    ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_radiance_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_radiance_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nLoading global property file {0:s}.txt'.format(prop_file))
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    # Define velocity range
    props['hi_velocity_range'] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    cloud_dict = {
        'taurus': {},
        'perseus': {},
        'california': {},
    }

    # load Planck Av and GALFA HI images, on same grid
    for cloud in cloud_dict:

        print('\nLoading core properties for {0:s}'.format(cloud))

        file_dir = '/d/bip3/ezbc/{0:s}/data/av/'.format(cloud)

        # define core properties
        with open('/d/bip3/ezbc/{0:s}/data/python_output/'.format(cloud) + \
                  'core_properties/{0:s}_core_properties.txt'.format(cloud),
                  'r') as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == 'aldobaran':
            region_cloud = 'california'
        else:
            region_cloud = cloud
        core_filename = region_dir.replace('multicloud',region_cloud) + \
                        '/ds9_regions/{0:s}_av_poly_cores'.format(region_cloud)

        cores = load_ds9_core_region(cores,
                                     filename_base=core_filename,
                                     header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]['cores'] = cores

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        filename = 'multicloud_av_cores_map' + \
                   '.{0:s}'.format(figure_type)

        print('\nSaving Av cores map to \n' + filename)

        plot_cores_map(
            header=av_header,
            av_image=av_image,
            limits=props['plot_limit']['pixel'],
            regions=props['regions'],
            cloud_dict=cloud_dict,
            cores_to_keep=cores_to_keep,
            props=props,
            hi_vlimits=(0, 20),
            av_vlimits=(0, 16),
            #av_vlimits=(0.1,30),
            savedir=figure_dir + 'maps/',
            filename=filename,
            show=False)
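load_fits is used throughout this example but is not listed. A minimal sketch consistent with how it is called here (returning the image, and the header as well when return_header=True); the body below is an assumption, not the actual helper.

from astropy.io import fits

def load_fits(filename, return_header=False):
    """Read a FITS image, optionally returning its header."""
    data, header = fits.getdata(filename, header=True)
    if return_header:
        return data, header
    return data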
Example #22
0
def run_cloud_analysis(global_args, ):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
        'figure_dir': figure_dir,
        'cloud_name': cloud_name,
        'filename_base': filename_base,
        'plot_diagnostics': global_args['plot_diagnostics'],
        #'av_nhi_contour': av_nhi_contour,
        'av_nhi_contour': True,
        'av_nhi_limits': [0, 20, -1, 9],
        #'av_nhi_limits': None,
    }

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)

    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)

    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            # the kernel takes the standard deviation, not the FWHM
            g = Gaussian2DKernel(std)

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')

        else:
            # without smoothing, use the native-resolution cubes
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        velocity_range_dr1 = [-5, 15]
        gauss_fits = None
        gauss_fits_dr1 = None
        comp_num = None
        comp_num_dr1 = None
        hi_range_error = None

    hi_range_kwargs = {
        'velocity_range': velocity_range,
        'gauss_fits': gauss_fits,
        'comp_num': comp_num,
        'hi_range_error': hi_range_error,
        'vel_range': velocity_range,
        'gauss_fit_kwargs': gauss_fit_kwargs,
    }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_spectrum,
        gauss_fits=gauss_fits,
        comp_num=comp_num,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_dr1_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_dr1_spectrum,
        gauss_fits=gauss_fits_dr1,
        comp_num=comp_num_dr1,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range_dr1,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(
        nhi_image=nhi_image / nhi_image_dr1,
        header=hi_header,
        limits=[65, 45, 25, 35],
        filename=filename,
        show=0,
        cb_text='DR2 / DR1',
        #hi_vlimits=[0.91, 0.93],
    )
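calc_logL is imported from mystats above and reused by calc_likelihood_hi_av below, but its body is not listed. A standard Gaussian log-likelihood consistent with how it is called (model, data, data_error=...) is sketched here as an assumption; note that these examples negate the returned value before exponentiating, so the sign convention of the real routine may differ from the sketch.

import numpy as np

def calc_logL_sketch(model, data, data_error=None):
    """Gaussian log-likelihood of the data given a model and per-pixel errors."""
    data = np.asarray(data, dtype=float)
    model = np.asarray(model, dtype=float)
    if data_error is None:
        data_error = np.ones(data.shape)
    data_error = np.asarray(data_error, dtype=float) * np.ones(data.shape)
    return -0.5 * np.sum((data - model)**2 / data_error**2
                         + np.log(2.0 * np.pi * data_error**2))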
def calc_likelihood_hi_av(hi_cube=None, hi_velocity_axis=None,
        hi_noise_cube=None, av_image=None, av_image_error=None,
        velocity_centers=None, velocity_widths=None, return_likelihoods=True,
        dgrs=None, plot_results=True, results_filename='',
        likelihood_filename=None, clobber=False, conf=0.68,
        contour_confs=None):

    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of HI velocity range in km/s which provides the
        best correlated N(HI) distribution with Av.
    likelihoods : array-like, optional
        Array of likelihoods corresponding to each permutation through the
        velocity centers, velocity widths, and DGRs.

    '''

    import numpy as np
    from scipy.stats import pearsonr
    from scipy.stats import kendalltau
    from myimage_analysis import calculate_nhi
    from scipy import signal
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the velocity ranges given a set of centers and widths
        velocity_ranges = np.zeros(shape=[len(velocity_centers) * \
                len(velocity_widths),2])
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                velocity_ranges[count, 0] = center - width/2.
                velocity_ranges[count, 1] = center + width/2.
                count += 1

        # calculate the likelihood for each velocity center, width, and DGR
        likelihoods = np.zeros((len(velocity_centers),
                                 len(velocity_widths),
                                 len(dgrs)))

        # Progress bar parameters
        total = float(likelihoods.size)
        count = 0

        for i, velocity_center in enumerate(velocity_centers):
            for j, velocity_width in enumerate(velocity_widths):
                for k, dgr in enumerate(dgrs):

                    velocity_range = (velocity_center - velocity_width / 2.,
                                      velocity_center + velocity_width / 2.)

                    nhi_image_temp, nhi_image_error = \
                            calculate_nhi(cube=hi_cube,
                                velocity_axis=hi_velocity_axis,
                                velocity_range=velocity_range,
                                noise_cube=hi_noise_cube)

                    # Avoid NaNs
                    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                                       (av_image == av_image))

                    nhi_image_likelihood = nhi_image_temp[indices]
                    nhi_image_error_likelihood = nhi_image_error[indices]
                    av_image_likelihood = av_image[indices]
                    if type(av_image_error) != float:
                        av_image_error_likelihood = av_image_error[indices]
                    else:
                        av_image_error_likelihood = av_image_error

                    # Create model of Av with N(HI) and DGR
                    av_image_model = nhi_image_likelihood * dgr
                    av_image_model_error = nhi_image_error_likelihood * dgr

                    logL = calc_logL(av_image_model,
                                     av_image_likelihood,
                                     data_error=av_image_error_likelihood)

                    likelihoods[i, j, k] = -logL

                    # Show progress every 1%
                    count += 1
                    abs_step = int(total / 100) or 100
                    if count and not count % abs_step:
                        print("\t{0:.0%} processed".format(count / total))

        # Normalize the log likelihoods
        likelihoods -= likelihoods.max()

        # Convert to likelihoods
        likelihoods = np.exp(likelihoods)

        # Normalize the likelihoods
        likelihoods = likelihoods / \
            np.sum(likelihoods[~np.isnan(likelihoods)])

        # Write out fits file of likelihoods
        if write_mle:
            write_mle_tofits(filename=likelihood_filename,
                             velocity_centers=velocity_centers,
                             velocity_widths=velocity_widths,
                             dgrs=dgrs,
                             likelihoods=likelihoods,
                             clobber=clobber)

        # Avoid nans
        likelihoods = np.ma.array(likelihoods,
                mask=(likelihoods != likelihoods))

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        likelihoods = hdu[0].data

        if len(velocity_centers) != likelihoods.shape[0] or \
            len(velocity_widths) != likelihoods.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

        likelihoods = np.ma.array(likelihoods,
                mask=(likelihoods != likelihoods))

    # Define parameter resolutions
    #delta_center = velocity_centers[1] - velocity_centers[0]
    #delta_width = velocity_widths[1] - velocity_widths[0]

    # Derive marginal distributions of both centers and widths
    center_likelihood = np.sum(likelihoods, axis=(1,2)) / \
            np.sum(likelihoods)
    width_likelihood = np.sum(likelihoods, axis=(0,2)) / \
            np.sum(likelihoods)
    dgr_likelihood = np.sum(likelihoods, axis=(0,1)) / \
            np.sum(likelihoods)

    # Derive confidence intervals of parameters
    center_confint = threshold_area(velocity_centers,
                                    center_likelihood,
                                    area_fraction=conf)
    width_confint = threshold_area(velocity_widths,
                                   width_likelihood,
                                   area_fraction=conf)
    dgr_confint = threshold_area(dgrs,
                                 dgr_likelihood,
                                 area_fraction=conf)

    print('Velocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('Velocity centers = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(center_confint[0],
                                                    center_confint[2],
                                                    np.abs(center_confint[1])))
    print('DGR = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} 10^-20 cm^2 mag'.format(dgr_confint[0],
                                                    dgr_confint[2],
                                                    np.abs(dgr_confint[1])))

    # Write PDF
    center = center_confint[0]
    upper_lim = (center_confint[0] + width_confint[0]/2.)
    lower_lim = (center_confint[0] - width_confint[0]/2.)
    upper_lim_error = (center_confint[2]**2 + width_confint[2]**2)**0.5
    lower_lim_error = (center_confint[1]**2 + width_confint[1]**2)**0.5

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error,
            upper_lim_error)

    if plot_results:
        #plot_likelihoods(likelihoods[:,:, len(dgrs)/2],
        #                  velocity_centers,
        #                  velocity_widths,
        #                  show=0,
        #                  returnimage=False,
        #                  filename=results_filename)
        plot_likelihoods_hist(likelihoods,
                              velocity_centers,
                              velocity_widths,
                              x_confint=center_confint,
                              y_confint=width_confint,
                              plot_axes=('centers', 'widths'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_cw.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_centers,
                              dgrs,
                              x_confint=center_confint,
                              y_confint=dgr_confint,
                              plot_axes=('centers', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_cd.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_widths,
                              dgrs,
                              x_confint=width_confint,
                              y_confint=dgr_confint,
                              plot_axes=('widths', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_wd.png',
                              contour_confs=contour_confs)

    if not return_likelihoods:
        return vel_range_confint, dgr_confint
    else:
        return (vel_range_confint, dgr_confint, likelihoods,
            center_likelihood, width_likelihood, dgr_likelihood)
Example #24
0
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck', use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'california_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'california_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/av/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'california_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'california_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/california_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_width = props['hi_velocity_width_max']['value']
        vel_center = np.array(props['hi_velocity_center']['value'])
        vel_center = -4.0
        vel_range = (vel_center - vel_width / 2.0,
                     vel_center + vel_width / 2.0)
    if dgr is not None:
        props['dust2gas_ratio_max']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio_max']['value']
    intercept = props['intercept_max']['value']

    fit_params = {}
    fit_params['dgr'] = dgr
    fit_params['intercept'] = intercept

    # define core properties
    with open(core_dir + 'california_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    print('\nIntercept:')
    print('%.2f mag' % (intercept))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = 1

    if mask_images:
        av_image[mask] = np.nan
        nhi_image[mask] = np.nan
        av_image_error[mask] = np.nan
        av_model[mask] = np.nan

    indices = ((np.isnan(av_model)) & \
               (np.isnan(av_image)) & \
               (np.isnan(av_image_error)))

    if 1:
        import matplotlib.pyplot as plt
        plt.imshow(av_image)
        plt.show()

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            filename = 'california_av_vs_nhi_' + av_data_type + bin_string

        filename = figure_dir + filename + '.' + figure_type

        print('\nSaving Av model image to \n' + filename)

        plot_av_vs_nhi(nhi_image,
                av_image,
                av_error=av_image_error,
                #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                fit_params=fit_params,
                limits=[5,40,-0.2,2],
                #limits=[0,30,0,10],
                gridsize=(10,10),
                #scale=('log', 'log'),
                #scale=('linear', 'linear'),
                filename=filename,
                contour_plot=not use_binned_images,
                std=0.22,
                )
Example #25
0
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import os

    os.chdir('/d/bip3/ezbc/shield/749237_lowres/')

    # If true, deletes files to be written
    clobber = 1

    # First, change zeros in lee image to nans
    in_images = ('749237_rebin_cube.fits',)

    # Load the images into miriad
    out_images = []
    for in_image in in_images:

        print('Binning cube:\n' + in_image)

        cube, header = fits.getdata(in_image, header=True)

        # set freq0 setting
        header['FREQ0'] = 1.4204058E+09
        header['RESTFREQ'] = 1.4204058E+09
        header['CTYPE3'] = 'VELO'

        beamsize = header['BMAJ']
        cdelt = np.abs(header['CDELT1'])
        binsize = int(beamsize / cdelt)

        print('\tBinsize = ' + str(binsize))


        if 1:
            # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
            # cube flux calibration error = 10%
            # add errors quadratically
            cube_std = np.nanstd(cube[0, :, :])
            cube_error = ((0.1 * cube)**2 + cube_std**2)**0.5

        cube_bin, header_bin = bin_image(cube,
                                         binsize=(1, binsize, binsize),
                                         header=header,
                                         statistic=np.nanmean,
                                         )

        # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
        # cube flux calibration error = 10%
        # add errors quadratically
        cube_bin_std = np.nanstd(cube_bin[0, :, :])
        cube_error_bin = ((0.1 * cube_bin)**2 + cube_bin_std**2)**0.5

        if 0:
            noise_func = lambda x: (1 / np.nansum(x**-2))**0.5
            cube_error_bin = bin_image(cube_error,
                                       binsize=(1, binsize, binsize),
                                       statistic=noise_func,
                                       )

        fits.writeto(in_image,
                     cube,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error'),
                     cube_error,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube.fits', 'cube_regrid.fits'),
                     cube_bin,
                     header_bin,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error_regrid'),
                     cube_error_bin,
                     header_bin,
                     clobber=clobber)
        #else:
        #    cube_bin, header_bin = \
        #        fits.getdata(in_image.replace('cube.fits', 'cube_regrid.fits'),
        #                     clobber=clobber, header=True)

        # make nhi_image
        velocity_axis = make_velocity_axis(header_bin)

        # convert to T_B
        cube_bin_tb = 1.36 * 21**2 * cube_bin * 1000.0 / \
                      (header_bin['BMAJ'] * 3600.) / \
                      (3600. * header_bin['BMIN'])

        cube_tb = 1.36 * 21**2 * cube * 1000.0 / \
                      (header['BMAJ'] * 3600.) / \
                      (3600. * header['BMIN'])

        # Convert moment-zero images to column density units.
        #   Recall: 1 K = (7.354E-8) * [Bmaj(") * Bmin(") / lambda^2(m)] Jy/Bm
        #
        #   Here, units of the images are Jy/Bm m/s; cellsize = 2";
        #   lambda = 0.211061140507 m
        #
        #   Thus, for the 21 cm line of hydrogen, we have:
        #       1 K = Bmaj(") * Bmin(") / (6.057493205E5) Jy/Bm
        #           ---- OR ----
        #       1 Jy/Bm = (6.057493205E5) / [Bmaj(") * Bmin(")] K
        #
        #   Now, recall that: N_HI = (1.8224E18 cm^-2) * [T_b (K)] * int(dv)
        #       -- For moment maps in K km/s, just input the values and
        #          multiply by the coefficient.
        #       -- Ensure that units are Jy/Bm km/s (i.e., divide by 1000).
        #       -- Leave in units of 1E20 cm^-2 by dividing by 1E20:
        #
        #   For a x beam:
        #       N_HI (cm^-2) = (image) *
        #           [(6.057493205E5)/(*)] * (1/1000) * (1.8224E18 cm^-2) *
        #           (1/1E20)
        #       N_HI (cm^-2) = (image)*

        nhi_image = calculate_nhi(cube_bin_tb,
                                  velocity_axis=velocity_axis,
                                  header=header_bin,
                                  fits_filename=\
                            in_image.replace('cube.fits', 'nhi_regrid.fits')  )
        nhi_image = calculate_nhi(cube_tb,
                                  velocity_axis=velocity_axis,
                                  header=header,
                                  fits_filename=\
                            in_image.replace('cube.fits', 'nhi.fits')  )
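
The comment block above compresses the Jy/beam to brightness-temperature to N(HI) arithmetic into prose. As a point of reference, a standalone sketch of the same conversion is given below; the function name jybeam_cube_to_nhi and the use of np.trapz are illustrative assumptions, not part of myimage_analysis.calculate_nhi.

import numpy as np

def jybeam_cube_to_nhi(cube_jybm, velocity_axis, bmaj_deg, bmin_deg):
    """Sketch: Jy/beam cube -> N(HI) in units of 1e20 cm^-2 (21 cm line).

    Assumes the velocity axis is in km/s along the cube's first dimension.
    """
    # Jy/Bm -> K: T_B ~= 1.36 * lambda(cm)^2 * S(mJy/Bm) / [Bmaj(") * Bmin(")]
    bmaj_arcsec = bmaj_deg * 3600.0
    bmin_arcsec = bmin_deg * 3600.0
    cube_tb = 1.36 * 21.0**2 * cube_jybm * 1000.0 / (bmaj_arcsec * bmin_arcsec)

    # Integrate T_B over velocity: N(HI) = 1.8224e18 cm^-2 * int T_B dv [K km/s]
    tb_dv = np.trapz(cube_tb, x=velocity_axis, axis=0)

    # Express the result in units of 1e20 cm^-2, as in the comment above
    return 1.8224e18 * tb_dv / 1e20
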
Example #26
    def test_scatter_contour():

        from astropy.io import fits
        from myimage_analysis import calculate_nhi
        import mygeometry as myg
        from mycoords import make_velocity_axis

        # Parameters
        # ----------
        levels = (0.99, 0.985, 0.7)
        levels = (
            0.999,
            0.998,
            0.96,
            0.86,
            0.58,
        )
        levels = 7
        levels = np.logspace(np.log10(0.995), np.log10(0.50), 5)
        log_counts = 0
        limits = [1, 10, -3, 30]
        limits = None

        # Begin test
        # ----------
        data_dir = '/d/bip3/ezbc/perseus/data/'
        av = fits.getdata(data_dir +
                          'av/perseus_av_planck_tau353_5arcmin.fits')
        hi, hi_header = fits.getdata(data_dir + \
                          'hi/perseus_hi_galfa_cube_regrid_planckres.fits',
                          header=True)

        hi_vel_axis = make_velocity_axis(hi_header)

        nhi = calculate_nhi(
            cube=hi,
            velocity_axis=hi_vel_axis,
            velocity_range=[0, 10],
        )

        # Drop the NaNs from the images
        indices = np.where((av == av) &\
                           (nhi == nhi)
                           )

        av_nonans = av[indices]
        nhi_nonans = nhi[indices]

        fig, ax = plt.subplots()

        if limits is None:
            xmin = np.min(nhi_nonans)
            ymin = np.min(av_nonans)
            xmax = np.max(nhi_nonans)
            ymax = np.max(av_nonans)
            xscalar = 0.25 * xmax
            yscalar = 0.25 * ymax
            limits = [
                xmin - xscalar, xmax + xscalar, ymin - yscalar, ymax + yscalar
            ]

        contour_range = ((limits[0], limits[1]), (limits[2], limits[3]))

        cmap = myplt.truncate_colormap(plt.cm.binary, 0.2, 1, 1000)

        l1 = myplt.scatter_contour(
            nhi_nonans.ravel(),
            av_nonans.ravel(),
            threshold=3,
            log_counts=log_counts,
            levels=levels,
            ax=ax,
            histogram2d_args=dict(bins=30, range=contour_range),
            plot_args=dict(marker='o',
                           linestyle='none',
                           color='black',
                           alpha=0.3,
                           markersize=2),
            contour_args=dict(
                #cmap=plt.cm.binary,
                cmap=cmap,
                #cmap=cmap,
            ),
        )

        scale = ['linear', 'linear']
        ax.set_xscale(scale[0], nonposx='clip')
        ax.set_yscale(scale[1], nonposy='clip')

        ax.set_xlim(limits[0], limits[1])
        ax.set_ylim(limits[2], limits[3])

        # Adjust aesthetics
        ax.set_xlabel(r'$N($H$\textsc{i}) \times\,10^{20}$ cm$^{-2}$')
        ax.set_ylabel(r'$A_V$ [mag]')
        #ax.set_title(core_names[i])
        ax.legend(loc='lower right')

        plt.savefig('test_plots/test_scatter_contour.png')
Example #27
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'

    # Load Data
    # ---------
    # Load global properties of cloud
    # global properties written from script
    # 'av/perseus_analysis_global_properties.txt'
    prop_file = 'perseus_global_properties'  # _' + av_data_type
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if props['use_binned_image']:
        bin_string = '_bin'
    else:
        bin_string = ''

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_planck_radiance_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'perseus_av_error_planck_radiance_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_planck_5arcmin' + bin_string + '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'perseus_av_error_planck_5arcmin' + bin_string + '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename +
                                            bin_string + '.fits',
                                            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres' + bin_string + '.fits',
            return_header=True)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'perseus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'perseus_av_boxes_',
                            header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    # Mask the images based on the Av threshold
    co_data_nonans = np.copy(co_data)
    co_data_nonans[np.isnan(co_data_nonans)] = 0.0
    co_mom0 = np.sum(co_data_nonans, axis=0)
    mask = ((av_image > props['av_threshold']['value']) & \
            (co_mom0 > props['co_threshold']['value']))

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask'])

    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_image_masked[mask == 1] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_error_masked[mask == 1] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_model_masked[mask == 1] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask == 1] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1, 2))

    # Derive CO spectrum
    co_data[:, region_mask == 1] = 0
    co_data[np.isnan(co_data)] = 0
    co_spectrum = np.mean(co_data, axis=(1, 2))

    # Plot
    figure_types = [
        'png',
    ]  # 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/perseus_av_model_map_' + \
                    av_data_type + '.%s' % figure_type
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/perseus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'perseus_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        print('\nSaving Av model image to \n' + filename)

        plot_av_model(
            av_image=av_image_masked,
            av_model=av_model_masked,
            header=av_header,
            results=props,
            hi_velocity_axis=velocity_axis,
            vel_range=vel_range,
            hi_spectrum=hi_spectrum,
            #hi_limits=[-15, 25, -1, 10],
            hi_limits=[-15, 25, None, None],
            co_spectrum=co_spectrum,
            co_velocity_axis=co_velocity_axis,
            limits=props['plot_limit']['pixel'],
            savedir=figure_dir + 'maps/av_models/',
            filename=filename,
            show=False)

        plot_avmod_vs_av(
            (av_model_masked, ),
            (av_image_masked, ),
            av_errors=(av_error_masked, ),
            #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
            limits=[0, 1.5, 0, 1.5],
            savedir=figure_dir + 'av/',
            gridsize=(10, 10),
            #scale=('log', 'log'),
            #scale=('linear', 'linear'),
            filename='perseus_avmod_vs_av.%s' % figure_type,
            show=False,
            std=0.22,
        )

        plot_power_spectrum(av_image_masked - av_model_masked,
            filename_prefix='perseus_av_resid_power_spectrum_' + \
                            '{0:s}'.format(av_data_type),
            filename_suffix='.{0:s}'.format(figure_type),
            savedir=figure_dir + 'power_spectra/',
            show=False)
Example #28
def plot_hi_width_correlation(cloud_results, ):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import mygeometry as myg
    import mycoords
    from scipy.stats import pearsonr

    filename = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_width_correlations.png'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
        'dgr': props['dust2gas_ratio_max']['value'],
        'intercept': props['intercept_max']['value']
    }

    if 1:
        if cloud_results['args']['data_type'] == 'lee12':
            av_filename = cloud.av_filename.replace('iris', '2mass')
        else:
            av_filename = cloud.av_filename

        av_data_2mass, av_header = fits.getdata(av_filename, header=True)

        av_filename = av_filename.replace('lee12_2mass_regrid_planckres',
                                          'planck_tau353_5arcmin')
        av_data_planck, av_header = fits.getdata(av_filename, header=True)
        hi_data = fits.getdata(cloud.hi_filename)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data_2mass)
    region_mask = cloud.region_mask

    widths = np.arange(2, 80, 2)
    vel_center = 5
    correlations = np.empty(widths.shape)
    correlations_masked_2mass = np.empty(widths.shape)
    correlations_masked_planck = np.empty(widths.shape)

    for i, width in enumerate(widths):

        vel_range = (vel_center - width / 2.0, vel_center + width / 2.0)

        nhi_image = calculate_nhi(
            cube=hi_data,
            velocity_axis=cloud.hi_vel_axis,
            velocity_range=vel_range,
        )

        #print av_data.shape, region_mask.shape, nhi_image.shape
        nan_mask = (nhi_image < 0) | (np.isnan(av_data_2mass)) | \
                   (np.isnan(nhi_image))

        mask = np.copy(nan_mask)
        mask[(region_mask) | (cloud.mask)] = True

        lee12_mask = np.copy(nan_mask)
        lee12_mask[av_data_2mass < 5 * 0.20] = True

        # mask
        nhi_image_masked = nhi_image[~mask]
        av_data_2mass_masked = av_data_2mass[~mask]
        av_data_planck_masked = av_data_planck[~mask]

        # derive correlations for each mask
        correlations_masked_2mass[i] = \
                pearsonr(nhi_image_masked, av_data_2mass_masked)[0]
        correlations_masked_planck[i] = \
                pearsonr(nhi_image_masked, av_data_planck_masked)[0]
        correlations[i] = pearsonr(nhi_image[~lee12_mask],
                                   av_data_2mass[~lee12_mask])[0]

    import matplotlib.pyplot as plt
    plt.close()
    plt.clf()
    av_masked = np.copy(av_data_2mass)
    av_masked[mask] = np.nan
    plt.imshow(av_masked, interpolation='nearest', origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/av_lee12map.png')

    cloudpy.plot_hi_width_correlation(widths,
                                      correlations,
                                      correlations_masked_2mass=\
                                              correlations_masked_2mass,
                                      correlations_masked_planck=\
                                              correlations_masked_planck,
                                      filename=filename,
                                      #limits=[0, 80, 0, 0.4]
                                      )
Example #29
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck',
         use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'taurus_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'taurus_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'taurus_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'taurus_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/taurus_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'taurus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'taurus_av_boxes_',
                            header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = False
    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan

    if mask_images:
        av_image_masked[mask] = np.nan
        av_error_masked[mask] = np.nan
        av_model_masked[mask] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    if 0:
        import matplotlib.pyplot as plt
        av_plot_data = np.copy(av_image)
        av_plot_data[mask] = np.nan
        plt.imshow(av_plot_data, origin='lower')
        plt.xlim(props['plot_limit_bin']['pixel'][0:3:2])
        plt.ylim(props['plot_limit_bin']['pixel'][1:4:2])
        plt.show()

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1, 2))

    if not use_binned_images:
        # Derive CO spectrum
        co_data[:, mask] = 0
        co_data[np.isnan(co_data)] = 0
        co_spectrum = np.mean(co_data, axis=(1, 2))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/taurus_av_model_map_' + \
                    av_data_type + bin_string
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/taurus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'taurus_av_model_map_region{0:.0f}'.format(region)

        print('\nSaving Av model image to \n' + figure_dir + filename + \
                '.' + figure_type)

        if 0:
            plot_av_model(av_image=av_image_masked,
                          av_model=av_model_masked,
                          header=av_header,
                          results=props,
                          limits=props['plot_limit' + bin_string]['pixel'],
                          savedir=figure_dir + 'maps/av_models/',
                          filename=filename + '.' + figure_type,
                          show=False)

        if 1:
            #if not use_binned_images:
            if 0:
                plot_av_model(
                    av_image=av_image_masked,
                    av_model=av_model_masked,
                    header=av_header,
                    results=props,
                    hi_velocity_axis=velocity_axis,
                    vel_range=vel_range,
                    hi_spectrum=hi_spectrum,
                    #hi_limits=[-15, 25, -1, 10],
                    hi_limits=[-15, 25, None, None],
                    co_spectrum=co_spectrum,
                    co_velocity_axis=co_velocity_axis,
                    limits=props['plot_limit' + bin_string]['pixel'],
                    savedir=figure_dir + 'maps/av_models/',
                    filename=filename + '_spectra' + '.' + figure_type,
                    show=False)

            plot_avmod_vs_av(
                (av_model_masked, ),
                (av_image_masked, ),
                av_errors=(av_error_masked, ),
                #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                limits=[0, 20, 0, 3],
                savedir=figure_dir + 'av/',
                gridsize=(10, 10),
                #scale=('log', 'log'),
                #scale=('linear', 'linear'),
                filename='taurus_avmod_vs_av%s.%s' % (bin_string, figure_type),
                show=False,
                std=0.22,
            )

        if 0:
            plot_power_spectrum(av_image_masked - av_model_masked,
                filename_prefix='taurus_av_resid_power_spectrum_' + \
                                '{0:s}'.format(av_data_type),
                filename_suffix='.{0:s}'.format(figure_type),
                savedir=figure_dir + 'power_spectra/',
                show=False)
Example #30
def plot_av_vs_nhi(cloud_results):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_av_vs_nhi'

    cloud = cloud_results['cloud']
    props = cloud.props
    fit_params = {
        'dgr': props['dust2gas_ratio_max']['value'],
        'intercept': props['intercept_max']['value'],
        'dgr_error': props['dust2gas_ratio_error']['value'],
        'intercept_error': props['intercept_error']['value'],
    }

    from astropy.io import fits
    from myimage_analysis import calculate_nhi
    import mygeometry as myg
    import mycoords

    if 0:
        av_data = fits.getdata(cloud.av_filename)
        if cloud.av_error_filename is not None:
            av_error_data = fits.getdata(cloud.av_error_filename)
        else:
            av_error_data = np.ones(av_data.shape) * cloud.av_error
        hi_data = fits.getdata(cloud.hi_filename)

    av_data = fits.getdata(cloud.av_filename_bin)
    if cloud.av_error_filename_bin is not None:
        av_error_data = fits.getdata(cloud.av_error_filename_bin)
    else:
        av_error_data = np.ones(av_data.shape) * cloud.av_error
    hi_data = fits.getdata(cloud.hi_filename_bin)

    print(np.nansum(hi_data))

    hi_data = cloud._subtract_comps(hi_data=hi_data)

    print(np.nansum(hi_data))

    nhi_image = calculate_nhi(
        cube=hi_data,
        velocity_axis=cloud.hi_vel_axis,
        velocity_range=props['hi_velocity_range_max']['value'],
    )

    nhi_image[nhi_image < 0] = np.nan

    if cloud.av_background is not None:
        av_data = av_data - cloud.av_background

    if cloud_results['args']['bin_image']:
        contour_plot = False
    else:
        contour_plot = True

    #contour_plot = 1

    levels = np.logspace(np.log10(0.999), np.log10(0.5), 10)
    levels = 7

    cloudpy.plot_av_vs_nhi(
        nhi_image[~cloud.mask],
        av_data[~cloud.mask],
        av_error=av_error_data[~cloud.mask],
        filename=filename_base + '_masked.png',
        fit_params=fit_params,
        #limits=[3,20, 0, 3],
        title=cloud_results['args']['data_type'] + ', masked',
        levels=levels,
        contour_plot=contour_plot,
        #limits=[10, 20, -1, 1],
    )
    if 1:
        av_data, av_header = fits.getdata(cloud.av_filename, header=True)
        if cloud.av_error_filename is not None:
            av_error_data = fits.getdata(cloud.av_error_filename)
        else:
            av_error_data = np.ones(av_data.shape) * cloud.av_error
        hi_data = fits.getdata(cloud.hi_filename)

    hi_data = cloud._subtract_comps(hi_data=hi_data)

    # Derive relevant region
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    region_mask = cloud.region_mask

    nhi_image = calculate_nhi(
        cube=hi_data,
        velocity_axis=cloud.hi_vel_axis,
        velocity_range=props['hi_velocity_range_max']['value'],
    )

    mask = (region_mask) | (nhi_image < 0)

    nhi_image[mask] = np.nan
    av_data[mask] = np.nan

    cloudpy.plot_av_vs_nhi(nhi_image,
                      av_data,
                      av_error=av_error_data,
                      filename=filename_base + '.png',
                      fit_params=fit_params,
                      gridsize=(10,10),
                      #limits=[1,20, 0, 4],
                      title=cloud_results['args']['data_type'] + \
                            ', unmasked',
                      std=0.22,
                      contour_plot=True,
                      plot_median=True,
                      #limits=[10, 20, -1, 1],
                      )
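
The fit_params dictionaries in these examples carry both a DGR and an intercept; the model they describe is the simple linear dust relation sketched below. The helper name av_from_nhi is illustrative and not part of cloudpy or myimage_analysis.

def av_from_nhi(nhi_image, dgr, intercept=0.0):
    """Sketch: linear dust model, Av = DGR * N(HI) + intercept.

    With N(HI) in 1e20 cm^-2 and DGR in mag per 1e20 cm^-2, Av is in mag.
    """
    return dgr * nhi_image + intercept
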
def run_cloud_analysis(global_args,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']
    region = global_args['region']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir =  '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'

    if region == 'east':
        hi_filename = '/d/bip2/DR2W_v1/Narrow/' + \
            'GALFA_HI_RA+DEC_060.00+26.35_N.fits'
        hi_dr1_filename = '/d/bip3/ezbc/galfa/DR1/' + \
            'GALFA_HI_RA+DEC_060.00+26.35_N.fits'
    else:
        hi_filename = '/d/bip2/DR2W_v1/Narrow/' + \
            'GALFA_HI_RA+DEC_052.00+26.35_N.fits'
        hi_dr1_filename = '/d/bip3/ezbc/galfa/DR1/' + \
            'GALFA_HI_RA+DEC_052.00+26.35_N.fits'

    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)

    hi_vel_axis = make_velocity_axis(hi_header)
    velocity_range = [-5, 15]

    # use the vel range to derive N(HI)
    nhi_image = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals_perseus_' + region + '.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=None,
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1'
                   #hi_vlimits=None,
                   )
def main(
    dgr=None, vel_range=None, vel_range_type="single", region=None, av_data_type="planck", use_binned_images=False
):
    """ Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    """

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = "_bin"
    else:
        bin_string = ""

    # Name of noise cube
    noise_cube_filename = "california_hi_galfa_cube_regrid_planckres_noise" + bin_string + ".fits"

    # Name of property files results are written to
    prop_file = "california_global_properties_" + av_data_type + "_scaled"

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {"wcs": (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))), "pixel": ()}
    elif region == 2:
        region_limit = {"wcs": (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))), "pixel": ()}
    elif region == 3:
        region_limit = {"wcs": (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))), "pixel": ()}
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/california/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/california/figures/av/"
    av_dir = "/d/bip3/ezbc/california/data/av/"
    hi_dir = "/d/bip3/ezbc/california/data/hi/"
    co_dir = "/d/bip3/ezbc/california/data/co/"
    core_dir = "/d/bip3/ezbc/california/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/california/data/python_output/"
    region_dir = "/d/bip3/ezbc/california/data/python_output/ds9_regions/"

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == "lee12_2mass":
        print ("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_lee12_2mass_regrid_planckres" + bin_string + ".fits", return_header=True
        )
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "lee12_iris":
        print ("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_lee12_iris_regrid_planckres" + bin_string + ".fits", return_header=True
        )
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print ("\nLoading Planck data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_planck_5arcmin" + bin_string + ".fits", return_header=True
        )

        av_image_error, av_error_header = load_fits(
            av_dir + "california_av_error_planck_5arcmin" + bin_string + ".fits", return_header=True
        )

    hi_cube, hi_header = load_fits(
        hi_dir + "california_hi_galfa_cube_regrid_planckres" + bin_string + ".fits", return_header=True
    )

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename, return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(
            co_dir + "california_co_cfa_cube_regrid_planckres" + bin_string + ".fits", return_header=True
        )

    # Load global properties of cloud
    # global properties written from script
    # 'av/california_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += "_region{0:.0f}".format(region)
        results_filename += "_region{0:.0f}".format(region)

    print ("\nReading global parameter file\n" + prop_file + ".txt")
    with open(property_dir + prop_file + ".txt", "r") as f:
        props = json.load(f)

    if vel_range is not None:
        props["hi_velocity_range"] = vel_range
    else:
        vel_width = props["hi_velocity_width_max"]["value"]
        vel_center = np.array(props["hi_velocity_center"]["value"])
        vel_center = -4.0
        vel_range = (vel_center - vel_width / 2.0, vel_center + vel_width / 2.0)
    if dgr is not None:
        props["dust2gas_ratio_max"]["value"] = dgr
    else:
        dgr = props["dust2gas_ratio_max"]["value"]

    # intercept is needed below regardless of whether dgr was supplied
    intercept = props["intercept_max"]["value"]

    fit_params = {}
    fit_params["dgr"] = dgr
    fit_params["intercept"] = intercept

    # define core properties
    with open(core_dir + "california_core_properties.txt", "r") as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores, filename_base=region_dir + "california_av_boxes_", header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(
        cube=hi_cube, velocity_axis=velocity_axis, velocity_range=vel_range, header=hi_header, noise_cube=hi_noise_cube
    )

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == "single":
        print ("\nHI velocity integration range:")
        print ("%.1f to %.1f km/s" % (vel_range[0], vel_range[1]))
    elif vel_range_type == "multiple":
        print ("\nHI velocity integration ranges:")
        for i in xrange(0, vel_range.shape[0]):
            print ("%.1f to %.1f km/s" % (vel_range[i, 0], vel_range[i, 1]))

    print ("\nDGR:")
    print ("%.2f x 10^-20 cm^2 mag" % (dgr))

    print ("\nIntercept:")
    print ("%.2f mag" % (intercept))

    # Get mask and mask images
    mask = np.asarray(props["mask" + bin_string])

    mask_images = 1

    if mask_images:
        av_image[mask] = np.nan
        nhi_image[mask] = np.nan
        av_image_error[mask] = np.nan
        av_model[mask] = np.nan

    indices = (np.isnan(av_model)) & (np.isnan(av_image)) & (np.isnan(av_image_error))

    if 1:
        import matplotlib.pyplot as plt

        plt.imshow(av_image)
        plt.show()

    print ("\nTotal number of pixels after masking = " + str(props["npix"]))

    # Plot
    figure_types = ["png", "pdf"]
    for figure_type in figure_types:
        if region is None:
            filename = "california_av_vs_nhi_" + av_data_type + bin_string

        filename = figure_dir + filename + "." + figure_type

        print ("\nSaving Av model image to \n" + filename)

        plot_av_vs_nhi(
            nhi_image,
            av_image,
            av_error=av_image_error,
            # limits=[10**-1, 10**1.9, 10**0, 10**1.7],
            fit_params=fit_params,
            limits=[5, 40, -0.2, 2],
            # limits=[0,30,0,10],
            gridsize=(10, 10),
            # scale=('log', 'log'),
            # scale=('linear', 'linear'),
            filename=filename,
            contour_plot=not use_binned_images,
            std=0.22,
        )
def calc_likelihoods(
        hi_cube=None,
        hi_vel_axis=None,
        av_image=None,
        av_image_error=None,
        vel_center=None,
        vel_widths=None,
        dgrs=None,
        plot_results=False,
        results_filename='',
        return_likelihoods=True,
        likelihood_filename=None,
        clobber=False,
        conf=0.68,
        threshold_delta_dgr=0.0005,
        ):

    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of the HI velocity range in km/s which provides
        the N(HI) distribution best correlated with Av.
    likelihoods : array-like, optional
        Array of likelihoods corresponding to each permutation through the
        velocity centers and velocity widths.

    '''

    import numpy as np
    from myimage_analysis import calculate_nhi
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the log likelihood for each velocity width and DGR
        likelihoods = np.zeros((len(vel_widths),
                                len(dgrs)))

        # Progress bar parameters
        total = float(likelihoods.size)
        count = 0

        for j, vel_width in enumerate(vel_widths):
            # Construct N(HI) image outside of DGR loop, then apply
            # DGRs in loop
            vel_range = (vel_center - vel_width / 2.,
                         vel_center + vel_width / 2.)

            nhi_image = calculate_nhi(cube=hi_cube,
                                      velocity_axis=hi_vel_axis,
                                      velocity_range=vel_range,
                                      return_nhi_error=False)

            # Cycle through DGR to estimate error
            for k, dgr in enumerate(dgrs):
                # Create model of Av with N(HI) and DGR
                av_image_model = nhi_image * dgr
                av_image_model_error = nhi_image * dgr

                logL = calc_logL(av_image_model,
                                 av_image,
                                 data_error=av_image_error)

                likelihoods[j, k] = logL

                # Shows progress each 10%
                count += 1
                abs_step = int((total * 1)/10) or 10
                if count and not count % abs_step:
                    print("\t{0:.0%} processed".format(count / total))

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        likelihoods = hdu[0].data

        if len(vel_widths) != likelihoods.shape[0] or \
           len(dgrs) != likelihoods.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

        likelihoods = np.ma.array(likelihoods,
                mask=(likelihoods != likelihoods))

    # Normalize the log likelihoods
    likelihoods -= likelihoods.max()

    # Convert to likelihoods
    likelihoods = np.exp(likelihoods)

    # Normalize the likelihoods
    likelihoods = likelihoods / np.nansum(likelihoods)

    # Derive marginal distributions of both centers and widths
    width_likelihood = np.sum(likelihoods, axis=1) / \
            np.sum(likelihoods)
    dgr_likelihood = np.sum(likelihoods, axis=0) / \
            np.sum(likelihoods)

    # Derive confidence intervals of parameters
    width_confint = threshold_area(vel_widths,
                                   width_likelihood,
                                   area_fraction=conf)
    dgr_confint = threshold_area(dgrs,
                                 dgr_likelihood,
                                 area_fraction=conf)

    # Get values of best-fit model parameters
    max_loc = np.where(likelihoods == np.max(likelihoods))
    width_max = vel_widths[max_loc[0]][0]
    dgr_max = dgrs[max_loc[1]][0]

    print('\nVelocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('\nDGRs = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} 10^20 cm^2 mag'.format(dgr_confint[0],
                                                    dgr_confint[2],
                                                    np.abs(dgr_confint[1])))

    # Write PDF
    upper_lim = (vel_center + width_confint[0]/2.)
    lower_lim = (vel_center - width_confint[0]/2.)
    upper_lim_error = width_confint[2]**2
    lower_lim_error = width_confint[1]**2

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error, upper_lim_error)
    vel_range_max = (vel_center - width_max/2.0, vel_center + width_max/2.0)

    if not return_likelihoods:
        return vel_range_confint, dgr_confint
    else:
        return (vel_range_confint, width_confint, dgr_confint, likelihoods,
                width_likelihood, dgr_likelihood, width_max, dgr_max,
                vel_range_max)
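
calc_logL is imported from mystats elsewhere in these examples but is never shown. A minimal Gaussian log-likelihood consistent with how it is called above (model image, data image, data_error) might look like the sketch below; the actual mystats implementation may differ.

import numpy as np

def calc_logL(model, data, data_error=None):
    """Sketch: Gaussian log-likelihood of the data given a model image."""
    if data_error is None:
        data_error = np.ones_like(data)

    # Chi-squared term plus the Gaussian normalization; NaN pixels drop out
    residual = (data - model) / data_error
    logL = -0.5 * np.nansum(residual**2 + np.log(2.0 * np.pi * data_error**2))

    return logL
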
def plot_nhi_maps(results_dict,
                  limits=None,
                  cube_data=None,
                  header=None,
                  load_synthetic_cube=False,
                  show=False,
                  velocity_range=[0, 500],
                  save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG_BASE = DIR_FIG + 'nhi_map_data_synth'

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(
        cube=cube_data,
        velocity_axis=velocity_axis,
        velocity_range=velocity_range,
    )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_dict['velocity_axis']
    if not load_synthetic_cube:
        print('\nCreating synthetic cube...')
        cube_synthetic = create_synthetic_cube(results_dict, cube_data)

        np.save(FILENAME_CUBE_SYNTH, cube_synthetic)
    else:
        print('\nLoading synthetic cube...')
        cube_synthetic = np.load(FILENAME_CUBE_SYNTH)

    # Create N(HI) synthetic
    nhi_synthetic = myia.calculate_nhi(
        cube=cube_synthetic,
        velocity_axis=velocity_axis,
        velocity_range=velocity_range,
    )

    v_limits = [0, np.max(nhi_data)]
    v_limits = [-1, 41]

    if 0:
        import matplotlib.pyplot as plt
        plt.close()
        plt.clf()
        fig, axes = plt.subplots(2, 1)
        axes[0].imshow(nhi_data, origin='lower')
        axes[1].imshow(nhi_synthetic, origin='lower')
        plt.show()

    if save_pdf:
        ext = '.pdf'
    else:
        ext = '.png'
    filename_fig = FILENAME_FIG_BASE + ext
    print('\nPlotting N(HI) maps...')
    print(filename_fig)
    # Plot the maps together
    plot_nhi_maps(
        nhi_data,
        nhi_synthetic,
        header=header,
        #limits=[278, -37, 282, -35],
        limits=limits,
        filename=filename_fig,
        nhi_1_vlimits=v_limits,
        nhi_2_vlimits=v_limits,
        show=show,
        vscale='linear',
    )
def plot_cluster_vel_panels(results_ref=None,
                            colors=None,
                            limits=None,
                            cube=None,
                            header=None,
                            load_synthetic_cube=False,
                            show=False,
                            velocity_range=[0, 500],
                            save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_vel_map_panels, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG = DIR_FIG + 'vel_maps_components.png'
    if save_pdf:
        FILENAME_FIG = FILENAME_FIG.replace('.png', '.pdf')

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH_BASE = DIR_HI + 'cube_synth_comp'

    # Create synthetic cube from fitted spectra
    velocity_axis = results_ref['velocity_axis']

    # get number of unique components
    component_colors = np.unique(colors)
    n_components = len(component_colors)

    vel_list = []
    nhi_list = []
    vel_max = 0.0
    for i in xrange(n_components):
        if not load_synthetic_cube:
            print('\n\tCreating synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))

            # get the relevant parameters
            indices = np.where(colors == component_colors[i])[0]
            pix_positions = results_ref['pos_pix'][indices]
            fit_params_list = results_ref['data'][indices, 2:]

            print('\n\t\tNumber of components in cube: ' + \
                  '{0:.0f}'.format(len(fit_params_list)))

            cube_synthetic = \
                create_synthetic_cube(pix_positions=pix_positions,
                                      velocity_axis=velocity_axis,
                                      fit_params_list=fit_params_list,
                                      cube_data=cube,
                                      )

            np.save(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy', cube_synthetic)
        else:
            print('\n\tLoading synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))
            cube_synthetic = np.load(FILENAME_CUBE_SYNTH_BASE + str(i) +
                                     '.npy')

        # Create N(HI) synthetic
        vel_synthetic = myia.calculate_moment(
            cube_synthetic,
            moment=1,
            weights=velocity_axis,
        )
        nhi_synthetic = myia.calculate_nhi(
            cube=cube_synthetic,
            velocity_axis=velocity_axis,
            velocity_range=velocity_range,
        )

        vel_list.append(vel_synthetic)
        nhi_list.append(nhi_synthetic)

        vel_max_temp = np.max(vel_synthetic)
        if vel_max_temp > vel_max:
            vel_max = vel_max_temp

    # crop to highest valued cubes
    n_left = 4
    sum_list = []
    n_left = len(nhi_list)
    for nhi in nhi_list:
        sum_list.append(np.nansum(nhi))
    sort_indices = np.argsort(sum_list)[::-1]
    new_list = []
    for i in xrange(n_left):
        new_list.append(vel_list[sort_indices[i]])
    vel_list = new_list

    # value limits
    v_limits = [0, vel_max]

    # Plot the maps together

    plot_vel_map_panels(
        vel_list,
        header=header,
        #limits=[278, -37, 282, -35],
        limits=limits,
        filename=FILENAME_FIG,
        #vel_vlimits=,
        show=show,
        vscale='linear',
    )
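
The calculate_moment call above, with moment=1 and weights=velocity_axis, produces an intensity-weighted mean velocity map. A plain NumPy sketch of that first moment (assuming velocity runs along the cube's first axis) is:

import numpy as np

def first_moment(cube, velocity_axis):
    """Sketch: intensity-weighted mean velocity, v1 = sum(T * v) / sum(T)."""
    weights = velocity_axis[:, np.newaxis, np.newaxis]
    return np.nansum(cube * weights, axis=0) / np.nansum(cube, axis=0)
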
Example #36
def calc_likelihoods(
        hi_cube=None,
        hi_vel_axis=None,
        av_image=None,
        av_image_error=None,
        vel_center=None,
        vel_widths=None,
        dgrs=None,
        plot_results=False,
        results_filename='',
        return_likelihoods=True,
        likelihood_filename=None,
        clobber=False,
        conf=0.68,
        threshold_delta_dgr=0.0005,
        ):

    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of HI velocity range in km/s which provides the
        best correlated N(HI) distribution with Av.
    likelihoods : array-like, optional
        Array of likelihoods corresponding to each permutation through the
        velocity widths and DGRs.

    '''

    import numpy as np
    from myimage_analysis import calculate_nhi
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the likelihood for each velocity width and DGR
        likelihoods = np.zeros((len(vel_widths),
                                len(dgrs)))

        # Progress bar parameters
        total = float(likelihoods.size)
        count = 0

        for j, vel_width in enumerate(vel_widths):
            # Construct N(HI) image outside of DGR loop, then apply
            # DGRs in loop
            vel_range = (vel_center - vel_width / 2.,
                         vel_center + vel_width / 2.)

            nhi_image = calculate_nhi(cube=hi_cube,
                                      velocity_axis=hi_vel_axis,
                                      velocity_range=vel_range,
                                      return_nhi_error=False)

            # Cycle through DGR to estimate error
            for k, dgr in enumerate(dgrs):
                # Create model of Av with N(HI) and DGR
                av_image_model = nhi_image * dgr
                av_image_model_error = nhi_image * dgr

                logL = calc_logL(av_image_model,
                                 av_image,
                                 data_error=av_image_error)

                likelihoods[j, k] = logL

                # Show progress every 10%
                count += 1
                abs_step = int(total / 10) or 10
                if count and not count % abs_step:
                    print("\t{0:.0%} processed".format(count / total))

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        likelihoods = hdu[0].data

        if len(vel_widths) != likelihoods.shape[0] or \
           len(dgrs) != likelihoods.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

        likelihoods = np.ma.array(likelihoods,
                mask=(likelihoods != likelihoods))

    # Normalize the log likelihoods
    likelihoods -= likelihoods.max()

    # Convert to likelihoods
    likelihoods = np.exp(likelihoods)

    # Normalize the likelihoods
    likelihoods = likelihoods / np.nansum(likelihoods)

    # Derive marginal distributions of both centers and widths
    width_likelihood = np.sum(likelihoods, axis=1) / \
            np.sum(likelihoods)
    dgr_likelihood = np.sum(likelihoods, axis=0) / \
            np.sum(likelihoods)

    # Derive confidence intervals of parameters
    width_confint = threshold_area(vel_widths,
                                   width_likelihood,
                                   area_fraction=conf)
    dgr_confint = threshold_area(dgrs,
                                 dgr_likelihood,
                                 area_fraction=conf)

    # Get values of best-fit model parameters
    max_loc = np.where(likelihoods == np.max(likelihoods))
    width_max = vel_widths[max_loc[0]][0]
    dgr_max = dgrs[max_loc[1]][0]

    print('\nVelocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('\nDGRs = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} 10^20 cm^2 mag'.format(dgr_confint[0],
                                                    dgr_confint[2],
                                                    np.abs(dgr_confint[1])))

    # Write PDF
    upper_lim = (vel_center + width_confint[0]/2.)
    lower_lim = (vel_center - width_confint[0]/2.)
    upper_lim_error = width_confint[2]**2
    lower_lim_error = width_confint[1]**2

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error, upper_lim_error)
    vel_range_max = (vel_center - width_max/2.0, vel_center + width_max/2.0)

    if not return_likelihoods:
        return vel_range_confint, dgr_confint
    else:
        return (vel_range_confint, width_confint, dgr_confint, likelihoods,
                width_likelihood, dgr_likelihood, width_max, dgr_max,
                vel_range_max)
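calc_logL is called above but not shown in this listing. A minimal sketch under the assumption that it evaluates a Gaussian log-likelihood of the Av data given the model, followed by the same normalize-and-exponentiate step the function applies to the grid (the helper name and form are assumptions, not the author's implementation):

import numpy as np

def calc_logL_sketch(model, data, data_error):
    # Gaussian log-likelihood, dropping constant terms:
    #   logL = -0.5 * sum((data - model)^2 / error^2)
    return -0.5 * np.nansum((data - model)**2 / data_error**2)

# Toy grid over DGR only, mimicking the inner loop above
nhi = np.array([1.0, 2.0, 3.0])        # N(HI) in 10^20 cm^-2
av = np.array([0.11, 0.19, 0.32])      # Av in mag
av_err = 0.05 * np.ones_like(av)
dgrs = np.arange(0.05, 0.2, 0.01)

logL = np.array([calc_logL_sketch(nhi * dgr, av, av_err) for dgr in dgrs])
likelihoods = np.exp(logL - logL.max())   # normalize log-likelihoods, exponentiate
likelihoods /= np.nansum(likelihoods)
print(dgrs[np.argmax(likelihoods)])       # best-fit DGR near 0.1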
def calc_likelihood_hi_av(#hi_cube=None, hi_velocity_axis=None,
        #hi_noise_cube=None, av_image=None, av_image_error=None,
        velocity_centers=None, velocity_widths=None, return_likelihoods=True,
        dgrs=None, plot_results=True, results_filename='',
        vel_center_image=None, likelihood_filename=None, clobber=False,
        conf=0.68, contour_confs=None, multithread=False):

    '''
    Parameters
    ----------

    Returns
    -------
    hi_vel_range : tuple
        Lower and upper bound of HI velocity range in km/s which provides the
        best correlated N(HI) distribution with Av.
    likelihoods : array-like, optional
        Array of likelihoods corresponding to each permutation through the
        velocity centers, velocity widths, and DGRs.

    '''

    import numpy as np
    from scipy.stats import pearsonr
    from scipy.stats import kendalltau
    from myimage_analysis import calculate_nhi
    from scipy import signal
    from os import path
    from astropy.io import fits
    import multiprocessing
    import itertools

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        if multithread:
            print('\nUsing multithreading in likelihood calculation...')
            # calculate the velocity ranges given a set of centers and widths
            velocity_ranges = np.zeros(shape=[len(velocity_centers) * \
                    len(velocity_widths),2])
            count = 0
            for i, center in enumerate(velocity_centers):
                for j, width in enumerate(velocity_widths):
                    velocity_ranges[count, 0] = center - width/2.
                    velocity_ranges[count, 1] = center + width/2.
                    count += 1

            # Set up iterable whereby each row contains the parameter values
            mesh = setup_likelihood_mesh(velocity_centers,
                                         velocity_widths,
                                         dgrs)

            # Use multiple processors to iterate through parameter
            # combinations

            p = multiprocessing.Pool()
            likelihoods = p.map(search_likelihoods, mesh)
            p.close()

            # reshape likelihoods
            likelihoods = reshape_likelihoods(likelihoods,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                dgrs=dgrs)


        else:
            # calculate the likelihood for each velocity center, width, and DGR
            likelihoods = np.zeros((len(velocity_centers),
                                     len(velocity_widths),
                                     len(dgrs)))

            # Progress bar parameters
            total = float(likelihoods.size)
            count = 0

            for i, velocity_center in enumerate(velocity_centers):
                for j, velocity_width in enumerate(velocity_widths):
                    # Construct N(HI) image outside of DGR loop, then apply
                    # DGRs in loop

                    if vel_center_image is None:
                        velocity_range = (velocity_center-velocity_width / 2.,
                                          velocity_center+velocity_width / 2.)
                    else:
                        velocity_range = (vel_center_image - velocity_width/2.,
                                          vel_center_image + velocity_width/2.)

                    if 0:
                        velocity_range = np.array(velocity_range)
                        import matplotlib.pyplot as plt
                        plt.close(); plt.clf()
                        plt.imshow(velocity_range[0, :, :], origin='lower')
                        plt.show()

                    nhi_image_temp = \
                            calculate_nhi(cube=hi_cube,
                                velocity_axis=hi_velocity_axis,
                                velocity_range=velocity_range,
                                )

                    # Avoid NaNs
                    indices = np.where((nhi_image_temp == nhi_image_temp)&\
                                       (av_image == av_image))

                    nhi_image_likelihood = nhi_image_temp[indices]
                    av_image_likelihood = av_image[indices]
                    if type(av_image_error) != float:
                        av_image_error_likelihood = av_image_error[indices]
                    else:
                        av_image_error_likelihood = av_image_error

                    for k, dgr in enumerate(dgrs):
                        # Create model of Av with N(HI) and DGR
                        av_image_model = nhi_image_likelihood * dgr

                        logL = calc_logL(av_image_model,
                                         av_image_likelihood,
                                         data_error=av_image_error_likelihood)

                        likelihoods[i, j, k] = logL

                        # Show progress every 10%
                        count += 1
                        abs_step = int(total / 10) or 10
                        if count and not count % abs_step:
                            print("\t{0:.0%} processed".format(count / total))

        #likelihoods /= (av_image_model.size)

        # Normalize the log likelihoods
        likelihoods -= likelihoods.max()

        # Convert to likelihoods
        likelihoods = np.exp(likelihoods)

        # Normalize the likelihoods
        likelihoods = likelihoods / np.nansum(likelihoods)

        # Write out fits file of likelihoods
        if write_mle:
            write_mle_tofits(filename=likelihood_filename,
                             velocity_centers=velocity_centers,
                             velocity_widths=velocity_widths,
                             dgrs=dgrs,
                             likelihoods=likelihoods,
                             clobber=clobber)

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        likelihoods = hdu[0].data

        if len(velocity_centers) != likelihoods.shape[0] or \
            len(velocity_widths) != likelihoods.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

        likelihoods = np.ma.array(likelihoods,
                mask=(likelihoods != likelihoods))

    # Define parameter resolutions
    #delta_center = velocity_centers[1] - velocity_centers[0]
    #delta_width = velocity_widths[1] - velocity_widths[0]

    # Derive marginal distributions of both centers and widths
    center_likelihood = np.sum(likelihoods, axis=(1,2)) / \
            np.sum(likelihoods)
    width_likelihood = np.sum(likelihoods, axis=(0,2)) / \
            np.sum(likelihoods)
    dgr_likelihood = np.sum(likelihoods, axis=(0,1)) / \
            np.sum(likelihoods)

    # Derive confidence intervals of parameters
    center_confint = threshold_area(velocity_centers,
                                    center_likelihood,
                                    area_fraction=conf)
    width_confint = threshold_area(velocity_widths,
                                   width_likelihood,
                                   area_fraction=conf)
    dgr_confint = threshold_area(dgrs,
                                 dgr_likelihood,
                                 area_fraction=conf)

    # Get values of best-fit model parameters
    max_loc = np.where(likelihoods == np.max(likelihoods))
    center_max = velocity_centers[max_loc[0]][0]
    width_max = velocity_widths[max_loc[1]][0]
    dgr_max = dgrs[max_loc[2]][0]

    print('\nVelocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('\nVelocity centers = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(center_confint[0],
                                                    center_confint[2],
                                                    np.abs(center_confint[1])))
    print('\nDGRs = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} 10^20 cm^2 mag'.format(dgr_confint[0],
                                                    dgr_confint[2],
                                                    np.abs(dgr_confint[1])))

    # Write PDF
    center = center_confint[0]
    upper_lim = (center_confint[0] + width_confint[0]/2.)
    lower_lim = (center_confint[0] - width_confint[0]/2.)
    upper_lim_error = (center_confint[2]**2 + width_confint[2]**2)**0.5
    lower_lim_error = (center_confint[1]**2 + width_confint[1]**2)**0.5

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error,
            upper_lim_error)

    '''
    if plot_results:
        plot_likelihoods_hist(likelihoods,
                              velocity_widths,
                              velocity_centers,
                              x_confint=width_confint,
                              y_confint=center_confint,
                              plot_axes=('widths', 'centers'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_wc.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_centers,
                              dgrs,
                              x_confint=center_confint,
                              y_confint=dgr_confint,
                              plot_axes=('centers', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_cd.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_widths,
                              dgrs,
                              x_confint=width_confint,
                              y_confint=dgr_confint,
                              plot_axes=('widths', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_wd.png',
                              contour_confs=contour_confs)
    '''

    if not return_likelihoods:
        return vel_range_confint, dgr_confint
    else:
        return (vel_range_confint, width_confint, dgr_confint, likelihoods,
            center_likelihood, width_likelihood, dgr_likelihood, center_max,
            width_max, dgr_max)
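threshold_area is used in both likelihood functions but not defined in this listing. A plausible reading is that it returns the peak of a 1-D marginal likelihood plus the offsets of the interval enclosing a given fraction of the probability; a minimal sketch under that assumption (hypothetical helper, not the library routine):

import numpy as np

def conf_interval_sketch(x, pdf, area_fraction=0.68):
    # Keep the smallest set of grid points whose summed probability reaches
    # the requested fraction, then report (peak, lower offset, upper offset)
    # in the same order the calls above unpack.
    pdf = pdf / np.nansum(pdf)
    order = np.argsort(pdf)[::-1]
    cumulative = np.cumsum(pdf[order])
    kept = order[:np.searchsorted(cumulative, area_fraction) + 1]
    peak = x[order[0]]
    return peak, x[kept].min() - peak, x[kept].max() - peak

# Toy marginal over velocity widths peaked at 10 km/s with sigma = 3 km/s
widths = np.arange(1, 30, 0.5)
pdf = np.exp(-0.5 * ((widths - 10.0) / 3.0)**2)
print(conf_interval_sketch(widths, pdf))   # ~(10.0, -3.0, +3.0)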
def plot_cluster_vel_panels(results_ref=None, colors=None, limits=None,
        cube=None, header=None, load_synthetic_cube=False, show=False,
        velocity_range=[0,500], save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_vel_map_panels, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG = DIR_FIG + 'vel_maps_components.png'
    if save_pdf:
        FILENAME_FIG = FILENAME_FIG.replace('.png','.pdf')

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH_BASE = DIR_HI + 'cube_synth_comp'

    # Create synthetic cube from fitted spectra
    velocity_axis = results_ref['velocity_axis']

    # get number of unique components
    component_colors = np.unique(colors)
    n_components = len(component_colors)

    vel_list = []
    nhi_list = []
    vel_max = 0.0
    for i in xrange(n_components):
        if not load_synthetic_cube:
            print('\n\tCreating synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))

            # get the relevant parameters
            indices = np.where(colors == component_colors[i])[0]
            pix_positions = results_ref['pos_pix'][indices]
            fit_params_list = results_ref['data'][indices, 2:]

            print('\n\t\tNumber of components in cube: ' + \
                  '{0:.0f}'.format(len(fit_params_list)))

            cube_synthetic = \
                create_synthetic_cube(pix_positions=pix_positions,
                                      velocity_axis=velocity_axis,
                                      fit_params_list=fit_params_list,
                                      cube_data=cube,
                                      )

            np.save(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy', cube_synthetic)
        else:
            print('\n\tLoading synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))
            cube_synthetic = np.load(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy')

        # Create N(HI) synthetic
        vel_synthetic = myia.calculate_moment(cube_synthetic,
                                              moment=1,
                                              weights=velocity_axis,
                                              )
        nhi_synthetic = myia.calculate_nhi(cube=cube_synthetic,
                                           velocity_axis=velocity_axis,
                                           velocity_range=velocity_range,
                                           )

        vel_list.append(vel_synthetic)
        nhi_list.append(nhi_synthetic)

        vel_max_temp = np.max(vel_synthetic)
        if vel_max_temp > vel_max:
            vel_max = vel_max_temp

    # crop to highest valued cubes
    n_left = 4
    sum_list = []
    n_left = len(nhi_list)
    for nhi in nhi_list:
        sum_list.append(np.nansum(nhi))
    sort_indices = np.argsort(sum_list)[::-1]
    new_list = []
    for i in xrange(n_left):
        new_list.append(vel_list[sort_indices[i]])
    vel_list = new_list

    # value limits
    v_limits = [0, vel_max]

    # Plot the maps together

    plot_vel_map_panels(vel_list,
                        header=header,
                        #limits=[278, -37, 282, -35],
                        limits=limits,
                        filename=FILENAME_FIG,
                        #vel_vlimits=,
                        show=show,
                        vscale='linear',
                        )
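Both panel functions rely on calculate_nhi, and the test in the next example divides its cube by ~1.823e-2. That factor comes from the optically thin relation N(HI) = 1.823e18 cm^-2 * integral(T_B dv); a minimal sketch of that integration in units of 10^20 cm^-2, assuming the library integrates the cube over the requested velocity range (the function below is illustrative, not the library routine):

import numpy as np

def nhi_sketch(cube, velocity_axis, velocity_range):
    # N(HI) [10^20 cm^-2] = 1.823e-2 * sum(T_B [K]) * dv [km/s]
    dv = np.abs(velocity_axis[1] - velocity_axis[0])
    in_range = (velocity_axis >= velocity_range[0]) & \
               (velocity_axis <= velocity_range[1])
    return 1.823e-2 * np.nansum(cube[in_range], axis=0) * dv

# Toy cube: a 10 K line spanning 10 km/s -> N(HI) ~ 1.8 x 10^20 cm^-2
vel = np.arange(-50, 50, 1.0)
cube = np.zeros((vel.size, 2, 2))
cube[(vel >= 0) & (vel < 10)] = 10.0
print(nhi_sketch(cube, vel, velocity_range=(-5, 15)))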
Example #39
    def test_calc_likelihoods_2():
        from numpy.testing import assert_array_almost_equal
        from numpy.testing import assert_almost_equal
        from myimage_analysis import calculate_nhi
        import matplotlib.pyplot as plt
        from matplotlib import cm
        from mpl_toolkits.axes_grid1 import ImageGrid

        av_image = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 2, 1, 0],
                             [np.nan, 1, 1, 1, 0], [0, 0, 0, 0, 0]])

        #av_image_error = np.random.normal(0.1, size=av_image.shape)
        av_image_error = 0.1 * np.ones(av_image.shape)

        #nhi_image = av_image + np.random.normal(0.1, size=av_image.shape)
        hi_cube = np.zeros((5, 5, 5))

        # make inner channels correlated with av
        hi_cube[:, :, :] = np.array([
            [
                [1., 0., 0., 0., 0.],
                [np.nan, 0., 0., 0., 0.],
                [0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 0.],
                [1., 0., 0., 0., 10.],
            ],
            [
                [0., 0., 0., 0., 0.],
                [0., 0., 2., 0., 0.],
                [0., 0., 4., 0., 0.],
                [0., 0., 2., 0., 0.],
                [0., 0., 0., 0., 0.],
            ],
            [
                [0., 0., 0., 0., 0.],
                [0., 0., 0., 2., 0.],
                [0., 0., 0., 2., 0.],
                [0., 0., 0., 2., np.nan],
                [0., 0., 0., 0., 0.],
            ],
            [
                [0., 0., 0., 0., 0.],
                [0., 2., 0., 0., 0.],
                [0., 2., 0., 0., 0.],
                [0., 2., 0., 0., 0.],
                [0., 0., 0., 0., 0.],
            ],
            [
                [0., 0., 0., 0., 0.],
                [0., 0., 0., 0., np.nan],
                [0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 0.],
                [1., 0., 0., 0., 0.2],
            ],
        ])

        if 1:
            fig = plt.figure(figsize=(4, 4))
            imagegrid = ImageGrid(fig, (1, 1, 1),
                                  nrows_ncols=(1, 5),
                                  ngrids=5,
                                  cbar_mode="single",
                                  cbar_location='top',
                                  cbar_pad="2%",
                                  cbar_size='3%',
                                  axes_pad=0.1,
                                  aspect=True,
                                  label_mode='L',
                                  share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            for i in xrange(5):
                im = imagegrid[i].imshow(
                    hi_cube[i, :, :],
                    origin='lower',
                    #aspect='auto',
                    cmap=cmap,
                    interpolation='none',
                    vmin=0,
                    vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/hi_cube.png')

        # make edge channels totally uncorrelated
        #hi_cube[(0, 4), :, :] = np.arange(0, 25).reshape(5,5)
        #hi_cube[(0, 4), :, :] = - np.ones((5,5))

        hi_vel_axis = np.arange(0, 5, 1)

        # add intercept
        intercept_answer = 0.9
        av_image = av_image + intercept_answer

        if 1:
            fig = plt.figure(figsize=(4, 4))
            params = {
                'figure.figsize': (1, 1),
                #'figure.titlesize': font_scale,
            }
            plt.rcParams.update(params)
            imagegrid = ImageGrid(fig, (1, 1, 1),
                                  nrows_ncols=(1, 1),
                                  ngrids=1,
                                  cbar_mode="single",
                                  cbar_location='top',
                                  cbar_pad="2%",
                                  cbar_size='3%',
                                  axes_pad=0.1,
                                  aspect=True,
                                  label_mode='L',
                                  share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(
                av_image,
                origin='lower',
                #aspect='auto',
                cmap=cmap,
                interpolation='none',
                vmin=0,
                vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/av.png')

        width_grid = np.arange(0, 5, 1)
        dgr_grid = np.arange(0, 1, 0.1)
        intercept_grid = np.arange(-1, 1, 0.1)
        vel_center = 2

        results = \
            cloudpy._calc_likelihoods(
                              hi_cube=hi_cube / 1.823e-2,
                              hi_vel_axis=hi_vel_axis,
                              vel_center=vel_center,
                              av_image=av_image,
                              av_image_error=av_image_error,
                              width_grid=width_grid,
                              dgr_grid=dgr_grid,
                              intercept_grid=intercept_grid,
                              )

        dgr_answer = 1 / 2.0
        width_answer = 2
        width = results['width_max']
        dgr = results['dgr_max']
        intercept = results['intercept_max']
        print(width)

        if 0:
            width = width_answer
            intercept = intercept_answer
            dgr = dgr_answer

        vel_range = (vel_center - width / 2.0, vel_center + width / 2.0)

        nhi_image = calculate_nhi(cube=hi_cube,
                                  velocity_axis=hi_vel_axis,
                                  velocity_range=vel_range) / 1.823e-2
        if 1:
            fig = plt.figure(figsize=(4, 4))
            imagegrid = ImageGrid(fig, (1, 1, 1),
                                  nrows_ncols=(1, 1),
                                  ngrids=1,
                                  cbar_mode="single",
                                  cbar_location='top',
                                  cbar_pad="2%",
                                  cbar_size='3%',
                                  axes_pad=0.1,
                                  aspect=True,
                                  label_mode='L',
                                  share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(
                nhi_image,
                origin='lower',
                #aspect='auto',
                cmap=cmap,
                interpolation='none',
                vmin=0,
                vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/nhi.png')
        if 1:
            fig = plt.figure(figsize=(4, 4))
            imagegrid = ImageGrid(fig, (1, 1, 1),
                                  nrows_ncols=(1, 1),
                                  ngrids=1,
                                  cbar_mode="single",
                                  cbar_location='top',
                                  cbar_pad="2%",
                                  cbar_size='3%',
                                  axes_pad=0.1,
                                  aspect=True,
                                  label_mode='L',
                                  share_all=True)
            cmap = cm.get_cmap('Greys', 5)
            im = imagegrid[0].imshow(
                nhi_image * dgr + intercept,
                origin='lower',
                #aspect='auto',
                cmap=cmap,
                interpolation='none',
                vmin=0,
                vmax=4)
            #cb = imagegrid[i].cax.colorbar(im)
            cbar = imagegrid.cbar_axes[0].colorbar(im)
            #plt.title('HI Cube')
            plt.savefig('/usr/users/ezbc/Desktop/av_model.png')

        print('residuals = ')
        print(av_image - (nhi_image * dgr + intercept))
        print('dgr', dgr)
        print('intercept', intercept)
        print('width', width)

        assert_almost_equal(results['intercept_max'], intercept_answer)
        assert_almost_equal(results['dgr_max'], dgr_answer)
        assert_almost_equal(results['width_max'], width_answer)
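As a sanity check on the expected answers, the best-fit model Av should reproduce the input map: integrating the three central channels of the synthetic cube (width_answer = 2 around vel_center = 2) gives an N(HI) image whose inner values are, up to the conversion constant, twice the original av_image, so av_image = nhi_image * dgr_answer + intercept_answer. A minimal sketch of that check, with values taken from the test above:

import numpy as np

nhi_image = np.array([[0., 0., 0., 0., 0.],
                      [0., 2., 2., 2., 0.],
                      [0., 2., 4., 2., 0.],
                      [0., 2., 2., 2., 0.],
                      [0., 0., 0., 0., 0.]])
dgr_answer, intercept_answer = 0.5, 0.9
av_model = nhi_image * dgr_answer + intercept_answer
print(av_model)  # matches the test's av_image (intercept included), apart from its NaN pixel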
Example #40
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck', use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'taurus_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'taurus_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'taurus_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'taurus_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/taurus_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'taurus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'taurus_av_boxes_',
            header = hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = False
    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan

    if mask_images:
        av_image_masked[mask] = np.nan
        av_error_masked[mask] = np.nan
        av_model_masked[mask] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    if 0:
        import matplotlib.pyplot as plt
        av_plot_data = np.copy(av_image)
        av_plot_data[mask] = np.nan
        plt.imshow(av_plot_data, origin='lower')
        plt.xlim(props['plot_limit_bin']['pixel'][0:3:2])
        plt.ylim(props['plot_limit_bin']['pixel'][1:4:2])
        plt.show()

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1,2))

    if not use_binned_images:
        # Derive CO spectrum
        co_data[:, mask] = 0
        co_data[np.isnan(co_data)] = 0
        co_spectrum = np.mean(co_data, axis=(1,2))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/taurus_av_model_map_' + \
                    av_data_type + bin_string
                    #'dgr{0:.3f}_'.format(dgr) + \
                    #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                    #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/taurus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(vel_range[i, 0],
                                                              vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'taurus_av_model_map_region{0:.0f}'.format(region)

        print('\nSaving Av model image to \n' + figure_dir + filename + \
                '.' + figure_type)

        if 0:
            plot_av_model(av_image=av_image_masked,
                          av_model=av_model_masked,
                          header=av_header,
                          results=props,
                          limits=props['plot_limit' + bin_string]['pixel'],
                          savedir=figure_dir + 'maps/av_models/',
                          filename=filename + '.' + figure_type,
                          show=False)

        if 1:
            #if not use_binned_images:
            if 0:
                plot_av_model(av_image=av_image_masked,
                              av_model=av_model_masked,
                              header=av_header,
                              results=props,
                              hi_velocity_axis=velocity_axis,
                              vel_range=vel_range,
                              hi_spectrum=hi_spectrum,
                              #hi_limits=[-15, 25, -1, 10],
                              hi_limits=[-15, 25, None, None],
                              co_spectrum=co_spectrum,
                              co_velocity_axis=co_velocity_axis,
                              limits=props['plot_limit' + bin_string]['pixel'],
                              savedir=figure_dir + 'maps/av_models/',
                              filename=filename + '_spectra' + '.' + figure_type,
                              show=False)


            plot_avmod_vs_av((av_model_masked,),
                    (av_image_masked,),
                    av_errors=(av_error_masked,),
                    #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                    limits=[0,20,0,3],
                    savedir=figure_dir + 'av/',
                    gridsize=(10,10),
                    #scale=('log', 'log'),
                    #scale=('linear', 'linear'),
                    filename='taurus_avmod_vs_av%s.%s' % (bin_string, figure_type),
                    show = False,
                    std=0.22,
                    )

        if 0:
            plot_power_spectrum(av_image_masked - av_model_masked,
                filename_prefix='taurus_av_resid_power_spectrum_' + \
                                '{0:s}'.format(av_data_type),
                filename_suffix='.{0:s}'.format(figure_type),
                savedir=figure_dir + 'power_spectra/',
                show=False)
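Before plotting, the main() above replaces NaNs and zeroes masked spatial pixels, then averages the HI cube into a spectrum; a minimal standalone sketch of that step (array shapes and names are illustrative):

import numpy as np

hi_cube = np.random.rand(10, 4, 4)   # (channels, y, x), toy data
mask = np.zeros((4, 4), dtype=bool)
mask[:2, :2] = True                  # spatial pixels to exclude

hi_cube[np.isnan(hi_cube)] = 0.0     # replace NaNs with zeros, as in the script
hi_cube[:, mask] = 0.0               # zero masked pixels (they still count toward the average)
hi_spectrum = np.mean(hi_cube, axis=(1, 2))   # mean over all spatial pixels per channel
print(hi_spectrum.shape)             # (10,)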
def main():

    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube

    global hi_cube
    global hi_velocity_axis
    global hi_noise_cube
    global av_image
    global av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = "fine"
    grid_res = "coarse"

    # Use multithreading?
    multithread = False

    # Use Av+CO mask or only CO?
    av_and_co_mask = True

    # Derive CO mask? If co_thres = None, co_thres will be 2 * std(co)
    co_thres = 6.00  # K km/s

    # Threshold of Av below which we expect only atomic gas, in mag
    av_thres = 1.4

    # Results and fits filenames
    if av_and_co_mask:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_" + "av{0:.1f}mag".format(av_thres)
        results_filename = "taurus_likelihood_co_" + "av{0:.1f}mag".format(av_thres)
    else:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_only"
        results_filename = "taurus_likelihood_co_only"

    # Name of property files results are written to
    global_property_file = "taurus_global_properties.txt"
    core_property_file = "taurus_core_properties.txt"

    # Name of noise cube
    noise_cube_filename = "taurus_hi_galfa_cube_regrid_planckres_noise.fits"

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += "_width_dgr_center"
        results_filename += "_width_dgr_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:
        if grid_res == "course":
            likelihood_filename += "_dgr_width_lowres"
            results_filename += "_dgr_width_lowres"
            velocity_centers = np.arange(-5, 10, 10 * 0.16667)
            velocity_widths = np.arange(1, 30, 10 * 0.16667)
            dgrs = np.arange(0.05, 0.7, 2e-2)
        elif grid_res == "fine":
            likelihood_filename += "_dgr_width_highres"
            results_filename += "_dgr_width_highres"
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 100, 0.16667)
            dgrs = np.arange(0.15, 0.4, 1e-3)
            velocity_widths = np.arange(1, 15, 0.16667)
            dgrs = np.arange(0.1, 0.9, 3e-3)
            # velocity_widths = np.arange(1, 40, 1)
            # dgrs = np.arange(0.15, 0.4, 1e-1)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width_center"
        results_filename += "_width_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width"
        results_filename += "_width"

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/taurus/figures/hi_velocity_range/"
    av_dir = "/d/bip3/ezbc/taurus/data/av/"
    hi_dir = "/d/bip3/ezbc/taurus/data/hi/"
    co_dir = "/d/bip3/ezbc/taurus/data/co/"
    core_dir = "/d/bip3/ezbc/taurus/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/taurus/data/python_output/"
    region_dir = "/d/bip3/ezbc/taurus/data/python_output/ds9_regions/"
    likelihood_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"

    # load Planck Av and GALFA HI images, on same grid
    av_data, av_header = load_fits(av_dir + "taurus_av_planck_5arcmin.fits", return_header=True)

    av_data_error, av_error_header = load_fits(av_dir + "taurus_av_error_planck_5arcmin.fits", return_header=True)

    hi_data, h = load_fits(hi_dir + "taurus_hi_galfa_cube_regrid_planckres.fits", return_header=True)

    co_data, co_header = load_fits(co_dir + "taurus_co_cfa_cube_regrid_planckres.fits", return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)
    co_velocity_axis = make_velocity_axis(co_header)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(
            cube=hi_data,
            velocity_axis=velocity_axis,
            velocity_noise_range=[90, 110],
            header=h,
            Tsys=30.0,
            filename=hi_dir + noise_cube_filename,
        )
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename, return_header=True)

    # define core properties
    with open(core_dir + core_property_file, "r") as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, "r") as f:
        global_props = json.load(f)

    # Change WCS coords to pixel coords of images
    cores = convert_core_coordinates(cores, h)
    cores = load_ds9_region(cores, filename_base=region_dir + "taurus_av_boxes_", header=h)
    global_props = convert_limit_coordinates(global_props, header=av_header)

    print ("\nCalculating likelihoods globally")

    co_data_nonans = np.copy(co_data)
    co_data_nonans[np.isnan(co_data_nonans)] = 0.0

    # Set velocity center as CO peak
    if not center_vary:
        co_spectrum = np.sum(co_data_nonans, axis=(1, 2))
        co_avg_vel = np.average(co_velocity_axis, weights=co_spectrum)
        co_peak_vel = co_velocity_axis[co_spectrum == np.max(co_spectrum)]
        # velocity_centers = np.arange(co_peak_vel, co_peak_vel + 1, 1)
        velocity_centers = np.arange(co_avg_vel, co_avg_vel + 1, 1)

        print ("\nVelocity center from CO = " + "{0:.2f} km/s".format(velocity_centers[0]))

    # Create mask where CO is present
    core_mask = np.zeros(av_data.shape)
    # for core in cores:
    #    # Grab the mask
    #    core_mask += myg.get_polygon_mask(av_data,
    #            cores[core]['box_vertices_rotated'])

    # Calc moment 0 map of CO
    co_mom0 = np.sum(co_data_nonans, axis=0)

    # calc noise without any emission if CO threshold not already set
    if co_thres is None:
        co_noise = calc_co_noise(co_mom0, global_props)
        co_thres = 2.0 * co_noise

    # Derive relevant region
    pix = global_props["region_limit"]["pixel"]
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]), (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print ("\nRegion size = " + "{0:.0f} pix".format(region_mask[region_mask == 1].size))

    # Get indices which trace only atomic gas, i.e., no CO emission
    if av_and_co_mask:
        indices = ((co_mom0 < co_thres) & (av_data < av_thres)) & (region_mask == 1)
    elif not av_and_co_mask:
        indices = (co_mom0 < co_thres) & (region_mask == 1)
        av_thres = None

    # Write mask of pixels not used
    mask = ~indices

    # Mask global data with CO indices
    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data[indices])
    av_error_data_sub = np.copy(av_data_error[indices])

    # import matplotlib.pyplot as plt
    # av_plot_data = np.copy(av_data)
    # av_plot_data[~indices] = np.nan
    # plt.imshow(av_plot_data, origin='lower')
    # plt.contour(co_mom0, levels=(6, 12, 24), origin='lower')
    # plt.show()
    # plt.clf()
    # plt.close()

    # Plot the masked image
    av_data_masked = np.copy(av_data)
    av_data_masked[~indices] = np.nan
    figure_types = ["png"]
    for figure_type in figure_types:
        plot_av_image(
            av_image=av_data_masked,
            header=av_header,
            savedir=figure_dir + "../maps/",
            limits=global_props["region_limit"]["pixel"],
            filename="taurus_dgr_co_masked_map." + figure_type,
            show=0,
        )

    # Set global variables
    hi_cube = hi_data_sub
    hi_velocity_axis = velocity_axis
    hi_noise_cube = noise_cube_sub
    av_image = av_data_sub
    av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Correlate each core region Av and N(HI) for velocity ranges
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
     center_likelihood, width_likelihood, dgr_likelihood, center_max,
     width_max, dgr_max) = calc_likelihood_hi_av(
        dgrs=dgrs,
        velocity_centers=velocity_centers,
        velocity_widths=velocity_widths,
        return_likelihoods=True,
        plot_results=True,
        results_filename=results_filename,
        likelihood_filename=likelihood_dir + likelihood_filename + "_global.fits",
        clobber=clobber,
        conf=conf,
        contour_confs=contour_confs,
        multithread=multithread,
    )
    vel_range_max = (center_max - width_max / 2.0, center_max + width_max / 2.0)

    print ("\nHI velocity integration range:")
    print ("%.1f to %.1f km/s" % (vel_range_confint[0], vel_range_confint[1]))
    print ("\nDGR:")
    print ("%.1f x 10^-20 cm^2 mag" % (dgr_confint[0]))

    # Calulate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = calculate_nhi(
        cube=hi_data, velocity_axis=hi_velocity_axis, velocity_range=vel_range_max, noise_cube=noise_cube
    )
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = (av_image_model == av_image_model) & (av_data == av_data)
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 /
                   av_data_error[~mask]**2) / av_data[~mask].size

    print("\nTotal number of pixels in analysis, after masking = " +
          "{0:.0f}".format(npix) +
          "\nGiven a CO threshold of {0:.2f} K km/s".format(co_thres) +
          "\nand an Av threshold of {0:.2f} mag".format(av_thres))

    print ("\nReduced chi^2 = {0:.1f}".format(chisq))

    # Write results to global properties
    global_props["dust2gas_ratio"] = {}
    global_props["dust2gas_ratio_error"] = {}
    global_props["hi_velocity_width"] = {}
    global_props["dust2gas_ratio_max"] = {}
    global_props["hi_velocity_center_max"] = {}
    global_props["hi_velocity_width_max"] = {}
    global_props["hi_velocity_range_max"] = {}
    global_props["av_threshold"] = {}
    global_props["co_threshold"] = {}
    global_props["hi_velocity_width"]["value"] = vel_range_confint[1] - vel_range_confint[0]
    global_props["hi_velocity_width"]["unit"] = "km/s"
    global_props["hi_velocity_range"] = vel_range_confint[0:2]
    global_props["hi_velocity_range_error"] = vel_range_confint[2:]
    global_props["dust2gas_ratio"]["value"] = dgr_confint[0]
    global_props["dust2gas_ratio_error"]["value"] = dgr_confint[1:]
    global_props["dust2gas_ratio_max"]["value"] = dgr_max
    global_props["hi_velocity_center_max"]["value"] = center_max
    global_props["hi_velocity_width_max"]["value"] = width_max
    global_props["hi_velocity_range_max"]["value"] = vel_range_max
    global_props["hi_velocity_range_conf"] = conf
    global_props["center_likelihood"] = center_likelihood.tolist()
    global_props["width_likelihood"] = width_likelihood.tolist()
    global_props["dgr_likelihood"] = dgr_likelihood.tolist()
    global_props["vel_centers"] = velocity_centers.tolist()
    global_props["vel_widths"] = velocity_widths.tolist()
    global_props["dgrs"] = dgrs.tolist()
    global_props["likelihoods"] = likelihoods.tolist()
    global_props["av_threshold"]["value"] = av_thres
    global_props["av_threshold"]["unit"] = "mag"
    global_props["co_threshold"]["value"] = co_thres
    global_props["co_threshold"]["unit"] = "K km/s"
    global_props["chisq"] = chisq
    global_props["npix"] = npix
    global_props["mask"] = mask.tolist()

    with open(property_dir + global_property_file, "w") as f:
        json.dump(global_props, f)
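The reduced chi-squared written to the property file above is a per-pixel goodness of fit over the unmasked data; a minimal sketch of the same calculation with toy arrays (names are illustrative, not the script's variables):

import numpy as np

# Reduced chi^2 = sum((data - model)^2 / error^2) / N_pixels
av_data = np.array([1.0, 2.0, 3.0, np.nan])
av_model = np.array([1.1, 1.8, 3.2, 2.0])
av_error = np.array([0.2, 0.2, 0.2, 0.2])
mask = np.isnan(av_data) | np.isnan(av_model)

chisq = np.sum((av_data[~mask] - av_model[~mask])**2 /
               av_error[~mask]**2) / av_data[~mask].size
print('Reduced chi^2 = {0:.1f}'.format(chisq))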
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Name of HI noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.00005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 3.0

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)
    #vel_widths = np.arange(1, 50, 8*0.16667)
    #dgrs = np.arange(0.01, 0.2, 1e-2)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-20, 20)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Adjust filenames
    noise_cube_filename += bin_string
    likelihood_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                          bin_string
    results_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                       bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'perseus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'perseus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)

    if use_binned_image:
        #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
        av_data_error *= 5

    hi_data, hi_header = fits.getdata(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir +
                noise_cube_filename + '.fits',
            header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = np.logical_not(myg.get_polygon_mask(av_data, region_vertices))

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )
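    # calculate_nhi is assumed to integrate the HI brightness temperature over
    # vel_range and convert to a column density via the optically thin
    # relation N(HI) = 1.823e18 * sum(T_B * dv) cm^-2; the DGR grid below
    # suggests the image is returned in units of 10^20 cm^-2.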

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             init_mask=region_mask,
                             verbose=1,
                             plot_progress=0,
                             )

    # Combine region mask with new mask
    #mask += np.logical_not(region_mask)
    mask += region_mask

    if 1:
        import matplotlib.pyplot as plt
        plt.imshow(np.ma.array(av_data, mask=mask), origin='lower')
        plt.show()

    # Derive center velocity from HI
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))
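    # Weighting by the spectrum squared biases the mean toward the brightest
    # channels, so the derived center follows the peak of the HI emission
    # rather than faint line wings.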

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + 'likelihood/'+ results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calculate chi^2 for the best-fit model
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = np.sum(~mask)

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size
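    # Note: the sum is normalized by the number of unmasked pixels; a stricter
    # reduced chi^2 would divide by N - n_params (two fitted parameters here,
    # the velocity width and the DGR), a negligible difference for large N.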

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()
    global_props['use_binned_image'] = use_binned_image

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    print('\nWriting likelihood image to\n' + results_filename + '_wd.png')
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    if 0:
        plt.clf(); plt.close()
        nhi_image_copy = np.copy(nhi_image)
        nhi_image_copy[mask] = np.nan
        av_image_copy = np.copy(av_data)
        resid_image = av_image_copy - nhi_image_copy * dgr
        plt.imshow(resid_image, origin='lower')
        plt.title(r'$A_V$ Data - Model')
        plt.colorbar()
        plt.show()
def main(av_data_type='planck'):

    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from astropy.io import fits
    import matplotlib.pyplot as plt

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    likelihood_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)

    # Threshold for converging DGR
    threshold_delta_dgr = 0.00005

    # define directory locations
    # --------------------------
    output_dir = '/home/ezbc/research/data/taurus/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/taurus/figures/hi_velocity_range/'
    av_dir = '/home/ezbc/research/data/taurus/av/'
    hi_dir = '/home/ezbc/research/data/taurus/hi/'
    co_dir = '/home/ezbc/research/data/taurus/co/'
    core_dir = '/home/ezbc/research/data/taurus/python_output/core_properties/'
    property_dir = '/home/ezbc/research/data/taurus/python_output/'
    region_dir = '/home/ezbc/research/data/taurus/python_output/ds9_regions/'
    likelihood_dir = '/home/ezbc/research/data/taurus/python_output/nhi_av/'


    # Load data
    # ---------
    av_data, av_header = fits.getdata(av_dir + \
                                       'taurus_av_planck_5arcmin.fits',
                                       header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'taurus_av_error_planck_5arcmin.fits',
            header=True)

    hi_data, hi_header = fits.getdata(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres.fits',
            header=True)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Velocity range over which to integrate HI
    vel_range = (0, 10)

    # Make Av model
    # -------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    #plt.clf(); plt.close()
    #plt.imshow(nhi_image, origin='lower')
    #plt.show()

    # Mask out nans and high-valued pixels
    mask = ((av_data > 30.0) | \
            np.isnan(av_data) | \
            np.isnan(av_data_error) | \
            (av_data_error == 0) | \
            np.isnan(nhi_image))

    # solve for DGR using linear least squares
    print('\nSolving for DGR...')

    delta_dgr = 1e10
    dgr = 1e10
    while delta_dgr > threshold_delta_dgr:
        A = np.array((np.ravel(nhi_image[~mask] / av_data_error[~mask]),))
        b = np.array((np.ravel(av_data[~mask] / av_data_error[~mask]),))
        A = np.matrix(A).T
        b = np.matrix(b).T
        #dgr = np.dot(np.linalg.pinv(A), b)
        dgr_new = (np.linalg.pinv(A) * b)[0, 0]

        # Create model with the DGR
        print('\nDGR = {0:.2f} 10^-20 cm^2 mag'.format(dgr_new))
        av_image_model = nhi_image * dgr_new

        residuals = av_data - av_image_model

        # Include only residuals which are white noise
        mask_new = get_residual_mask(residuals,
                resid_width_scale=2.0, plot_progress=0)

        # Mask non-white noise, i.e. correlated residuals.
        mask += mask_new

        npix = mask.size - np.sum(mask)
        print('Number of non-masked pixels = {0:.0f}'.format(npix))

        # Reset while loop conditions
        delta_dgr = np.abs(dgr - dgr_new)
        dgr = dgr_new

    plt.clf(); plt.close()
    nhi_image_copy = np.copy(nhi_image)
    nhi_image_copy[mask] = np.nan
    av_image_copy = np.copy(av_data)
    resid_image = av_image_copy - nhi_image_copy * dgr
    plt.imshow(resid_image, origin='lower')
    plt.title(r'$A_V$ Data - Model')
    plt.colorbar()
    plt.show()
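
    # Illustrative sketch (not part of the original pipeline): the pinv-based
    # solve in the loop above is a weighted least-squares fit of
    # Av ~ DGR * N(HI) with weights 1 / sigma_Av. np.linalg.lstsq recovers the
    # same DGR without constructing np.matrix objects.
    if 0:
        A = (nhi_image[~mask] / av_data_error[~mask]).reshape(-1, 1)
        b = av_data[~mask] / av_data_error[~mask]
        dgr_lstsq = np.linalg.lstsq(A, b, rcond=None)[0][0]
        print('DGR (lstsq check) = {0:.2f} 10^-20 cm^2 mag'.format(dgr_lstsq))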
Example #44
0
def calc_likelihood_hi_av(hi_cube=None,
                          hi_velocity_axis=None,
                          hi_noise_cube=None,
                          av_image=None,
                          av_image_error=None,
                          velocity_centers=None,
                          velocity_widths=None,
                          return_likelihoods=True,
                          dgrs=None,
                          plot_results=True,
                          results_filename='',
                          likelihood_filename=None,
                          clobber=False,
                          conf=0.68,
                          contour_confs=None):
    ''' Computes the likelihood of each HI velocity range and DGR by comparing
    the model Av = DGR * N(HI) with the observed Av image.

    Parameters
    ----------
    hi_cube, hi_velocity_axis, hi_noise_cube : array-like
        HI cube, its velocity axis in km/s, and the associated noise cube.
    av_image, av_image_error : array-like or float
        Av image and its error; a float error is applied to all pixels.
    velocity_centers, velocity_widths, dgrs : array-like
        Parameter grids over which the likelihoods are evaluated.

    Returns
    -------
    vel_range_confint : tuple
        Lower and upper bounds of the HI velocity range in km/s which provide
        the N(HI) distribution best correlated with Av, with their errors.
    dgr_confint : tuple
        Best-fit DGR and its confidence-interval errors.
    likelihoods : array-like, optional
        Likelihood of each permutation through the velocity centers, velocity
        widths, and DGRs, returned together with the marginal parameter
        distributions when return_likelihoods is True.

    '''

    import numpy as np
    from scipy.stats import pearsonr
    from scipy.stats import kendalltau
    from myimage_analysis import calculate_nhi
    from scipy import signal
    from os import path
    from astropy.io import fits

    # Check if likelihood grid should be derived
    if likelihood_filename is not None:
        if not path.isfile(likelihood_filename):
            perform_mle = True
            write_mle = True
        elif clobber:
            perform_mle = True
            write_mle = True
        else:
            perform_mle = False
            write_mle = False
    # If no filename provided, do not read file and do not write file
    else:
        write_mle = False
        perform_mle = True

    if perform_mle:
        # calculate the velocity ranges given a set of centers and widths
        velocity_ranges = np.zeros(shape=[len(velocity_centers) * \
                len(velocity_widths),2])
        count = 0
        for i, center in enumerate(velocity_centers):
            for j, width in enumerate(velocity_widths):
                velocity_ranges[count, 0] = center - width / 2.
                velocity_ranges[count, 1] = center + width / 2.
                count += 1

        # calculate the likelihood for each combination of velocity center,
        # velocity width, and DGR
        likelihoods = np.zeros(
            (len(velocity_centers), len(velocity_widths), len(dgrs)))

        # Progress bar parameters
        total = float(likelihoods.size)
        count = 0

        for i, velocity_center in enumerate(velocity_centers):
            for j, velocity_width in enumerate(velocity_widths):
                for k, dgr in enumerate(dgrs):

                    velocity_range = (velocity_center - velocity_width / 2.,
                                      velocity_center + velocity_width / 2.)

                    nhi_image_temp, nhi_image_error = \
                            calculate_nhi(cube=hi_cube,
                                velocity_axis=hi_velocity_axis,
                                velocity_range=velocity_range,
                                noise_cube=hi_noise_cube)

                    # Avoid NaNs
                    indices = np.where((nhi_image_temp == nhi_image_temp) & \
                                       (av_image == av_image))

                    nhi_image_likelihood = nhi_image_temp[indices]
                    nhi_image_error_likelihood = nhi_image_error[indices]
                    av_image_likelihood = av_image[indices]
                    if type(av_image_error) != float:
                        av_image_error_likelihood = av_image_error[indices]
                    else:
                        av_image_error_likelihood = av_image_error

                    # Create model of Av with N(HI) and DGR
                    av_image_model = nhi_image_likelihood * dgr
                    av_image_model_error = nhi_image_error_likelihood * dgr

                    logL = calc_logL(av_image_model,
                                     av_image_likelihood,
                                     data_error=av_image_error_likelihood)
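                    # calc_logL is assumed to return the negative Gaussian
                    # log-likelihood, roughly
                    # 0.5 * sum(((data - model) / error)**2 + ln(2 pi error**2));
                    # negating it below therefore stores the log-likelihood, so
                    # the exponentiation further down peaks at the best fit.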

                    likelihoods[i, j, k] = -logL

                    # Show progress every 1% of the grid
                    count += 1
                    abs_step = int((total * 1) / 100) or 100
                    if count and not count % abs_step:
                        print('\t{0:.0%} processed'.format(count / total))

        # Shift the log likelihoods so the maximum is zero; this keeps the
        # exponentiation below numerically stable
        likelihoods -= likelihoods.max()

        # Convert to likelihoods
        likelihoods = np.exp(likelihoods)

        # Normalize the likelihoods
        likelihoods = likelihoods / \
            np.sum(likelihoods[~np.isnan(likelihoods)])

        # Write out fits file of likelihoods
        if write_mle:
            write_mle_tofits(filename=likelihood_filename,
                             velocity_centers=velocity_centers,
                             velocity_widths=velocity_widths,
                             dgrs=dgrs,
                             likelihoods=likelihoods,
                             clobber=clobber)

        # Avoid nans
        likelihoods = np.ma.array(likelihoods,
                                  mask=(likelihoods != likelihoods))

    # Load file of likelihoods
    elif not perform_mle:
        print('Reading likelihood grid file:')
        print(likelihood_filename)

        hdu = fits.open(likelihood_filename)
        likelihoods = hdu[0].data

        if len(velocity_centers) != likelihoods.shape[0] or \
            len(velocity_widths) != likelihoods.shape[1]:
            raise ValueError('Specified parameter grid not the same as in ' + \
                    'loaded data likelihoods.')

        likelihoods = np.ma.array(likelihoods,
                                  mask=(likelihoods != likelihoods))

    # Define parameter resolutions
    #delta_center = velocity_centers[1] - velocity_centers[0]
    #delta_width = velocity_widths[1] - velocity_widths[0]

    # Derive marginal distributions of the centers, widths, and DGRs
    center_likelihood = np.sum(likelihoods, axis=(1,2)) / \
            np.sum(likelihoods)
    width_likelihood = np.sum(likelihoods, axis=(0,2)) / \
            np.sum(likelihoods)
    dgr_likelihood = np.sum(likelihoods, axis=(0,1)) / \
            np.sum(likelihoods)

    # Derive confidence intervals of parameters
    center_confint = threshold_area(velocity_centers,
                                    center_likelihood,
                                    area_fraction=conf)
    width_confint = threshold_area(velocity_widths,
                                   width_likelihood,
                                   area_fraction=conf)
    dgr_confint = threshold_area(dgrs, dgr_likelihood, area_fraction=conf)
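    # threshold_area is assumed to return (best value, lower error, upper
    # error) for the interval enclosing area_fraction of the marginal PDF;
    # the prints below rely on that ordering, with the lower error negative.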

    print('Velocity widths = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(width_confint[0],
                                                    width_confint[2],
                                                    np.abs(width_confint[1])))
    print('Velocity centers = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} km/s'.format(center_confint[0],
                                                    center_confint[2],
                                                    np.abs(center_confint[1])))
    print('DGRs = ' + \
            '{0:.2f} +{1:.2f}/-{2:.2f} 10^-20 cm^2 mag'.format(dgr_confint[0],
                                                    dgr_confint[2],
                                                    np.abs(dgr_confint[1])))

    # Derive the velocity integration range from the center and width,
    # propagating their confidence-interval errors in quadrature
    center = center_confint[0]
    upper_lim = (center_confint[0] + width_confint[0] / 2.)
    lower_lim = (center_confint[0] - width_confint[0] / 2.)
    upper_lim_error = (center_confint[2]**2 + width_confint[2]**2)**0.5
    lower_lim_error = (center_confint[1]**2 + width_confint[1]**2)**0.5

    vel_range_confint = (lower_lim, upper_lim, lower_lim_error,
                         upper_lim_error)

    if plot_results:
        #plot_likelihoods(likelihoods[:,:, len(dgrs)/2],
        #                  velocity_centers,
        #                  velocity_widths,
        #                  show=0,
        #                  returnimage=False,
        #                  filename=results_filename)
        plot_likelihoods_hist(likelihoods,
                              velocity_centers,
                              velocity_widths,
                              x_confint=center_confint,
                              y_confint=width_confint,
                              plot_axes=('centers', 'widths'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_cw.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_centers,
                              dgrs,
                              x_confint=center_confint,
                              y_confint=dgr_confint,
                              plot_axes=('centers', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_cd.png',
                              contour_confs=contour_confs)
        plot_likelihoods_hist(likelihoods,
                              velocity_widths,
                              dgrs,
                              x_confint=width_confint,
                              y_confint=dgr_confint,
                              plot_axes=('widths', 'dgrs'),
                              show=0,
                              returnimage=False,
                              filename=results_filename + '_wd.png',
                              contour_confs=contour_confs)

    if not return_likelihoods:
        return vel_range_confint, dgr_confint
    else:
        return (vel_range_confint, dgr_confint, likelihoods, center_likelihood,
                width_likelihood, dgr_likelihood)
Example #45
0
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    #import pyfits as fits
    from astropy.io import fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    # Assumed naming convention (mirrors the single-cloud examples above);
    # these names are otherwise undefined in this example and are only used
    # to tag region-specific output.
    likelihood_filename = 'multicloud_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'multicloud_likelihood_{0:s}'.format(av_data_type)
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0, 0)), ((2, 30, 0),
                                                             (37, 0, 0)))

    props['region_name_pos'] = {
        #'taurus 1' : {'wcs' : ((3, 50,  0),
        #                       (21.5, 0, 0)),
        #             },
        #'taurus 2' : {'wcs' : ((5, 10,  0),
        #                       (21.5, 0, 0)),
        #             },
        'taurus': {
            'wcs': ((4, 40, 0), (21, 0, 0)),
        },
        'perseus': {
            'wcs': ((3, 30, 0), (26, 0, 0)),
        },
        #'perseus 1' : {'wcs' : ((3, 0,  0),
        #                      (34, 0, 0)),
        #             },
        #'perseus 2' : {'wcs' : ((3, 10,  0),
        #                      (22.5, 0, 0)),
        #             },
        'california': {
            'wcs': ((4, 28, 0), (34, 0, 0)),
        },
    }

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    props['plot_limit']['wcs'] = [
        15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5
    ]

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block off region
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in range(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'multicloud_av_nhi_map' + \
                    '.%s' % figure_type
                #av_data_type + \
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/multicloud_av_model_map' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in range(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'multicloud_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        # NOTE: this overrides the filename constructed above
        filename = 'av_map'
        filename = figure_dir + 'maps/' + filename + '.' + figure_type
        print('\nSaving Av model image to \n' + filename)

        plot_av_image(
            av_image=av_image,
            header=av_header,
            limits=[15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5],
            limits_type='wcs',
            regions=props['regions'],
            props=props,
            av_vlimits=(0, 15.5),
            filename=filename,
            show=False)

        if 0:
            filename = 'av_nhi_map'
            filename = figure_dir + 'maps/' + filename + '.' + figure_type
            print('\nSaving NHI + Av maps to \n' + filename)
            plot_nhi_image(
                nhi_image=nhi_image,
                header=av_header,
                av_image=av_image,
                limits=props['plot_limit']['wcs'],
                limits_type='wcs',
                regions=props['regions'],
                props=props,
                hi_vlimits=(0, 20),
                av_vlimits=(0, 15.5),
                #av_vlimits=(0.1,30),
                filename=filename,
                show=False)