Example #1
def main():

    import grid
    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = 'coarse'
    grid_res = 'fine'

    # Results and fits filenames
    likelihood_filename = 'california_nhi_av_likelihoods'
    results_filename = 'california_likelihood'

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += '_width_dgr_center'
        results_filename += '_width_dgr_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:

        if grid_res == 'coarse':
            likelihood_filename += '_dgr_width_lowres'
            results_filename += '_dgr_width_lowres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 80, 1)
            dgrs = np.arange(1e-2, 1, 2e-2)
        elif grid_res == 'fine':
            likelihood_filename += '_dgr_width_highres'
            results_filename += '_dgr_width_highres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 40, 0.16667)
            dgrs = np.arange(0.05, 0.5, 1e-3)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width_center'
        results_filename += '_width_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width'
        results_filename += '_width'

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # Which likelihood fits should be performed?
    core_likelihood = 0
    global_likelihood = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 1

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.2e-1  # manually override the fitted value

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'california_av_boxes_',
                            header=h)

    if core_likelihood:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                                        cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Compute likelihoods of each core region's Av and N(HI) over velocity ranges
            vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
                width_likelihood, dgr_likelihood = \
                    calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgrs=dgrs,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_likelihoods=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    conf=conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' %
                  (vel_range_confint[0], vel_range_confint[1]))
            print('DGR:')
            print('%.2f to %.2f' %
                  (dgr_confint[0], dgr_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_likelihood'] = center_likelihood.tolist()
            cores[core]['width_likelihood'] = width_likelihood.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_likelihood:
        print('\nCalculating likelihoods globally')

        mask = np.zeros(av_data_planck.shape)
        for core in cores:
            # Grab the mask
            mask += myg.get_polygon_mask(av_data_planck,
                                         cores[core]['box_vertices_rotated'])

        indices = ((mask == 0) &\
                   (av_data_planck < av_threshold))

        #indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + results_filename

        # Compute likelihoods of the global Av and N(HI) over velocity ranges
        vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
            width_likelihood, dgr_likelihood = \
                calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgrs=dgrs,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_likelihoods=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + \
                                        '_global.fits',
                                clobber=clobber,
                                conf=conf,
                                contour_confs=contour_confs)

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' %
              (vel_range_confint[0], vel_range_confint[1]))
        print('DGR:')
        print('%.2f to %.2f' % (dgr_confint[0], dgr_confint[1]))

        global_props['dust2gas_ratio'] = {}
        global_props['dust2gas_ratio_error'] = {}

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['dust2gas_ratio']['value'] = dgr_confint[0]
        global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
        global_props['hi_velocity_range_conf'] = conf
        global_props['center_likelihood'] = center_likelihood.tolist()
        global_props['width_likelihood'] = width_likelihood.tolist()
        global_props['dgr_likelihood'] = dgr_likelihood.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()
        global_props['dgrs'] = dgrs.tolist()
        global_props['likelihoods'] = likelihoods.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
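
# The examples in this listing call a local helper `load_fits` that is not
# shown here. A minimal sketch of what it could look like, assuming it simply
# wraps pyfits.getdata (the real helper lives in the author's own modules and
# may differ in detail):
def load_fits(filename, return_header=False):
    ''' Load a FITS image or cube, optionally returning its header. '''
    import pyfits as fits
    data, header = fits.getdata(filename, header=True)
    if return_header:
        return data, header
    return data
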
def main():

    import grid
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from multiprocessing import Pool

    global _hi_cube
    global _hi_velocity_axis
    global _hi_noise_cube
    global _av_image
    global _av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Include only pixels within core regions for analysis?
    core_mask = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Results and fits filenames
    likelihood_filename = 'perseus_nhi_av_likelihoods_mcmc_co_av'
    results_filename = 'perseus_likelihood_mcmc_co_av'
    global _progress_filename
    _progress_filename = 'perseus_mcmc_samples.dat'

    # Define ranges of parameters
    global _av_thres_range
    _av_thres_range = (1.0, 1.1)
    _av_thres_range = (0.1, 2.0)
    global _vel_width_range
    _vel_width_range = (0.0, 80.0)
    global _dgr_range
    _dgr_range = (0.01, 0.4)
    global _velocity_center
    _velocity_center = 5.0 # km/s

    # MCMC parameters
    global _ndim
    _ndim = 3
    global _nwalkers
    _nwalkers = 100
    global _niter
    _niter = 1000
    global _init_guesses
    _init_guesses = np.array((10, 0.10, 1.0))
    global _init_spread
    _init_spread = np.array((0.1, 0.01, 0.01))
    global _mc_threads
    _mc_threads = 10

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'
    core_property_file = 'perseus_core_properties.txt'

    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

    # Define limits for plotting the map
    prop_dict = {}
    prop_dict['limit_wcs'] = (((3, 58, 0), (27, 6, 0)),
                              ((3, 20, 0), (35, 0, 0)))
    prop_dict['limit_wcs'] = (((3, 58, 0), (26, 6, 0)),
                              ((3, 0, 0), (35, 0, 0)))

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    global _likelihood_dir
    _likelihood_dir = likelihood_dir

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            return_header=True)
    prop_dict['av_header'] = av_header

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'perseus_av_boxes_',
            header = h)

    print('\nCalculating likelihoods globally')

    mask = np.zeros(av_data_planck.shape)
    for core in cores:
        # Grab the mask
        mask += myg.get_polygon_mask(av_data_planck,
                cores[core]['wedge_vertices_rotated'])

    co_mom0 = np.sum(co_data, axis=0)

    # Mask images
    core_mask = 0
    if core_mask:
        indices = ((mask == 1) & \
                   (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0))
        mask_type = '_core_mask'
    else:
        indices = (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0)
        mask_type = ''

    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data_planck[indices])
    av_error_data_sub = np.copy(av_error_data_planck[indices])

    # Set global variables
    _hi_cube = hi_data_sub
    _hi_velocity_axis = velocity_axis
    _hi_noise_cube = noise_cube_sub
    _av_image = av_data_sub
    _av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Compute likelihoods of the Av and N(HI) data over velocity ranges
    vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
        width_likelihood, dgr_likelihood = \
            calc_likelihood(return_likelihoods=True,
                            plot_results=True,
                            results_filename=results_filename + mask_type,
                            likelihood_filename=likelihood_dir + \
                                    likelihood_filename + \
                                    mask_type + '.npy',
                            clobber=clobber,
                            conf=conf,
                            contour_confs=contour_confs)

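
# `make_velocity_axis` comes from the author's local `mycoords` module and is
# not reproduced in this listing. A plausible minimal sketch, assuming the
# spectral axis is the third FITS axis and is stored in m/s (both are
# assumptions, not confirmed by the source):
import numpy as np

def make_velocity_axis(header):
    ''' Build a velocity axis in km/s from a spectral-cube FITS header. '''
    nchan = header['NAXIS3']
    refpix = header['CRPIX3']
    refval = header['CRVAL3']
    delta = header['CDELT3']
    channels = np.arange(1, nchan + 1)  # FITS channels are 1-indexed
    return (refval + (channels - refpix) * delta) / 1000.0  # m/s -> km/s
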
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Name of HI noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.00005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 3.0

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)
    #vel_widths = np.arange(1, 50, 8*0.16667)
    #dgrs = np.arange(0.01, 0.2, 1e-2)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-20, 20)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Adjust filenames
    noise_cube_filename += bin_string
    likelihood_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                          bin_string
    results_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                       bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'perseus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'perseus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)

    if use_binned_image:
        #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
        av_data_error *= 5

    hi_data, hi_header = fits.getdata(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir +
                noise_cube_filename + '.fits',
            header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = np.logical_not(myg.get_polygon_mask(av_data, region_vertices))

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             init_mask=region_mask,
                             verbose=1,
                             plot_progress=0,
                             )

    # Combine region mask with new mask
    #mask += np.logical_not(region_mask)
    mask += region_mask

    if 1:
        import matplotlib.pyplot as plt
        plt.imshow(np.ma.array(av_data, mask=mask), origin='lower')
        plt.show()

    # Derive center velocity from hi
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + 'likelihood/'+ results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.1f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calculate chi^2 for best-fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()
    global_props['use_binned_image'] = use_binned_image

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    print('\nWriting likelihood image to\n' + results_filename + '_wd.png')
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    if 0:
        plt.clf(); plt.close()
        nhi_image_copy = np.copy(nhi_image)
        nhi_image_copy[mask] = np.nan
        av_image_copy = np.copy(av_data)
        resid_image = av_image_copy - nhi_image_copy * dgr
        plt.imshow(resid_image, origin='lower')
        plt.title(r'$A_V$ Data - Model')
        plt.colorbar()
        plt.show()
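
# `calculate_nhi` (from the author's `myimage_analysis` module) is not shown in
# this listing. It presumably implements the standard optically thin estimate
# N(HI) = 1.823e18 cm^-2 * integral of T_B dv [K km/s]; the output scaling to
# units of 1e20 cm^-2 below is an assumption, chosen to be consistent with the
# DGR values of ~0.1 used throughout these scripts:
import numpy as np

def nhi_from_cube(cube, velocity_axis, velocity_range):
    ''' Integrate an HI brightness-temperature cube over a velocity range. '''
    vel_lo, vel_hi = min(velocity_range), max(velocity_range)
    channels = (velocity_axis >= vel_lo) & (velocity_axis <= vel_hi)
    dv = np.abs(velocity_axis[1] - velocity_axis[0])  # channel width in km/s
    mom0 = np.sum(cube[channels], axis=0) * dv        # integrated T_B, K km/s
    return 1.823e-2 * mom0                            # N(HI) in 1e20 cm^-2
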
def main():

    import grid
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True
    velocity_centers = np.arange(-15, 30, 4)
    velocity_widths = np.arange(1, 80, 4)

    # Which likelihood fits should be performed?
    core_correlation = 0
    global_correlation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 100

    # Check if likelihood file already written, rewrite?
    likelihood_filename = 'california_nhi_av_likelihoods'
    clobber = 0
    hi_vel_range_conf = 0.50

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.22e-1  # manually override the fitted value

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = h)

    if core_correlation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Correlate each core region Av and N(HI) for velocity ranges
            vel_range_confint, correlations, center_corr, width_corr = \
                    correlate_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgr=dgr,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_correlations=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    hi_vel_range_conf=hi_vel_range_conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                         vel_range_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_corr'] = center_corr.tolist()
            cores[core]['width_corr'] = width_corr.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_correlation:
        print('\nCalculating correlations globally')

        indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + 'california_logL_global.png'

        # Correlate each core region Av and N(HI) for velocity ranges
        vel_range_confint, correlations, center_corr, width_corr = \
                correlate_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgr=dgr,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_correlations=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + '_global.fits',
                                clobber=clobber,
                                hi_vel_range_conf=hi_vel_range_conf)

        '''
        fit_hi_vel_range(guesses=(0, 30),
                         av_image=av_data_sub,
                         av_image_error=av_error_data_sub,
                         hi_cube=hi_data_sub,
                         hi_velocity_axis=velocity_axis,
                         hi_noise_cube=noise_cube_sub,
                         dgr=dgr)
        '''

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                     vel_range_confint[1]))

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['hi_velocity_range_conf'] = hi_vel_range_conf
        global_props['center_corr'] = center_corr.tolist()
        global_props['width_corr'] = width_corr.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
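
# Schematic sketch of the (velocity center, velocity width) grid search that
# `correlate_hi_av` appears to perform at fixed DGR (`calc_likelihood_hi_av`
# additionally grids over DGR), assuming a Gaussian likelihood of the Av data
# given an N(HI) * DGR model. The real routines also normalize the grid, derive
# confidence intervals, and write the likelihood cubes to FITS; those steps are
# omitted here:
import numpy as np
from myimage_analysis import calculate_nhi

def velocity_range_grid(hi_cube, hi_velocity_axis, av_image, av_image_error,
                        dgr, velocity_centers, velocity_widths):
    ''' Return log-likelihoods over a grid of HI velocity centers and widths. '''
    logL = np.empty((len(velocity_centers), len(velocity_widths)))
    for i, center in enumerate(velocity_centers):
        for j, width in enumerate(velocity_widths):
            vel_range = (center - width / 2.0, center + width / 2.0)
            nhi = calculate_nhi(cube=hi_cube,
                                velocity_axis=hi_velocity_axis,
                                velocity_range=vel_range,
                                return_nhi_error=False)
            av_model = nhi * dgr
            logL[i, j] = -0.5 * np.sum((av_image - av_model)**2 /
                                       av_image_error**2)
    return logL
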
Example #5
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    likelihood_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)

    # Name of HI noise cube
    noise_cube_filename = 'taurus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.0005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 2.0

    # Name of property files results are written to
    global_property_file = 'taurus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-10, 10)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''
    noise_cube_filename += bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'taurus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'taurus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)
    #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
    #av_data_error *= 10.0

    hi_data, hi_header = fits.getdata(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir +
                noise_cube_filename + '.fits',
            header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             plot_progress=False
                             )

    # Combine region mask with new mask
    mask += np.logical_not(region_mask)

    # Derive center velocity from hi
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.1f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calculate chi^2 for best-fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    plt.clf(); plt.close()
    nhi_image_copy = np.copy(nhi_image)
    nhi_image_copy[mask] = np.nan
    av_image_copy = np.copy(av_data)
    resid_image = av_image_copy - nhi_image_copy * dgr
    plt.imshow(resid_image, origin='lower')
    plt.title(r'$A_V$ Data - Model')
    plt.colorbar()
    plt.show()
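
# Schematic sketch of the iterative residual masking performed by
# `iterate_residual_masking` (not shown in this listing), inferred from its
# argument names: refit the DGR on the unmasked pixels, mask pixels whose Av
# residuals exceed `resid_width_scale` times the residual width, and stop once
# the DGR changes by less than `threshold_delta_dgr`. The author's routine also
# fits the residual distribution and can plot progress; this outline is an
# assumption, not the actual implementation:
import numpy as np

def iterate_residual_masking_sketch(nhi_image, av_data, av_data_error,
                                    threshold_delta_dgr=5e-4,
                                    resid_width_scale=3.0,
                                    init_mask=None):
    mask = np.zeros(av_data.shape, dtype=bool)
    if init_mask is not None:
        mask |= init_mask.astype(bool)
    dgr_old = np.inf
    while True:
        use = ~mask & np.isfinite(nhi_image) & np.isfinite(av_data)
        # least-squares DGR from the currently unmasked pixels
        dgr = np.sum(nhi_image[use] * av_data[use]) / np.sum(nhi_image[use]**2)
        resid = av_data - nhi_image * dgr
        # grow the mask where residuals sit well above the residual width
        mask |= resid > resid_width_scale * np.nanstd(resid[use])
        if np.abs(dgr - dgr_old) < threshold_delta_dgr:
            break
        dgr_old = dgr
    return nhi_image * dgr, mask, dgr
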
Example #6
def main(dgr=None,
         vel_range=(-5, 15),
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Which cores to include in analysis?
    cores_to_keep = [  # taur
        'L1495',
        'L1495A',
        'B213',
        'L1498',
        'B215',
        'B18',
        'B217',
        'B220-1',
        'B220-2',
        'L1521',
        'L1524',
        'L1527-1',
        'L1527-2',
        # Calif
        'L1536',
        'L1483-1',
        'L1483-2',
        'L1482-1',
        'L1482-2',
        'L1478-1',
        'L1478-2',
        'L1456',
        'NGC1579',
        #'L1545',
        #'L1517',
        #'L1512',
        #'L1523',
        #'L1512',
        # Pers
        'B5',
        'IC348',
        'B1E',
        'B1',
        'NGC1333',
        'B4',
        'B3',
        'L1455',
        'L1448',
    ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_radiance_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_radiance_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        # NOTE: likelihood_filename and results_filename are not defined in
        # this script, so requesting a region here raises a NameError unless
        # they are set above.
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nLoading global property file {0:s}.txt'.format(prop_file))
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    # Define velocity range
    props['hi_velocity_range'] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block off region
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    cloud_dict = {
        'taurus': {},
        'perseus': {},
        'california': {},
    }

    # Load core properties and DS9 core regions for each cloud
    for cloud in cloud_dict:

        print('\nLoading core properties for {0:s}'.format(cloud))

        file_dir = '/d/bip3/ezbc/{0:s}/data/av/'.format(cloud)

        # define core properties
        with open('/d/bip3/ezbc/{0:s}/data/python_output/'.format(cloud) + \
                  'core_properties/{0:s}_core_properties.txt'.format(cloud),
                  'r') as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == 'aldobaran':
            region_cloud = 'california'
        else:
            region_cloud = cloud
        core_filename = region_dir.replace('multicloud',region_cloud) + \
                        '/ds9_regions/{0:s}_av_poly_cores'.format(region_cloud)

        cores = load_ds9_core_region(cores,
                                     filename_base=core_filename,
                                     header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]['cores'] = cores

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        filename = 'multicloud_av_cores_map' + \
                   '.{0:s}'.format(figure_type)

        print('\nSaving Av cores map to \n' + filename)

        plot_cores_map(
            header=av_header,
            av_image=av_image,
            limits=props['plot_limit']['pixel'],
            regions=props['regions'],
            cloud_dict=cloud_dict,
            cores_to_keep=cores_to_keep,
            props=props,
            hi_vlimits=(0, 20),
            av_vlimits=(0, 16),
            #av_vlimits=(0.1,30),
            savedir=figure_dir + 'maps/',
            filename=filename,
            show=False)
def main():

    import grid
    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True
    velocity_centers = np.arange(-15, 30, 4)
    velocity_widths = np.arange(1, 80, 4)

    # Which likelihood fits should be performed?
    core_correlation = 0
    global_correlation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 100

    # Check if likelihood file already written, rewrite?
    likelihood_filename = 'california_nhi_av_likelihoods'
    clobber = 0
    hi_vel_range_conf = 0.50

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
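    # The hard-coded value on the next line overrides the DGR read from the
    # property file above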
    dgr = 1.22e-1

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'california_av_boxes_',
                            header=h)

    if core_correlation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                                        cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Correlate each core region Av and N(HI) for velocity ranges
            vel_range_confint, correlations, center_corr, width_corr = \
                    correlate_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgr=dgr,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_correlations=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    hi_vel_range_conf=hi_vel_range_conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' %
                  (vel_range_confint[0], vel_range_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_corr'] = center_corr.tolist()
            cores[core]['width_corr'] = width_corr.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_correlation:
        print('\nCalculating correlations globally')

        indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + 'california_logL_global.png'

        # Correlate each core region Av and N(HI) for velocity ranges
        vel_range_confint, correlations, center_corr, width_corr = \
                correlate_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgr=dgr,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_correlations=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + '_global.fits',
                                clobber=clobber,
                                hi_vel_range_conf=hi_vel_range_conf)
        '''
        fit_hi_vel_range(guesses=(0, 30),
                         av_image=av_data_sub,
                         av_image_error=av_error_data_sub,
                         hi_cube=hi_data_sub,
                         hi_velocity_axis=velocity_axis,
                         hi_noise_cube=noise_cube_sub,
                         dgr=dgr)
        '''

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' %
              (vel_range_confint[0], vel_range_confint[1]))

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['hi_velocity_range_conf'] = hi_vel_range_conf
        global_props['center_corr'] = center_corr.tolist()
        global_props['width_corr'] = width_corr.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
def main():

    import grid
    import numpy as np
    import numpy
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = 'course'
    grid_res = 'fine'

    # Results and fits filenames
    likelihood_filename = 'california_nhi_av_likelihoods'
    results_filename = 'california_likelihood'

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += '_width_dgr_center'
        results_filename += '_width_dgr_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:

        if grid_res == 'course':
            likelihood_filename += '_dgr_width_lowres'
            results_filename += '_dgr_width_lowres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 80, 1)
            dgrs = np.arange(1e-2, 1, 2e-2)
        elif grid_res == 'fine':
            likelihood_filename += '_dgr_width_highres'
            results_filename += '_dgr_width_highres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 40, 0.16667)
            dgrs = np.arange(0.05, 0.5, 1e-3)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width_center'
        results_filename += '_width_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width'
        results_filename += '_width'

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # Which likelihood fits should be performed?
    core_likelihoodelation = 0
    global_likelihoodelation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 1

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.2e-1

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = h)

    if core_likelihoodelation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Calculate likelihoods of the Av and N(HI) correlation for this
            # core over the velocity ranges
            vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
                width_likelihood, dgr_likelihood = \
                    calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgrs=dgrs,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_likelihoods=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    conf=conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                         vel_range_confint[1]))
            print('DGR:')
            print('%.2f to %.2f' % (dgr_confint[0],
                                     dgr_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_likelihood'] = center_likelihood.tolist()
            cores[core]['width_likelihood'] = width_likelihood.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_likelihoodelation:
        print('\nCalculating likelihoods globally')


        mask = np.zeros(av_data_planck.shape)
        for core in cores:
            # Grab the mask
            mask += myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

        indices = ((mask == 0) &\
                   (av_data_planck < av_threshold))


        #indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + results_filename

        # Calculate likelihoods of the Av and N(HI) correlation globally over
        # the velocity ranges
        vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
            width_likelihood, dgr_likelihood = \
                calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgrs=dgrs,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_likelihoods=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + \
                                        '_global.fits',
                                clobber=clobber,
                                conf=conf,
                                contour_confs=contour_confs)

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                     vel_range_confint[1]))
        print('DGR:')
        print('%.2f to %.2f' % (dgr_confint[0],
                                dgr_confint[1]))

        global_props['dust2gas_ratio'] = {}
        global_props['dust2gas_ratio_error'] = {}

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['dust2gas_ratio']['value'] = dgr_confint[0]
        global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
        global_props['hi_velocity_range_conf'] = conf
        global_props['center_likelihood'] = center_likelihood.tolist()
        global_props['width_likelihood'] = width_likelihood.tolist()
        global_props['dgr_likelihood'] = dgr_likelihood.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()
        global_props['dgrs'] = dgrs.tolist()
        global_props['likelihoods'] = likelihoods.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
def main():

    import grid
    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from multiprocessing import Pool

    global _hi_cube
    global _hi_velocity_axis
    global _hi_noise_cube
    global _av_image
    global _av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Include only pixels within core regions for analysis?
    core_mask = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Results and fits filenames
    likelihood_filename = 'perseus_nhi_av_likelihoods_mcmc_co_av'
    results_filename = 'perseus_likelihood_mcmc_co_av'
    global _progress_filename
    _progress_filename = 'perseus_mcmc_samples.dat'

    # Define ranges of parameters
    global _av_thres_range
    _av_thres_range = (1.0, 1.1)
    _av_thres_range = (0.1, 2.0)
    global _vel_width_range
    _vel_width_range = (0.0, 80.0)
    global _dgr_range
    _dgr_range = (0.01, 0.4)
    global _velocity_center
    _velocity_center = 5.0  # km/s

    # MCMC parameters
    global _ndim
    _ndim = 3
    global _nwalkers
    _nwalkers = 100
    global _niter
    _niter = 1000
    global _init_guesses
    _init_guesses = np.array((10, 0.10, 1.0))
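    # (the three initial-guess entries presumably correspond to the velocity
    #  width, DGR, and Av threshold, matching the parameter ranges defined above)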
    global _init_spread
    _init_spread = np.array((0.1, 0.01, 0.01))
    global _mc_threads
    _mc_threads = 10

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'
    core_property_file = 'perseus_core_properties.txt'

    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

    # Define limits for plotting the map
    prop_dict = {}
    prop_dict['limit_wcs'] = (((3, 58, 0), (27, 6, 0)), ((3, 20, 0), (35, 0,
                                                                      0)))
    prop_dict['limit_wcs'] = (((3, 58, 0), (26, 6, 0)), ((3, 0, 0), (35, 0,
                                                                     0)))

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    global _likelihood_dir
    _likelihood_dir = likelihood_dir

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            return_header=True)
    prop_dict['av_header'] = av_header

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'perseus_av_boxes_',
                            header=h)

    print('\nCalculating likelihoods globally')

    mask = np.zeros(av_data_planck.shape)
    for core in cores:
        # Grab the mask
        mask += myg.get_polygon_mask(av_data_planck,
                                     cores[core]['wedge_vertices_rotated'])

    co_mom0 = np.sum(co_data, axis=0)
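    # The CO zeroth-moment (integrated intensity) map is used below to keep only
    # pixels whose CO emission lies below ~2x the standard deviation of the map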

    # Mask images
    core_mask = 0
    if core_mask:
        indices = ((mask == 1) & \
                   (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0))
        mask_type = '_core_mask'
    else:
        indices = (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)]) * 2.0)
        mask_type = ''

    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data_planck[indices])
    av_error_data_sub = np.copy(av_error_data_planck[indices])

    # Set global variables
    _hi_cube = hi_data_sub
    _hi_velocity_axis = velocity_axis
    _hi_noise_cube = noise_cube_sub
    _av_image = av_data_sub
    _av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Calculate likelihoods of the Av and N(HI) correlation over the parameter
    # ranges
    vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
        width_likelihood, dgr_likelihood = \
            calc_likelihood(return_likelihoods=True,
                            plot_results=True,
                            results_filename=results_filename + mask_type,
                            likelihood_filename=likelihood_dir + \
                                    likelihood_filename + \
                                    mask_type + '.npy',
                            clobber=clobber,
                            conf=conf,
                            contour_confs=contour_confs)
Exemple #10
0
def run_cloud_analysis(global_args, ):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
        'figure_dir': figure_dir,
        'cloud_name': cloud_name,
        'filename_base': filename_base,
        'plot_diagnostics': global_args['plot_diagnostics'],
        #'av_nhi_contour': av_nhi_contour,
        'av_nhi_contour': True,
        'av_nhi_limits': [0, 20, -1, 9],
        #'av_nhi_limits': None,
    }

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)

    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)

    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
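            # FWHM = 2*sqrt(2*ln(2))*sigma ~= 2.355*sigma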
            std = width / 2.355
            g = Gaussian2DKernel(std)

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')
        else:
            # Without smoothing, use the native-resolution cubes directly
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        velocity_range_dr1 = [-5, 15]
        gauss_fits = gauss_fits_dr1 = None
        comp_num = comp_num_dr1 = None
        hi_range_error = None

    hi_range_kwargs = {
        'velocity_range': velocity_range,
        'gauss_fits': gauss_fits,
        'comp_num': comp_num,
        'hi_range_error': hi_range_error,
        'vel_range': velocity_range,
        'gauss_fit_kwargs': gauss_fit_kwargs,
    }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_spectrum,
        gauss_fits=gauss_fits,
        comp_num=comp_num,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_dr1_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_dr1_spectrum,
        gauss_fits=gauss_fits_dr1,
        comp_num=comp_num_dr1,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range_dr1,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels
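    # (a pixel is flagged only where both the DR2 and DR1 N(HI) values are negative)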
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(
        nhi_image=nhi_image / nhi_image_dr1,
        header=hi_header,
        limits=[65, 45, 25, 35],
        filename=filename,
        show=0,
        cb_text='DR2 / DR1',
        #hi_vlimits=[0.91, 0.93],
    )
def main():

    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube

    global hi_cube
    global hi_velocity_axis
    global hi_noise_cube
    global av_image
    global av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = "fine"
    grid_res = "course"

    # Use multithreading?
    multithread = False

    # Use Av+CO mask or only CO?
    av_and_co_mask = True

    # Derive CO mask? If co_thres is None, co_thres will be set to 2 * std(co)
    co_thres = 6.00  # K km/s

    # Threshold of Av below which we expect only atomic gas, in mag
    av_thres = 1.4

    # Results and fits filenames
    if av_and_co_mask:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_" + "av{0:.1f}mag".format(av_thres)
        results_filename = "taurus_likelihood_co_" + "av{0:.1f}mag".format(av_thres)
    else:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_only"
        results_filename = "taurus_likelihood_co_only"

    # Name of property files results are written to
    global_property_file = "taurus_global_properties.txt"
    core_property_file = "taurus_core_properties.txt"

    # Name of noise cube
    noise_cube_filename = "taurus_hi_galfa_cube_regrid_planckres_noise.fits"

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += "_width_dgr_center"
        results_filename += "_width_dgr_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:
        if grid_res == "course":
            likelihood_filename += "_dgr_width_lowres"
            results_filename += "_dgr_width_lowres"
            velocity_centers = np.arange(-5, 10, 10 * 0.16667)
            velocity_widths = np.arange(1, 30, 10 * 0.16667)
            dgrs = np.arange(0.05, 0.7, 2e-2)
        elif grid_res == "fine":
            likelihood_filename += "_dgr_width_highres"
            results_filename += "_dgr_width_highres"
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 100, 0.16667)
            dgrs = np.arange(0.15, 0.4, 1e-3)
            velocity_widths = np.arange(1, 15, 0.16667)
            dgrs = np.arange(0.1, 0.9, 3e-3)
            # velocity_widths = np.arange(1, 40, 1)
            # dgrs = np.arange(0.15, 0.4, 1e-1)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width_center"
        results_filename += "_width_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width"
        results_filename += "_width"

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/taurus/figures/hi_velocity_range/"
    av_dir = "/d/bip3/ezbc/taurus/data/av/"
    hi_dir = "/d/bip3/ezbc/taurus/data/hi/"
    co_dir = "/d/bip3/ezbc/taurus/data/co/"
    core_dir = "/d/bip3/ezbc/taurus/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/taurus/data/python_output/"
    region_dir = "/d/bip3/ezbc/taurus/data/python_output/ds9_regions/"
    likelihood_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"

    # load Planck Av and GALFA HI images, on same grid
    av_data, av_header = load_fits(av_dir + "taurus_av_planck_5arcmin.fits", return_header=True)

    av_data_error, av_error_header = load_fits(av_dir + "taurus_av_error_planck_5arcmin.fits", return_header=True)

    hi_data, h = load_fits(hi_dir + "taurus_hi_galfa_cube_regrid_planckres.fits", return_header=True)

    co_data, co_header = load_fits(co_dir + "taurus_co_cfa_cube_regrid_planckres.fits", return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)
    co_velocity_axis = make_velocity_axis(co_header)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(
            cube=hi_data,
            velocity_axis=velocity_axis,
            velocity_noise_range=[90, 110],
            header=h,
            Tsys=30.0,
            filename=hi_dir + noise_cube_filename,
        )
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename, return_header=True)

    # define core properties
    with open(core_dir + core_property_file, "r") as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, "r") as f:
        global_props = json.load(f)

    # Change WCS coords to pixel coords of images
    cores = convert_core_coordinates(cores, h)
    cores = load_ds9_region(cores, filename_base=region_dir + "taurus_av_boxes_", header=h)
    global_props = convert_limit_coordinates(global_props, header=av_header)

    print ("\nCalculating likelihoods globally")

    co_data_nonans = np.copy(co_data)
    co_data_nonans[np.isnan(co_data_nonans)] = 0.0

    # Set velocity center as CO peak
    if not center_vary:
        co_spectrum = np.sum(co_data_nonans, axis=(1, 2))
        co_avg_vel = np.average(co_velocity_axis, weights=co_spectrum)
        co_peak_vel = co_velocity_axis[co_spectrum == np.max(co_spectrum)]
        # velocity_centers = np.arange(co_peak_vel, co_peak_vel + 1, 1)
        velocity_centers = np.arange(co_avg_vel, co_avg_vel + 1, 1)
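        # (note: the CO intensity-weighted average velocity is used here,
        #  not the CO peak velocity computed above)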

        print ("\nVelocity center from CO = " + "{0:.2f} km/s".format(velocity_centers[0]))

    # Create mask where CO is present
    core_mask = np.zeros(av_data.shape)
    # for core in cores:
    #    # Grab the mask
    #    core_mask += myg.get_polygon_mask(av_data,
    #            cores[core]['box_vertices_rotated'])

    # Calc moment 0 map of CO
    co_mom0 = np.sum(co_data_nonans, axis=0)

    # calc noise without any emission if CO threshold not already set
    if co_thres is None:
        co_noise = calc_co_noise(co_mom0, global_props)
        co_thres = 2.0 * co_noise

    # Derive relevant region
    pix = global_props["region_limit"]["pixel"]
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]), (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print ("\nRegion size = " + "{0:.0f} pix".format(region_mask[region_mask == 1].size))

    # Get indices which trace only atomic gas, i.e., no CO emission
    if av_and_co_mask:
        indices = ((co_mom0 < co_thres) & (av_data < av_thres)) & (region_mask == 1)
    elif not av_and_co_mask:
        indices = (co_mom0 < co_thres) & (region_mask == 1)
        av_thres = None

    # Write mask of pixels not used
    mask = ~indices

    # Mask global data with CO indices
    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data[indices])
    av_error_data_sub = np.copy(av_data_error[indices])

    # import matplotlib.pyplot as plt
    # av_plot_data = np.copy(av_data)
    # av_plot_data[~indices] = np.nan
    # plt.imshow(av_plot_data, origin='lower')
    # plt.contour(co_mom0, levels=(6, 12, 24), origin='lower')
    # plt.show()
    # plt.clf()
    # plt.close()

    # Plot the masked image
    av_data_masked = np.copy(av_data)
    av_data_masked[~indices] = np.nan
    figure_types = ["png"]
    for figure_type in figure_types:
        plot_av_image(
            av_image=av_data_masked,
            header=av_header,
            savedir=figure_dir + "../maps/",
            limits=global_props["region_limit"]["pixel"],
            filename="taurus_dgr_co_masked_map." + figure_type,
            show=0,
        )

    # Set global variables
    hi_cube = hi_data_sub
    hi_velocity_axis = velocity_axis
    hi_noise_cube = noise_cube_sub
    av_image = av_data_sub
    av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Calculate likelihoods of the Av and N(HI) correlation over the velocity
    # ranges
    vel_range_confint, dgr_confint, likelihoods, center_likelihood, \
        width_likelihood, dgr_likelihood, center_max, width_max, dgr_max = \
        calc_likelihood_hi_av(
            dgrs=dgrs,
            velocity_centers=velocity_centers,
            velocity_widths=velocity_widths,
            return_likelihoods=True,
            plot_results=True,
            results_filename=results_filename,
            likelihood_filename=likelihood_dir + likelihood_filename + "_global.fits",
            clobber=clobber,
            conf=conf,
            contour_confs=contour_confs,
            multithread=multithread,
        )
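    # The best-fit integration range spans the best-fit center +/- half the
    # best-fit width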
    vel_range_max = (center_max - width_max / 2.0, center_max + width_max / 2.0)

    print ("\nHI velocity integration range:")
    print ("%.1f to %.1f km/s" % (vel_range_confint[0], vel_range_confint[1]))
    print ("\nDGR:")
    print ("%.1f x 10^-20 cm^2 mag" % (dgr_confint[0]))

    # Calulate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = calculate_nhi(
        cube=hi_data, velocity_axis=hi_velocity_axis, velocity_range=vel_range_max, noise_cube=noise_cube
    )
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = (av_image_model == av_image_model) & (av_data == av_data)
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
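    # reduced chi^2 = sum[(Av_obs - Av_model)^2 / sigma_Av^2] / N_pix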
    chisq = np.sum((av_data[~mask] - av_image_model[~mask]) ** 2 / av_data_error[~mask] ** 2) / av_data[~mask].size

    print("\nTotal number of pixels in analysis, after masking = " +
          "{0:.0f}".format(npix) +
          "\nGiven a CO threshold of {0:.2f} K km/s".format(co_thres) +
          "\nand an Av threshold of {0:.2f} mag".format(av_thres))

    print ("\nReduced chi^2 = {0:.1f}".format(chisq))

    # Write results to global properties
    global_props["dust2gas_ratio"] = {}
    global_props["dust2gas_ratio_error"] = {}
    global_props["hi_velocity_width"] = {}
    global_props["dust2gas_ratio_max"] = {}
    global_props["hi_velocity_center_max"] = {}
    global_props["hi_velocity_width_max"] = {}
    global_props["hi_velocity_range_max"] = {}
    global_props["av_threshold"] = {}
    global_props["co_threshold"] = {}
    global_props["hi_velocity_width"]["value"] = vel_range_confint[1] - vel_range_confint[0]
    global_props["hi_velocity_width"]["unit"] = "km/s"
    global_props["hi_velocity_range"] = vel_range_confint[0:2]
    global_props["hi_velocity_range_error"] = vel_range_confint[2:]
    global_props["dust2gas_ratio"]["value"] = dgr_confint[0]
    global_props["dust2gas_ratio_error"]["value"] = dgr_confint[1:]
    global_props["dust2gas_ratio_max"]["value"] = dgr_max
    global_props["hi_velocity_center_max"]["value"] = center_max
    global_props["hi_velocity_width_max"]["value"] = width_max
    global_props["hi_velocity_range_max"]["value"] = vel_range_max
    global_props["hi_velocity_range_conf"] = conf
    global_props["center_likelihood"] = center_likelihood.tolist()
    global_props["width_likelihood"] = width_likelihood.tolist()
    global_props["dgr_likelihood"] = dgr_likelihood.tolist()
    global_props["vel_centers"] = velocity_centers.tolist()
    global_props["vel_widths"] = velocity_widths.tolist()
    global_props["dgrs"] = dgrs.tolist()
    global_props["likelihoods"] = likelihoods.tolist()
    global_props["av_threshold"]["value"] = av_thres
    global_props["av_threshold"]["unit"] = "mag"
    global_props["co_threshold"]["value"] = co_thres
    global_props["co_threshold"]["unit"] = "K km/s"
    global_props["chisq"] = chisq
    global_props["npix"] = npix
    global_props["mask"] = mask.tolist()

    with open(property_dir + global_property_file, "w") as f:
        json.dump(global_props, f)
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    vel_range_type : str
        Either 'single' or 'multiple' HI velocity integration ranges.
    region : int or None
        If 1, 2, or 3, restricts the analysis to the corresponding subregion.
    av_data_type : str
        Av map to load: 'planck' (default), 'planck_rad', 'lee12_2mass', or
        'lee12_iris'.
    '''
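    # Example invocation (argument values are illustrative assumptions, not
    # taken from this script):
    #     main(dgr=1.1e-1, vel_range=(0, 15), av_data_type='planck')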

    # import external modules
    #import pyfits as fits
    from astropy.io import fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system,path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename,
            header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0 ,0)),
                                  ((2, 30, 0), (37, 0, 0))
                                  )

    props['region_name_pos'] = {
             #'taurus 1' : {'wcs' : ((3, 50,  0),
             #                       (21.5, 0, 0)),
             #             },
             #'taurus 2' : {'wcs' : ((5, 10,  0),
             #                       (21.5, 0, 0)),
             #             },
             'taurus' : {'wcs' : ((4, 40,  0),
                                  (21, 0, 0)),
                          },
             'perseus' : {'wcs' : ((3, 30,  0),
                                   (26, 0, 0)),
                          },
             #'perseus 1' : {'wcs' : ((3, 0,  0),
             #                      (34, 0, 0)),
             #             },
             #'perseus 2' : {'wcs' : ((3, 10,  0),
             #                      (22.5, 0, 0)),
             #             },
             'california' : {'wcs' : ((4, 28,  0),
                                      (34, 0, 0)),
                             },
             }

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits',
                                              'plot_limit',
                                              'region_name_pos'))

    props['plot_limit']['wcs'] = [15*(5+20./60), 15*(2+30./60.), 17, 38.5]
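    # The plot limits above are in degrees: RA given in hours is converted via
    # deg = 15 * (h + m/60.), e.g. 5h20m -> 15 * (5 + 20/60.) = 80 deg and
    # 2h30m -> 37.5 deg; the last two entries appear to be Dec limits in deg.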


    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'multicloud_av_nhi_map' + \
                    '.%s' % figure_type
                #av_data_type + \
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/multicloud_av_model_map' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(vel_range[i, 0],
                                                              vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'multicloud_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        filename = 'av_map'
        filename = figure_dir + 'maps/' + filename + '.' + figure_type
        print('\nSaving Av model image to \n' + filename)

        plot_av_image(av_image=av_image,
                       header=av_header,
                       limits=[15*(5+20./60), 15*(2+30./60.), 17, 38.5],
                       limits_type='wcs',
                       regions=props['regions'],
                       props=props,
                       av_vlimits=(0,15.5),
                       filename=filename,
                       show=False)

        if 0:
            filename = 'av_nhi_map'
            filename = figure_dir + 'maps/' + filename + '.' + figure_type
            print('\nSaving NHI + Av maps to \n' + filename)
            plot_nhi_image(nhi_image=nhi_image,
                           header=av_header,
                           av_image=av_image,
                           limits=props['plot_limit']['wcs'],
                           limits_type='wcs',
                           regions=props['regions'],
                           props=props,
                           hi_vlimits=(0,20),
                           av_vlimits=(0,15.5),
                           #av_vlimits=(0.1,30),
                           filename=filename,
                           show=False)
def main(dgr=None, vel_range=(-5, 15), vel_range_type="single", region=None, av_data_type="planck"):
    """ Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    """

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = "multicloud_hi_galfa_cube_regrid_planckres_noise.fits"

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    # av_data_type = 'lee12'
    # av_data_type = 'planck'

    # Global parameter file
    prop_file = "multicloud_global_properties"

    # Which cores to include in analysis?
    cores_to_keep = [  # taur
        "L1495",
        "L1495A",
        "B213",
        "L1498",
        "B215",
        "B18",
        "B217",
        "B220-1",
        "B220-2",
        "L1521",
        "L1524",
        "L1527-1",
        "L1527-2",
        # Calif
        "L1536",
        "L1483-1",
        "L1483-2",
        "L1482-1",
        "L1482-2",
        "L1478-1",
        "L1478-2",
        "L1456",
        "NGC1579",
        #'L1545',
        #'L1517',
        #'L1512',
        #'L1523',
        #'L1512',
        # Pers
        "B5",
        "IC348",
        "B1E",
        "B1",
        "NGC1333",
        "B4",
        "B3",
        "L1455",
        "L1448",
    ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {"wcs": (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))), "pixel": ()}
    elif region == 2:
        region_limit = {"wcs": (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))), "pixel": ()}
    elif region == 3:
        region_limit = {"wcs": (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))), "pixel": ()}
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/multicloud/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/multicloud/figures/"
    av_dir = "/d/bip3/ezbc/multicloud/data/av/"
    hi_dir = "/d/bip3/ezbc/multicloud/data/hi/"
    co_dir = "/d/bip3/ezbc/multicloud/data/co/"
    core_dir = "/d/bip3/ezbc/multicloud/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/multicloud/data/python_output/"
    region_dir = "/d/bip3/ezbc/multicloud/data/python_output/"

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == "lee12_2mass":
        print("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_lee12_2mass_regrid_planckres.fits", return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "lee12_iris":
        print("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_lee12_iris_regrid_planckres.fits", return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "planck_rad":
        print("\nLoading Planck data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_planck_radiance_5arcmin.fits", return_header=True)
        av_image_error, av_error_header = load_fits(
            av_dir + "multicloud_av_error_planck_radiance_5arcmin.fits", return_header=True
        )
    else:
        print("\nLoading Planck data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_planck_5arcmin.fits", return_header=True)

        av_image_error, av_error_header = load_fits(
            av_dir + "multicloud_av_error_planck_5arcmin.fits", return_header=True
        )

    hi_cube, hi_header = load_fits(hi_dir + "multicloud_hi_galfa_cube_regrid_planckres.fits", return_header=True)

    co_data, co_header = load_fits(co_dir + "multicloud_co_cfa_cube_regrid_planckres.fits", return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += "_region{0:.0f}".format(region)
        results_filename += "_region{0:.0f}".format(region)

    print("\nLoading global property file {0:s}.txt".format(prop_file))
    with open(property_dir + prop_file + ".txt", "r") as f:
        props = json.load(f)

    # Define velocity range
    props["hi_velocity_range"] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(
            cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_noise_range=[90, 110],
            header=hi_header,
            Tsys=30.0,
            filename=hi_dir + noise_cube_filename,
        )
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename, header=True)

    # create nhi image
    nhi_image = calculate_nhi(
        cube=hi_cube, velocity_axis=velocity_axis, velocity_range=vel_range, header=hi_header, noise_cube=hi_noise_cube
    )

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(
        props, header=av_header, coords=("region_limit", "co_noise_limits", "plot_limit", "region_name_pos")
    )

    # Load cloud division regions from ds9
    props = load_ds9_region(props, filename=region_dir + "multicloud_divisions.reg", header=av_header)

    # Derive relevant region
    pix = props["region_limit"]["pixel"]
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]), (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print("\nRegion size = " + "{0:.0f} pix".format(region_mask[region_mask == 1].size))

    if vel_range_type == "single":
        print("\nHI velocity integration range:")
        print("%.1f to %.1f km/s" % (vel_range[0], vel_range[1]))
    elif vel_range_type == "multiple":
        print("\nHI velocity integration ranges:")
        for i in xrange(0, vel_range.shape[0]):
            print("%.1f to %.1f km/s" % (vel_range[i, 0], vel_range[i, 1]))

    cloud_dict = {"taurus": {}, "perseus": {}, "california": {}}

    # load Planck Av and GALFA HI images, on same grid
    for cloud in cloud_dict:

        print("\nLoading core properties for {0:s}".format(cloud))

        file_dir = "/d/bip3/ezbc/{0:s}/data/av/".format(cloud)

        # define core properties
        with open(
            "/d/bip3/ezbc/{0:s}/data/python_output/".format(cloud)
            + "core_properties/{0:s}_core_properties.txt".format(cloud),
            "r",
        ) as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == "aldobaran":
            region_cloud = "california"
        else:
            region_cloud = cloud
        core_filename = region_dir.replace("multicloud", region_cloud) + "/ds9_regions/{0:s}_av_poly_cores".format(
            region_cloud
        )

        cores = load_ds9_core_region(cores, filename_base=core_filename, header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]["cores"] = cores

    # Plot
    figure_types = ["png", "pdf"]
    for figure_type in figure_types:
        filename = "multicloud_av_cores_map" + ".{0:s}".format(figure_type)

        print("\nSaving Av cores map to \n" + filename)

        plot_cores_map(
            header=av_header,
            av_image=av_image,
            limits=props["plot_limit"]["pixel"],
            regions=props["regions"],
            cloud_dict=cloud_dict,
            cores_to_keep=cores_to_keep,
            props=props,
            hi_vlimits=(0, 20),
            av_vlimits=(0, 16),
            # av_vlimits=(0.1,30),
            savedir=figure_dir + "maps/",
            filename=filename,
            show=False,
        )
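
A minimal driver sketch, not part of the original listing: it assumes the script above is importable as a module (the name multicloud_cores_map is hypothetical) and that its helpers (load_fits, plot_cores_map, load_ds9_core_region, convert_limit_coordinates, ...) are available on the path.

# Hypothetical driver for the main() defined above; the module name is an
# assumption.
import multicloud_cores_map

for av_data_type in ('planck', 'planck_rad', 'lee12_2mass', 'lee12_iris'):
    multicloud_cores_map.main(vel_range=(-5, 15),
                              vel_range_type='single',
                              region=None,
                              av_data_type=av_data_type)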
def main(dgr=None, vel_range=(-5, 15), vel_range_type='single', region=None,
        av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system,path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Which cores to include in analysis?
    cores_to_keep = [# taur
                     'L1495',
                     'L1495A',
                     'B213',
                     'L1498',
                     'B215',
                     'B18',
                     'B217',
                     'B220-1',
                     'B220-2',
                     'L1521',
                     'L1524',
                     'L1527-1',
                     'L1527-2',
                     # Calif
                     'L1536',
                     'L1483-1',
                     'L1483-2',
                     'L1482-1',
                     'L1482-2',
                     'L1478-1',
                     'L1478-2',
                     'L1456',
                     'NGC1579',
                     #'L1545',
                     #'L1517',
                     #'L1512',
                     #'L1523',
                     #'L1512',
                     # Pers
                     'B5',
                     'IC348',
                     'B1E',
                     'B1',
                     'NGC1333',
                     'B4',
                     'B3',
                     'L1455',
                     'L1448',
                     ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_tau353':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nLoading global property file {0:s}.txt'.format(prop_file))
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    # Define velocity range
    props['hi_velocity_range'] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename,
            header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0 ,0)),
                                  ((2, 30, 0), (37, 0, 0))
                                  )


    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits',
                                              'plot_limit',
                                              'region_name_pos'))

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))


    cloud_dict = {'taurus' : {},
                  'perseus' : {},
                  'california' : {},
                  }

    # load Planck Av and GALFA HI images, on same grid
    for cloud in cloud_dict:

        print('\nLoading core properties for {0:s}'.format(cloud))

        file_dir = '/d/bip3/ezbc/{0:s}/data/av/'.format(cloud)

        # define core properties
        with open('/d/bip3/ezbc/{0:s}/data/python_output/'.format(cloud) + \
                  'core_properties/{0:s}_core_properties.txt'.format(cloud),
                  'r') as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == 'aldobaran':
            region_cloud = 'california'
        else:
            region_cloud = cloud
        core_filename = '/d/bip3/ezbc/' + region_cloud + '/data/python_output' + \
                        '/ds9_regions/{0:s}_av_poly_cores'.format(region_cloud)

        cores = load_ds9_core_region(cores,
                                     filename_base=core_filename,
                                     header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]['cores'] = cores

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        filename = 'av_cores_map' + \
                   '.{0:s}'.format(figure_type)

        print('\nSaving Av cores map to \n' + filename)

        plot_cores_map(header=av_header,
                       av_image=av_image,
                       limits=props['plot_limit']['pixel'],
                       regions=props['regions'],
                       cloud_dict=cloud_dict,
                       cores_to_keep=cores_to_keep,
                       props=props,
                       hi_vlimits=(0,20),
                       av_vlimits=(0,16),
                       #av_vlimits=(0.1,30),
                       savedir=figure_dir + 'maps/',
                       filename=filename,
                       show=False)
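
The rectangular region masking above is delegated to myg.get_polygon_mask(av_image, region_vertices). As a rough, self-contained stand-in for that helper (an assumption about its behaviour, not the project's implementation), a polygon mask over a pixel grid can be built with matplotlib:

import numpy as np
from matplotlib.path import Path

def polygon_mask(image, vertices):
    # vertices: sequence of (y, x) pixel coordinates, mirroring the
    # (pix[1], pix[0]) tuples used above (the ordering is an assumption)
    ny, nx = image.shape
    yy, xx = np.mgrid[0:ny, 0:nx]
    pixel_coords = np.column_stack((xx.ravel(), yy.ravel()))
    poly = Path([(x, y) for (y, x) in vertices])
    return poly.contains_points(pixel_coords).reshape(ny, nx)

# toy example: mask a square region inside a 10x10 image
image = np.zeros((10, 10))
mask = polygon_mask(image, [(2, 2), (2, 7), (7, 7), (7, 2)])
print('Region size = {0:.0f} pix'.format(mask.sum()))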
def run_cloud_analysis(global_args,):

    from astropy.io import fits
    import numpy as np
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir =  '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }


    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])


    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)


    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)


    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0  # HI beam in pixels (1 pix = 5 arcmin)
            co_res = 8.4 / 5.0  # CO beam in pixels
            width = (co_res**2 - hi_res**2)**0.5  # FWHM of smoothing kernel
            std = width / 2.355  # convert FWHM to standard deviation
            g = Gaussian2DKernel(std)

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')

        else:
            # no smoothing requested: fall back to the native-resolution
            # cubes so the spectra below remain defined
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                             statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        gauss_fits = None
        comp_num = None
        hi_range_error = None
        velocity_range_dr1 = velocity_range  # keep DR1 plotting below defined
        gauss_fits_dr1 = None
        comp_num_dr1 = None

    hi_range_kwargs = {
                       'velocity_range': velocity_range,
                       'gauss_fits': gauss_fits,
                       'comp_num': comp_num,
                       'hi_range_error': hi_range_error,
                       'vel_range': velocity_range,
                       'gauss_fit_kwargs': gauss_fit_kwargs,
                       }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_spectrum,
                 gauss_fits=gauss_fits,
                 comp_num=comp_num,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_dr1_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_dr1_spectrum,
                 gauss_fits=gauss_fits_dr1,
                 comp_num=comp_num_dr1,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range_dr1,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # hard-coded override of the velocity ranges derived above, applied to
    # the N(HI) maps below
    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=[65, 45, 25, 35],
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1',
                   #hi_vlimits=[0.91, 0.93],
                   )
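
A rough sketch, not from the original source, of the global_args dictionary that run_cloud_analysis reads. Only keys referenced directly in the function body above are listed, with illustrative values; further keys consumed by helpers such as create_filename_base, get_gauss_fit_kwargs, and calc_hi_vel_range are omitted.

# Hypothetical argument dictionary for run_cloud_analysis() above.
global_args = {
    'cloud_name': 'perseus',
    'region': None,
    'region_name': 'perseus',
    'load': False,
    'data_type': 'planck',
    'background_subtract': False,
    'plot_diagnostics': False,
    'clobber_hi_error': False,
    'clobber_spectra': False,
    'smooth_hi_to_co_res': True,
    'hi_range_calc': 'gaussian',
}

run_cloud_analysis(global_args)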
Example #16
0
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    #import pyfits as fits
    from astropy.io import fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0, 0)), ((2, 30, 0),
                                                             (37, 0, 0)))

    props['region_name_pos'] = {
        #'taurus 1' : {'wcs' : ((3, 50,  0),
        #                       (21.5, 0, 0)),
        #             },
        #'taurus 2' : {'wcs' : ((5, 10,  0),
        #                       (21.5, 0, 0)),
        #             },
        'taurus': {
            'wcs': ((4, 40, 0), (21, 0, 0)),
        },
        'perseus': {
            'wcs': ((3, 30, 0), (26, 0, 0)),
        },
        #'perseus 1' : {'wcs' : ((3, 0,  0),
        #                      (34, 0, 0)),
        #             },
        #'perseus 2' : {'wcs' : ((3, 10,  0),
        #                      (22.5, 0, 0)),
        #             },
        'california': {
            'wcs': ((4, 28, 0), (34, 0, 0)),
        },
    }

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    props['plot_limit']['wcs'] = [
        15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5
    ]

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'multicloud_av_nhi_map' + \
                    '.%s' % figure_type
                #av_data_type + \
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/multicloud_av_model_map' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'multicloud_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        filename = 'av_map'
        filename = figure_dir + 'maps/' + filename + '.' + figure_type
        print('\nSaving Av model image to \n' + filename)

        plot_av_image(
            av_image=av_image,
            header=av_header,
            limits=[15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5],
            limits_type='wcs',
            regions=props['regions'],
            props=props,
            av_vlimits=(0, 15.5),
            filename=filename,
            show=False)

        if 0:
            filename = 'av_nhi_map'
            filename = figure_dir + 'maps/' + filename + '.' + figure_type
            print('\nSaving NHI + Av maps to \n' + filename)
            plot_nhi_image(
                nhi_image=nhi_image,
                header=av_header,
                av_image=av_image,
                limits=props['plot_limit']['wcs'],
                limits_type='wcs',
                regions=props['regions'],
                props=props,
                hi_vlimits=(0, 20),
                av_vlimits=(0, 15.5),
                #av_vlimits=(0.1,30),
                filename=filename,
                show=False)