Example #1
def calculate_noise_cube(cube=None, velocity_axis=None,
            velocity_noise_range=[-110,-90,90,110], header=None, Tsys=30.,
            filename=None):

    """ Calcuates noise envelopes for each pixel in a cube
    """

    import numpy as np
    import pyfits as pf
    from mycoords import make_velocity_axis

    if velocity_axis is None:
        velocity_axis = make_velocity_axis(header)


    noise_cube = np.zeros(cube.shape)
    for i in range(cube.shape[1]):
        for j in range(cube.shape[2]):
            profile = cube[:,i,j]
            noise = calculate_noise(profile, velocity_axis,
                    velocity_noise_range)
            #noise = 0.1 # measured in line free region
            noise_cube[:,i,j] = calculate_noise_scale(Tsys,
                    profile, noise=noise)

    if filename is not None:
        pf.writeto(filename, noise_cube, header=header)

    return noise_cube
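# Usage sketch (added for illustration; the cube filenames are hypothetical and
# the pyfits and mycoords modules used above are assumed to be importable):
def example_calculate_noise_cube():
    import pyfits as pf
    from mycoords import make_velocity_axis

    # Load an HI cube and build its velocity axis from the header.
    cube, header = pf.getdata('taurus_hi_galfa_cube.fits', header=True)
    velocity_axis = make_velocity_axis(header)

    # Estimate the noise from the line-free channels at |v| = 90-110 km/s and
    # write the resulting noise cube to disk.
    noise_cube = calculate_noise_cube(cube=cube,
            velocity_axis=velocity_axis,
            velocity_noise_range=[-110, -90, 90, 110],
            header=header,
            Tsys=30.,
            filename='taurus_hi_galfa_cube_noise.fits')

    return noise_cube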
Example #2
def plot_co_spectra(cloud_results):

    import numpy as np
    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = \
        check_file(co_filename, clobber=False)

    if not exists:
        # The binned cube is not on disk yet: bin the original CO cube and
        # write the binned version out.
        co_data, co_header = fits.getdata(cloud.co_filename, header=True)

        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                     cloud.co_data,
                     cloud.co_header,
                     )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename, header=True)

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    # cloudpy and plot_co are assumed to be defined at module level.
    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
Example #3
def add_data(data_dict, filename=None):
    ''' Adds the cube, header, velocity axis, and positions of each pixel to
    the data_dict.

    '''

    from astropy.io import fits
    from mycoords import make_velocity_axis

    cube, header = fits.getdata(filename, header=True)
    velocity_axis = make_velocity_axis(header)

    data_dict['cube'] = cube
    data_dict['header'] = header
    data_dict['velocity_axis'] = velocity_axis

    # Add positions in cube
    add_positions_to_data(data_dict)
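# Usage sketch (added; the FITS filename is hypothetical and
# add_positions_to_data() is assumed to be defined at module level):
def example_add_data():
    data_dict = {}
    add_data(data_dict, filename='perseus_hi_galfa_cube_sub_regrid.fits')

    # data_dict now holds 'cube', 'header', 'velocity_axis', plus the pixel
    # positions added by add_positions_to_data().
    return data_dict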
def reload_wcs_positions(results_dict):

    import numpy as np
    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits
    from astropy import units as u
    from astropy.coordinates import SkyCoord
    from astropy import wcs

    # Plot names
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG = DIR_FIG + 'nhi_map_data_synth.png'

    # Load HI Cube
    DIR_HI = '/d/bip3/ezbc/multicloud/data/hi/'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    print('\nLoading data cube...')
    cube, header = fits.getdata(DIR_HI + FILENAME_CUBE, header=True)
    velocity_axis = make_velocity_axis(header)

    header['CUNIT3'] = 'm/s'
    header['CUNIT2'] = 'deg'
    header['CUNIT1'] = 'deg'
    header['CTYPE3'] = 'VOPT'
    header['SPECSYS'] = 'LSRK'

    shape = (cube.shape[1] * cube.shape[2], 2)

    # Create header object
    w = wcs.WCS(header)

    # add position for each spectrum
    for i in xrange(len(results_dict['results'])):
        coords_pix = results_dict['positions']['pix'][i][::-1]
        coords_wcs = w.wcs_pix2world(([coords_pix[0], coords_pix[1], 0], ),
                                     0)[0]
        results_dict['positions']['wcs'][i] = np.array(coords_wcs[:2])

    return results_dict
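# Minimal sketch (added) of the pixel-to-world conversion performed in the loop
# above, assuming a 3-D HI header like the one loaded in reload_wcs_positions;
# the (x, y) pixel values are illustrative only.
def example_pix_to_wcs(header, x_pix, y_pix):
    import numpy as np
    from astropy import wcs

    w = wcs.WCS(header)

    # wcs_pix2world takes (x, y, spectral) pixel triples; origin=0 means
    # 0-based pixel coordinates.
    coords_wcs = w.wcs_pix2world(([x_pix, y_pix, 0],), 0)[0]

    return np.array(coords_wcs[:2])  # (RA, Dec) in degrees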
Example #6
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    likelihood_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)

    # Name of HI noise cube
    noise_cube_filename = 'taurus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.0005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 2.0

    # Name of property files results are written to
    global_property_file = 'taurus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-10, 10)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''
    noise_cube_filename += bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'taurus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'taurus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)
    #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
    #av_data_error *= 10.0

    hi_data, hi_header = fits.getdata(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename + \
                '.fits', header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             plot_progress=False
                             )

    # Combine region mask with new mask
    mask += np.logical_not(region_mask)

    # Derive center velocity from hi
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.1f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calculate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    plt.clf(); plt.close()
    nhi_image_copy = np.copy(nhi_image)
    nhi_image_copy[mask] = np.nan
    av_image_copy = np.copy(av_data)
    resid_image = av_image_copy - nhi_image_copy * dgr
    plt.imshow(resid_image, origin='lower')
    plt.title(r'$A_V$ Data - Model')
    plt.colorbar()
    plt.show()
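# Sketch (added, not the project's implementation): under the optically thin
# assumption, calculate_nhi presumably integrates the brightness temperature
# over the requested velocity range, N(HI) = 1.823e18 * sum(T_B * dv) cm^-2
# with T_B in K and dv in km/s, returned here in units of 10^20 cm^-2 to match
# the DGR units quoted in the script above.
def example_nhi_from_cube(cube, velocity_axis, velocity_range=(-10, 10)):
    import numpy as np

    # Channel width in km/s (assumes a uniform velocity axis).
    dv = np.abs(velocity_axis[1] - velocity_axis[0])

    # Select the channels inside the integration range.
    channels = (velocity_axis >= velocity_range[0]) & \
               (velocity_axis <= velocity_range[1])

    # Integrate T_B over the selected channels; 1.823e18 / 1e20 = 1.823e-2.
    nhi_image = 1.823e-2 * np.nansum(cube[channels, :, :], axis=0) * dv

    return nhi_image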
def main():

    import grid
    import numpy as np
    from myimage_analysis import calculate_nhi
    from mycoords import make_velocity_axis
    import pyfits as pf
    import mygeometry as myg
    import json

    # parameters used in script
    # -------------------------
    # Regions
    # Options are 'ds9' or 'av_gradient'
    box_method = 'av_gradient'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/dgr/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/cfa/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'

    av_data_planck, planck_header = pf.getdata(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            header=True)
    av_data_error_planck, planck_header = pf.getdata(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            header=True)

    # load GALFA HI
    hi_data, hi_header = pf.getdata(hi_dir + \
            'perseus_hi_galfa_cube_regrid_planckres.fits',
            header=True)
    velocity_axis = make_velocity_axis(hi_header)

    noise_cube, noise_header = pf.getdata(hi_dir + \
            'perseus_hi_galfa_cube_regrid_planckres_noise.fits', header=True)

    # define core properties
    with open(core_dir + 'perseus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    cores = convert_core_coordinates(cores, planck_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'perseus_av_boxes_',
            header = planck_header)

    # Initialize lists
    av_images = []
    av_error_images = []
    nhi_images = []
    nhi_error_images = []

    for core in cores:
        print('\nCalculating for core %s' % core)
        if box_method == 'ds9':
            # Grab the mask from the DS9 regions
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask = myg.get_rectangular_mask(av_data_planck,
                    xy[0], xy[1],
                    width = box_width,
                    height = box_height,
                    angle = box_angle)
        elif box_method == 'av_gradient':
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])
        else:
            raise ValueError('Method for boxes is either ds9 or av_gradient')

        indices = mask == 1

        # Get only the relevant pixels to decrease computation time
        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_planck_sub = np.copy(av_data_planck[indices])
        av_data_error_planck_sub = np.copy(av_data_error_planck[indices])

        # Derive N(HI) image
        nhi_image, nhi_image_error = calculate_nhi(cube=hi_data_sub,
                velocity_axis=velocity_axis,
                noise_cube=noise_cube_sub,
                velocity_range=cores[core]['hi_velocity_range'])

        nhi_images.append(nhi_image)
        nhi_error_images.append(nhi_image_error)
        av_images.append(av_data_planck_sub)
        av_error_images.append(av_data_error_planck_sub)

    plot_av_vs_nhi_grid(nhi_images,
                        av_images,
                        av_error_images=av_error_images,
                        nhi_error_images=nhi_error_images,
                        #limits=[0,14, 0,10],
                        scale=['linear', 'log'],
                        savedir=figure_dir,
                        plot_type='scatter',
                        filename='perseus_av_vs_nhi_panels.png',
                        color_scale='linear')

    # Derive N(HI) image
    nhi_image, nhi_image_error = calculate_nhi(cube=hi_data,
            velocity_axis=velocity_axis,
            noise_cube=noise_cube,
            velocity_range=cores[core]['hi_velocity_range'])

    # Plot correlation, similar to Figure 3 of Paradis et al. (2012)
    plot_av_vs_nhi(nhi_image,
            av_data_planck,
            savedir=figure_dir,
            scale=['log', 'linear'],
            filename='perseus_av_vs_nhi_global.png',
            color_scale='linear')
def plot_nhi_maps(results_dict,
                  limits=None,
                  cube_data=None,
                  header=None,
                  load_synthetic_cube=False,
                  show=False,
                  velocity_range=[0, 500],
                  save_pdf=False):

    import numpy as np
    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG_BASE = DIR_FIG + 'nhi_map_data_synth'

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(
        cube=cube_data,
        velocity_axis=velocity_axis,
        velocity_range=velocity_range,
    )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_dict['velocity_axis']
    if not load_synthetic_cube:
        print('\nCreating synthetic cube...')
        cube_synthetic = create_synthetic_cube(results_dict, cube_data)

        np.save(FILENAME_CUBE_SYNTH, cube_synthetic)
    else:
        print('\nLoading synthetic cube...')
        cube_synthetic = np.load(FILENAME_CUBE_SYNTH)

    # Create N(HI) synthetic
    nhi_synthetic = myia.calculate_nhi(
        cube=cube_synthetic,
        velocity_axis=velocity_axis,
        velocity_range=velocity_range,
    )

    v_limits = [0, np.max(nhi_data)]
    v_limits = [-1, 41]

    if 0:
        import matplotlib.pyplot as plt
        plt.close()
        plt.clf()
        fig, axes = plt.subplots(2, 1)
        axes[0].imshow(nhi_data, origin='lower')
        axes[1].imshow(nhi_synthetic, origin='lower')
        plt.show()

    if save_pdf:
        ext = '.pdf'
    else:
        ext = '.png'
    filename_fig = FILENAME_FIG_BASE + ext
    print('\nPlotting N(HI) maps...')
    print(filename_fig)
    # Plot the maps together; this calls the localmodule plot_nhi_maps imported
    # above, which shadows this wrapper inside the function scope.
    plot_nhi_maps(
        nhi_data,
        nhi_synthetic,
        header=header,
        #limits=[278, -37, 282, -35],
        limits=limits,
        filename=filename_fig,
        nhi_1_vlimits=v_limits,
        nhi_2_vlimits=v_limits,
        show=show,
        vscale='linear',
    )
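# Usage sketch (added; results_dict is assumed to come from the spectral
# decomposition step referenced in the other examples, and the velocity range
# here is illustrative):
def example_plot_nhi_maps(results_dict):
    from astropy.io import fits

    cube, header = fits.getdata('/d/bip3/ezbc/multicloud/data_products/hi/' + \
                                'perseus_hi_galfa_cube_sub_regrid.fits',
                                header=True)

    plot_nhi_maps(results_dict,
                  cube_data=cube,
                  header=header,
                  velocity_range=[-5, 15],
                  load_synthetic_cube=False,
                  save_pdf=False)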
def main(
    dgr=None, vel_range=None, vel_range_type="single", region=None, av_data_type="planck", use_binned_images=False
):
    """ Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    """

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = "_bin"
    else:
        bin_string = ""

    # Name of noise cube
    noise_cube_filename = "california_hi_galfa_cube_regrid_planckres_noise" + bin_string + ".fits"

    # Name of property files results are written to
    prop_file = "california_global_properties_" + av_data_type + "_scaled"

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {"wcs": (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))), "pixel": ()}
    elif region == 2:
        region_limit = {"wcs": (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))), "pixel": ()}
    elif region == 3:
        region_limit = {"wcs": (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))), "pixel": ()}
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/california/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/california/figures/av/"
    av_dir = "/d/bip3/ezbc/california/data/av/"
    hi_dir = "/d/bip3/ezbc/california/data/hi/"
    co_dir = "/d/bip3/ezbc/california/data/co/"
    core_dir = "/d/bip3/ezbc/california/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/california/data/python_output/"
    region_dir = "/d/bip3/ezbc/california/data/python_output/ds9_regions/"

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == "lee12_2mass":
        print ("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_lee12_2mass_regrid_planckres" + bin_string + ".fits", return_header=True
        )
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "lee12_iris":
        print ("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_lee12_iris_regrid_planckres" + bin_string + ".fits", return_header=True
        )
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print ("\nLoading Planck data...")
        av_image, av_header = load_fits(
            av_dir + "california_av_planck_5arcmin" + bin_string + ".fits", return_header=True
        )

        av_image_error, av_error_header = load_fits(
            av_dir + "california_av_error_planck_5arcmin" + bin_string + ".fits", return_header=True
        )

    hi_cube, hi_header = load_fits(
        hi_dir + "california_hi_galfa_cube_regrid_planckres" + bin_string + ".fits", return_header=True
    )

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename, return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(
            co_dir + "california_co_cfa_cube_regrid_planckres" + bin_string + ".fits", return_header=True
        )

    # Load global properties of cloud
    # global properties written from script
    # 'av/california_analysis_global_properties.txt'
    # Note: likelihood_filename and results_filename are assumed to be defined
    # elsewhere; they are only appended to here when a region is specified.
    if region is not None:
        likelihood_filename += "_region{0:.0f}".format(region)
        results_filename += "_region{0:.0f}".format(region)

    print ("\nReading global parameter file\n" + prop_file + ".txt")
    with open(property_dir + prop_file + ".txt", "r") as f:
        props = json.load(f)

    if vel_range is not None:
        props["hi_velocity_range"] = vel_range
    else:
        vel_width = props["hi_velocity_width_max"]["value"]
        vel_center = np.array(props["hi_velocity_center"]["value"])
        vel_center = -4.0
        vel_range = (vel_center - vel_width / 2.0, vel_center + vel_width / 2.0)
    if dgr is not None:
        props["dust2gas_ratio_max"]["value"] = dgr
    else:
        dgr = props["dust2gas_ratio_max"]["value"]
    # Read the intercept regardless of whether dgr was supplied, otherwise
    # fit_params["intercept"] below would hit an undefined name.
    intercept = props["intercept_max"]["value"]

    fit_params = {}
    fit_params["dgr"] = dgr
    fit_params["intercept"] = intercept

    # define core properties
    with open(core_dir + "california_core_properties.txt", "r") as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores, filename_base=region_dir + "california_av_boxes_", header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(
        cube=hi_cube, velocity_axis=velocity_axis, velocity_range=vel_range, header=hi_header, noise_cube=hi_noise_cube
    )

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == "single":
        print ("\nHI velocity integration range:")
        print ("%.1f to %.1f km/s" % (vel_range[0], vel_range[1]))
    elif vel_range_type == "multiple":
        print ("\nHI velocity integration ranges:")
        for i in xrange(0, vel_range.shape[0]):
            print ("%.1f to %.1f km/s" % (vel_range[i, 0], vel_range[i, 1]))

    print ("\nDGR:")
    print ("%.2f x 10^-20 cm^2 mag" % (dgr))

    print ("\nIntercept:")
    print ("%.2f mag" % (intercept))

    # Get mask and mask images
    mask = np.asarray(props["mask" + bin_string])

    mask_images = 1

    if mask_images:
        av_image[mask] = np.nan
        nhi_image[mask] = np.nan
        av_image_error[mask] = np.nan
        av_model[mask] = np.nan

    indices = (np.isnan(av_model)) & (np.isnan(av_image)) & (np.isnan(av_image_error))

    if 1:
        import matplotlib.pyplot as plt

        plt.imshow(av_image)
        plt.show()

    print ("\nTotal number of pixels after masking = " + str(props["npix"]))

    # Plot
    figure_types = ["png", "pdf"]
    for figure_type in figure_types:
        if region is None:
            filename = "california_av_vs_nhi_" + av_data_type + bin_string

        filename = figure_dir + filename + "." + figure_type

        print ("\nSaving Av model image to \n" + filename)

        plot_av_vs_nhi(
            nhi_image,
            av_image,
            av_error=av_image_error,
            # limits=[10**-1, 10**1.9, 10**0, 10**1.7],
            fit_params=fit_params,
            limits=[5, 40, -0.2, 2],
            # limits=[0,30,0,10],
            gridsize=(10, 10),
            # scale=('log', 'log'),
            # scale=('linear', 'linear'),
            filename=filename,
            contour_plot=not use_binned_images,
            std=0.22,
        )
def plot_cluster_nhi_panels(results_ref=None,
                            colors=None,
                            limits=None,
                            cube=None,
                            header=None,
                            load_synthetic_cube=False,
                            show=False,
                            velocity_range=[0, 500],
                            save_pdf=False):

    import numpy as np
    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_map_panels, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    DIR_FIG = '/d/bip3/ezbc/magellanic_stream/figures/'
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG = DIR_FIG + 'nhi_maps_components.png'
    if save_pdf:
        FILENAME_FIG = FILENAME_FIG.replace('.png', '.pdf')

    # Load HI Cube
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH_BASE = DIR_HI + 'cube_synth_comp'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(
        cube=cube,
        velocity_axis=velocity_axis,
        velocity_range=velocity_range,
    )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_ref['velocity_axis']

    # get number of unique components
    component_colors = np.unique(colors)
    n_components = len(component_colors)

    nhi_list = []
    nhi_max = 0.0
    for i in xrange(n_components):
        if not load_synthetic_cube:
            print('\n\tCreating synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))

            # get the relevant parameters
            indices = np.where(colors == component_colors[i])[0]
            pix_positions = results_ref['pos_pix'][indices]
            fit_params_list = results_ref['data'][indices, 2:]

            print('\n\t\tNumber of components in cube: ' + \
                  '{0:.0f}'.format(len(fit_params_list)))

            cube_synthetic = \
                create_synthetic_cube(pix_positions=pix_positions,
                                      velocity_axis=velocity_axis,
                                      fit_params_list=fit_params_list,
                                      cube_data=cube,
                                      )

            np.save(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy', cube_synthetic)
        else:
            print('\n\tLoading synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))
            cube_synthetic = np.load(FILENAME_CUBE_SYNTH_BASE + str(i) +
                                     '.npy')

        # Create N(HI) synthetic
        nhi_synthetic = myia.calculate_nhi(
            cube=cube_synthetic,
            velocity_axis=velocity_axis,
            velocity_range=velocity_range,
        )

        nhi_list.append(nhi_synthetic)

        nhi_max_temp = np.max(nhi_synthetic)
        if nhi_max_temp > nhi_max:
            nhi_max = nhi_max_temp

    v_limits = [0, nhi_max]

    # crop to highest valued cubes
    n_left = 4
    n_left = len(nhi_list)
    sum_list = []
    for nhi in nhi_list:
        sum_list.append(np.nansum(nhi))
    sort_indices = np.argsort(sum_list)[::-1]
    new_list = []
    for i in xrange(n_left):
        new_list.append(nhi_list[sort_indices[i]])
    nhi_list = new_list

    # Plot the maps together

    plot_nhi_map_panels(
        nhi_list,
        header=header,
        #limits=[278, -37, 282, -35],
        limits=limits,
        filename=FILENAME_FIG,
        nhi_vlimits=[-0.1, 15],
        show=show,
        vscale='linear',
    )
def main():
    ''' Executes script.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    from myimage_analysis import calculate_nhi
    import mygeometry as myg
    reload(myg)

    # define directory locations
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/maps/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    core_dir = output_dir + 'core_arrays/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # Load hi fits file
    hi_image, hi_header = pf.getdata(hi_dir + \
            'taurus_hi_galfa_cube_regrid_planckres.fits', header=True)
    h = hi_header

    # Load av fits file
    av_image, av_header = pf.getdata(av_dir + \
                'taurus_av_planck_5arcmin.fits',
            header=True)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_image,
            velocity_axis=velocity_axis, velocity_range=[-16.53, 28.83])

    if False:
        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image, mask=av_image != av_image)

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                contour_image=av_image, contours=[5,10,15],
                savedir=figure_dir, filename='taurus_nhi_cores_map.png',
                show=True)

    cores = {'L1495':
                {'center_wcs': [(4,14,0), (28, 11, 0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,16,30), (27,44,30), (4,5,20), (28,28,33)]
                 },
             'L1495A':
                {'center_wcs': [(4,18,0), (28,23., 0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,28,23),(28,12,50),(4,16,23),(29,46,5)],
                 },
             'B213':
                {'center_wcs': [(4, 19, 0), (27, 15,0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,22,27), (26,45,47),(4,5,25),(27,18,48)],
                },
             'B220':
                {'center_wcs': [(4, 41, 0.), (26,7,0)],
                 'map': None,
                 'threshold': 7,
                 'box_wcs': [(4,47,49),(25,31,13),(4,40,37),(27,31,17)],
                 },
             'L1527':
                {'center_wcs': [(4, 39, 0.), (25,47, 0)],
                 'map': None,
                 'threshold': 7,
                 'box_wcs': [(4,40,13), (24,46,38), (4,34,35), (25,56,7)],
                 },
             'B215':
                {'center_wcs': [(4, 23, 0), (25, 3, 0)],
                 'map': None,
                 'threshold': 3,
                 'box_wcs': [(4,24,51), (22,36,7), (4,20,54), (25,26,31)],
                 },
             'L1524':
                {'center_wcs': [(4,29,0.), (24,31.,0)],
                 'map': None,
                 'threshold': 3,
                 'box_wcs': [(4,31,0), (22,4,6), (4,25,33), (25,0,55)],
                 }
                }

    cores = convert_core_coordinates(cores, h)

    if False:
        nhi_image = np.zeros(nhi_image.shape)

        for core in cores:
            core_image = np.load(core_dir + core + '.npy')
            core_indices = np.where(core_image == core_image)
            nhi_image[core_indices] += core_image[core_indices]

        nhi_image_trim = np.ma.array(nhi_image,
                mask=((av_image != av_image) & (nhi_image == 0)))

        nhi_image_trim[nhi_image_trim == 0] = np.NaN

        read_ds9_region(av_dir + 'taurus_av_boxes.reg')

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
            savedir=figure_dir,
            cores=cores,
            filename='taurus_nhi_core_regions_map.png',
            show=True)

    if True:
        cores = load_ds9_region(cores,
                filename_base = region_dir + 'taurus_av_boxes_',
                header = h)

        # Grab the mask
        mask = np.zeros((nhi_image.shape))
        for core in cores:
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask += myg.get_rectangular_mask(nhi_image,
                    xy[0], xy[1],
                    width = box_width,
                    height = box_height,
                    angle = box_angle)

            cores[core]['box_vertices'] = myg.get_rect(
                        xy[0], xy[1],
                        width = box_width,
                        height = box_height,
                        angle = box_angle,)

        mask[mask > 1] = 1

        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image,
                mask = ((av_image != av_image) & \
                        (av_image > 1.0)))
        av_image_trim = np.ma.array(av_image,
                mask = ((nhi_image != nhi_image) & \
                        (av_image > 1.0)))

        nhi_image_trim = np.copy(nhi_image)
        nhi_image_trim[av_image > 1.] = np.nan
        av_image_trim = np.copy(av_image)
        av_image_trim[av_image > 1.] = np.nan

        # Plot
        figure_types = ['png',]
        for figure_type in figure_types:
            # N(HI) alone
            plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                    contour_image=av_image, contours=[5,10,15],
                    boxes=True, cores = cores,
                    limits=[50,37,200,160],
                    savedir=figure_dir,
                    filename='taurus_nhi_cores_map.%s' % \
                            figure_type,
                    show=0)

            # N(HI) + Av
            plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                    av_image=av_image_trim,
                    #boxes=True, cores = cores,
                    limits=[50,37,200,160],
                    savedir=figure_dir,
                    filename='taurus_nhi_av_map.%s' % \
                            figure_type,
                    show=0)
def run_cloud_analysis(global_args,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import numpy as np
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']
    region = global_args['region']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir =  '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'

    if region == 'east':
        hi_filename = '/d/bip2/DR2W_v1/Narrow/' + \
            'GALFA_HI_RA+DEC_060.00+26.35_N.fits'
        hi_dr1_filename = '/d/bip3/ezbc/galfa/DR1/' + \
            'GALFA_HI_RA+DEC_060.00+26.35_N.fits'
    else:
        hi_filename = '/d/bip2/DR2W_v1/Narrow/' + \
            'GALFA_HI_RA+DEC_052.00+26.35_N.fits'
        hi_dr1_filename = '/d/bip3/ezbc/galfa/DR1/' + \
            'GALFA_HI_RA+DEC_052.00+26.35_N.fits'

    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)

    hi_vel_axis = make_velocity_axis(hi_header)
    velocity_range = [-5, 15]

    # use the vel range to derive N(HI)
    nhi_image = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals_perseus_' + region + '.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=None,
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1'
                   #hi_vlimits=None,
                   )
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck', use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'taurus_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'taurus_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'taurus_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'taurus_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/taurus_analysis_global_properties.txt'
    # Note: likelihood_filename and results_filename are assumed to be defined
    # elsewhere; they are only appended to here when a region is specified.
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'taurus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'taurus_av_boxes_',
            header = hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = False
    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan

    if mask_images:
        av_image_masked[mask] = np.nan
        av_error_masked[mask] = np.nan
        av_model_masked[mask] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    if 0:
        import matplotlib.pyplot as plt
        av_plot_data = np.copy(av_image)
        av_plot_data[mask] = np.nan
        plt.imshow(av_plot_data, origin='lower')
        plt.xlim(props['plot_limit_bin']['pixel'][0:3:2])
        plt.ylim(props['plot_limit_bin']['pixel'][1:4:2])
        plt.show()

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1,2))

    if not use_binned_images:
        # Derive CO spectrum
        co_data[:, mask] = 0
        co_data[np.isnan(co_data)] = 0
        co_spectrum = np.mean(co_data, axis=(1,2))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/taurus_av_model_map_' + \
                    av_data_type + bin_string
                    #'dgr{0:.3f}_'.format(dgr) + \
                    #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                    #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/taurus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(vel_range[i, 0],
                                                              vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'taurus_av_model_map_region{0:.0f}'.format(region)

        print('\nSaving Av model image to \n' + figure_dir + filename + \
                '.' + figure_type)

        if 0:
            plot_av_model(av_image=av_image_masked,
                          av_model=av_model_masked,
                          header=av_header,
                          results=props,
                          limits=props['plot_limit' + bin_string]['pixel'],
                          savedir=figure_dir + 'maps/av_models/',
                          filename=filename + '.' + figure_type,
                          show=False)

        if 1:
            #if not use_binned_images:
            if 0:
                plot_av_model(av_image=av_image_masked,
                              av_model=av_model_masked,
                              header=av_header,
                              results=props,
                              hi_velocity_axis=velocity_axis,
                              vel_range=vel_range,
                              hi_spectrum=hi_spectrum,
                              #hi_limits=[-15, 25, -1, 10],
                              hi_limits=[-15, 25, None, None],
                              co_spectrum=co_spectrum,
                              co_velocity_axis=co_velocity_axis,
                              limits=props['plot_limit' + bin_string]['pixel'],
                              savedir=figure_dir + 'maps/av_models/',
                              filename=filename + '_spectra' + '.' + figure_type,
                              show=False)


            plot_avmod_vs_av((av_model_masked,),
                    (av_image_masked,),
                    av_errors=(av_error_masked,),
                    #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                    limits=[0,20,0,3],
                    savedir=figure_dir + 'av/',
                    gridsize=(10,10),
                    #scale=('log', 'log'),
                    #scale=('linear', 'linear'),
                    filename='taurus_avmod_vs_av%s.%s' % (bin_string, figure_type),
                    show = False,
                    std=0.22,
                    )

        if 0:
            plot_power_spectrum(av_image_masked - av_model_masked,
                filename_prefix='taurus_av_resid_power_spectrum_' + \
                                '{0:s}'.format(av_data_type),
                filename_suffix='.{0:s}'.format(figure_type),
                savedir=figure_dir + 'power_spectra/',
                show=False)
def main(dgr=None, vel_range=(-5, 15), vel_range_type='single', region=None,
        av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system,path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Which cores to include in analysis?
    cores_to_keep = [# taur
                     'L1495',
                     'L1495A',
                     'B213',
                     'L1498',
                     'B215',
                     'B18',
                     'B217',
                     'B220-1',
                     'B220-2',
                     'L1521',
                     'L1524',
                     'L1527-1',
                     'L1527-2',
                     # Calif
                     'L1536',
                     'L1483-1',
                     'L1483-2',
                     'L1482-1',
                     'L1482-2',
                     'L1478-1',
                     'L1478-2',
                     'L1456',
                     'NGC1579',
                     #'L1545',
                     #'L1517',
                     #'L1512',
                     #'L1523',
                     #'L1512',
                     # Pers
                     'B5',
                     'IC348',
                     'B1E',
                     'B1',
                     'NGC1333',
                     'B4',
                     'B3',
                     'L1455',
                     'L1448',
                     ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_tau353':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nLoading global property file {0:s}.txt'.format(prop_file))
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    # Define velocity range
    props['hi_velocity_range'] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename,
            header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0 ,0)),
                                  ((2, 30, 0), (37, 0, 0))
                                  )


    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits',
                                              'plot_limit',
                                              'region_name_pos'))

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))


    cloud_dict = {'taurus' : {},
                  'perseus' : {},
                  'california' : {},
                  }

    # load Planck Av and GALFA HI images, on same grid
    for cloud in cloud_dict:

        print('\nLoading core properties for {0:s}'.format(cloud))

        file_dir = '/d/bip3/ezbc/{0:s}/data/av/'.format(cloud)

        # define core properties
        with open('/d/bip3/ezbc/{0:s}/data/python_output/'.format(cloud) + \
                  'core_properties/{0:s}_core_properties.txt'.format(cloud),
                  'r') as f:
             cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == 'aldobaran':
            region_cloud = 'california'
        else:
            region_cloud = cloud
        core_filename = '/d/bip3/ezbc/' + region_cloud + '/data/python_output' + \
                        '/ds9_regions/{0:s}_av_poly_cores'.format(region_cloud)

        cores = load_ds9_core_region(cores,
                                     filename_base=core_filename,
                                     header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]['cores'] = cores

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        filename = 'av_cores_map' + \
                   '.{0:s}'.format(figure_type)

        print('\nSaving Av cores map to \n' + filename)

        plot_cores_map(header=av_header,
                       av_image=av_image,
                       limits=props['plot_limit']['pixel'],
                       regions=props['regions'],
                       cloud_dict=cloud_dict,
                       cores_to_keep=cores_to_keep,
                       props=props,
                       hi_vlimits=(0,20),
                       av_vlimits=(0,16),
                       #av_vlimits=(0.1,30),
                       savedir=figure_dir + 'maps/',
                       filename=filename,
                       show=False)
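convert_limit_coordinates and the related WCS helpers are used throughout but not shown. The sketch below illustrates the kind of conversion presumably performed, assuming astropy's WCS and that the limit tuples are (hours, minutes, seconds) in RA and (degrees, arcminutes, arcseconds) in Dec; the helper name and the ordering of the returned pixel limits are assumptions.

from astropy.wcs import WCS

def hms_to_deg(h, m, s):
    return 15.0 * (h + m / 60.0 + s / 3600.0)

def dms_to_deg(d, m, s):
    sign = -1.0 if d < 0 else 1.0
    return sign * (abs(d) + m / 60.0 + s / 3600.0)

def limit_wcs_to_pixel(limit_wcs, header):
    """Convert ((ra_hms, dec_dms), (ra_hms, dec_dms)) limits to pixel coords."""
    wcs = WCS(header).celestial
    pixels = []
    for ra_hms, dec_dms in limit_wcs:
        x, y = wcs.wcs_world2pix(hms_to_deg(*ra_hms), dms_to_deg(*dec_dms), 0)
        pixels.extend((float(y), float(x)))
    return pixels   # ordering of the returned limits is an assumption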
def main(dgr=None, vel_range=(-5, 15), vel_range_type="single", region=None, av_data_type="planck"):
    """ Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    """

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = "multicloud_hi_galfa_cube_regrid_planckres_noise.fits"

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    # av_data_type = 'lee12'
    # av_data_type = 'planck'

    # Global parameter file
    prop_file = "multicloud_global_properties"

    # Which cores to include in analysis?
    cores_to_keep = [  # taur
        "L1495",
        "L1495A",
        "B213",
        "L1498",
        "B215",
        "B18",
        "B217",
        "B220-1",
        "B220-2",
        "L1521",
        "L1524",
        "L1527-1",
        "L1527-2",
        # Calif
        "L1536",
        "L1483-1",
        "L1483-2",
        "L1482-1",
        "L1482-2",
        "L1478-1",
        "L1478-2",
        "L1456",
        "NGC1579",
        #'L1545',
        #'L1517',
        #'L1512',
        #'L1523',
        #'L1512',
        # Pers
        "B5",
        "IC348",
        "B1E",
        "B1",
        "NGC1333",
        "B4",
        "B3",
        "L1455",
        "L1448",
    ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {"wcs": (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))), "pixel": ()}
    elif region == 2:
        region_limit = {"wcs": (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))), "pixel": ()}
    elif region == 3:
        region_limit = {"wcs": (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))), "pixel": ()}
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/multicloud/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/multicloud/figures/"
    av_dir = "/d/bip3/ezbc/multicloud/data/av/"
    hi_dir = "/d/bip3/ezbc/multicloud/data/hi/"
    co_dir = "/d/bip3/ezbc/multicloud/data/co/"
    core_dir = "/d/bip3/ezbc/multicloud/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/multicloud/data/python_output/"
    region_dir = "/d/bip3/ezbc/multicloud/data/python_output/"

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == "lee12_2mass":
        print("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_lee12_2mass_regrid_planckres.fits", return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "lee12_iris":
        print("\nLoading Lee+12 data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_lee12_iris_regrid_planckres.fits", return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == "planck_rad":
        print("\nLoading Planck data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_planck_radiance_5arcmin.fits", return_header=True)
        av_image_error, av_error_header = load_fits(
            av_dir + "multicloud_av_error_planck_radiance_5arcmin.fits", return_header=True
        )
    else:
        print("\nLoading Planck data...")
        av_image, av_header = load_fits(av_dir + "multicloud_av_planck_5arcmin.fits", return_header=True)

        av_image_error, av_error_header = load_fits(
            av_dir + "multicloud_av_error_planck_5arcmin.fits", return_header=True
        )

    hi_cube, hi_header = load_fits(hi_dir + "multicloud_hi_galfa_cube_regrid_planckres.fits", return_header=True)

    co_data, co_header = load_fits(co_dir + "multicloud_co_cfa_cube_regrid_planckres.fits", return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += "_region{0:.0f}".format(region)
        results_filename += "_region{0:.0f}".format(region)

    print("\nLoading global property file {0:s}.txt".format(prop_file))
    with open(property_dir + prop_file + ".txt", "r") as f:
        props = json.load(f)

    # Define velocity range
    props["hi_velocity_range"] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(
            cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_noise_range=[90, 110],
            header=hi_header,
            Tsys=30.0,
            filename=hi_dir + noise_cube_filename,
        )
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename, header=True)

    # create nhi image
    nhi_image = calculate_nhi(
        cube=hi_cube, velocity_axis=velocity_axis, velocity_range=vel_range, header=hi_header, noise_cube=hi_noise_cube
    )

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(
        props, header=av_header, coords=("region_limit", "co_noise_limits", "plot_limit", "region_name_pos")
    )

    # Load cloud division regions from ds9
    props = load_ds9_region(props, filename=region_dir + "multicloud_divisions.reg", header=av_header)

    # Derive relevant region
    pix = props["region_limit"]["pixel"]
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]), (pix[3], pix[0]))

    # block off region
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print("\nRegion size = " + "{0:.0f} pix".format(region_mask[region_mask == 1].size))

    if vel_range_type == "single":
        print("\nHI velocity integration range:")
        print("%.1f to %.1f km/s" % (vel_range[0], vel_range[1]))
    elif vel_range_type == "multiple":
        print("\nHI velocity integration ranges:")
        for i in xrange(0, vel_range.shape[0]):
            print("%.1f to %.1f km/s" % (vel_range[i, 0], vel_range[i, 1]))

    cloud_dict = {"taurus": {}, "perseus": {}, "california": {}}

    # load Planck Av and GALFA HI images, on same grid
    for cloud in cloud_dict:

        print("\nLoading core properties for {0:s}".format(cloud))

        file_dir = "/d/bip3/ezbc/{0:s}/data/av/".format(cloud)

        # define core properties
        with open(
            "/d/bip3/ezbc/{0:s}/data/python_output/".format(cloud)
            + "core_properties/{0:s}_core_properties.txt".format(cloud),
            "r",
        ) as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == "aldobaran":
            region_cloud = "california"
        else:
            region_cloud = cloud
        core_filename = region_dir.replace("multicloud", region_cloud) + "/ds9_regions/{0:s}_av_poly_cores".format(
            region_cloud
        )

        cores = load_ds9_core_region(cores, filename_base=core_filename, header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]["cores"] = cores

    # Plot
    figure_types = ["png", "pdf"]
    for figure_type in figure_types:
        filename = "multicloud_av_cores_map" + ".{0:s}".format(figure_type)

        print("\nSaving Av cores map to \n" + filename)

        plot_cores_map(
            header=av_header,
            av_image=av_image,
            limits=props["plot_limit"]["pixel"],
            regions=props["regions"],
            cloud_dict=cloud_dict,
            cores_to_keep=cores_to_keep,
            props=props,
            hi_vlimits=(0, 20),
            av_vlimits=(0, 16),
            # av_vlimits=(0.1,30),
            savedir=figure_dir + "maps/",
            filename=filename,
            show=False,
        )
def run_cloud_analysis(global_args,):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir =  '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }


    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])


    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)


    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)


    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
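            # Worked numbers under these assumptions (5 arcmin pixels):
            # co_res = 8.4 / 5.0 = 1.68 pix, width = (1.68**2 - 1.0**2)**0.5
            # ~= 1.35 pix, and std ~= 1.35 / 2.355 ~= 0.57 pix.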
            g = Gaussian2DKernel(std)  # kernel takes the std dev, not the FWHM

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')
        else:
            # fall back to the native resolution if no smoothing is requested
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                             statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        velocity_range_dr1 = [-5, 15]
        gauss_fits = None
        gauss_fits_dr1 = None
        comp_num = None
        comp_num_dr1 = None
        hi_range_error = None

    hi_range_kwargs = {
                       'velocity_range': velocity_range,
                       'gauss_fits': gauss_fits,
                       'comp_num': comp_num,
                       'hi_range_error': hi_range_error,
                       'vel_range': velocity_range,
                       'gauss_fit_kwargs': gauss_fit_kwargs,
                       }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_spectrum,
                 gauss_fits=gauss_fits,
                 comp_num=comp_num,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_dr1_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_dr1_spectrum,
                 gauss_fits=gauss_fits_dr1,
                 comp_num=comp_num_dr1,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range_dr1,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask for erroneous pixels
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=[65, 45, 25, 35],
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1',
                   #hi_vlimits=[0.91, 0.93],
                   )
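calc_hi_vel_range and get_gauss_fit_kwargs are not shown in these examples. As a rough illustration of the 'gaussian' branch above, the sketch below fits a single Gaussian to the HI spectrum and returns mean +/- 2 sigma as the velocity range; the real helper evidently fits multiple components and uses the CO spectrum to pick the cloud component, so treat this only as the basic idea.

import numpy as np
from scipy.optimize import curve_fit

def gaussian(v, amp, mean, sigma):
    return amp * np.exp(-(v - mean)**2 / (2.0 * sigma**2))

def estimate_hi_vel_range(hi_spectrum, hi_vel_axis, nsigma=2.0):
    """Fit one Gaussian to the HI spectrum and return mean +/- nsigma*sigma."""
    p0 = (np.nanmax(hi_spectrum), hi_vel_axis[np.nanargmax(hi_spectrum)], 10.0)
    popt, _ = curve_fit(gaussian, hi_vel_axis, hi_spectrum, p0=p0)
    amp, mean, sigma = popt
    sigma = abs(sigma)
    return (mean - nsigma * sigma, mean + nsigma * sigma)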
Example #17
def plot_co_spectra(results, ):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_co_spectra'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    co_filename = cloud.co_filename

    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        co_filename = co_filename.replace('.fits', '_bin.fits')

    exists = \
        check_file(co_filename, clobber=False)

    if not exists:
        co_data, co_header = fits.getdata(
            co_filename,
            header=True,
        )
        cloud.co_data, cloud.co_header = \
            bin_image(co_data,
                      binsize=(1, cloud.binsize, cloud.binsize),
                      header=co_header,
                      statistic=np.nanmean)

        fits.writeto(
            cloud.co_filename.replace('.fits', '_bin.fits'),
            cloud.co_data,
            cloud.co_header,
        )
    else:
        cloud.co_data, cloud.co_header = \
            fits.getdata(co_filename, header=True)

    cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(cloud.av_filename_bin, header=True)
    cloud.load_region(cloud.region_filename, header=cloud.av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
Example #18
def plot_hi_spectrum(cloud_results, plot_co=1):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = \
            check_file(co_filename, clobber=False)

        if not exists:
            co_data, co_header = fits.getdata(
                co_filename,
                header=True,
            )
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(
                cloud.co_filename.replace('.fits', '_bin.fits'),
                cloud.co_data,
                cloud.co_header,
            )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename, header=True)

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    import matplotlib.pyplot as plt
    plt.close()
    plt.clf()
    co = np.copy(cloud.co_data[30, :, :])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    # sanity check: the spatial shapes of the cubes and the mask should agree
    assert cloud.hi_data.shape[1:] == cloud.co_data.shape[1:] \
        == cloud.region_mask.shape

    cloudpy.plot_hi_spectrum(
        cloud,
        filename=filename_base + '.png',
        limits=[-50, 30, -10, 70],
        plot_co=plot_co,
        hi_mask=hi_mask,
        co_mask=co_mask,
    )
Example #19
def main():

    import grid
    import numpy as np
    from os import system,path
    import myclumpfinder as clump_finder
    reload(clump_finder)
    import mygeometry as myg
    reload(myg)
    from mycoords import make_velocity_axis
    import mymath
    reload(mymath)

    # define directory locations
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/co_dispersion/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/galfa/'
    cfa_dir = '/d/bip3/ezbc/perseus/data/cfa/'
    core_dir = output_dir + 'core_arrays/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # load 2mass Av and GALFA HI images, on same grid
    cfa_data, cfa_header = load_fits(cfa_dir + \
                'perseus_cfa_cube_galfa_regrid.fits',
            return_header=True)

    #av_data += - 0.4 # subtracts background of 0.4 mags
    hi_data, hi_header = load_fits(hi_dir + \
                'perseus_galfa_cube_bin_3.7arcmin.fits',
            return_header = True)

    # make the velocity axis
    hi_velocity_axis = make_velocity_axis(hi_header)
    cfa_velocity_axis = make_velocity_axis(cfa_header)

    cfa_mom2 = mymath.calc_moment(cfa_data, moment = 2, spectral_axis = 0)

    # define core properties
    cores = {'L1495':
                {'center_wcs': [(4,14,0), (28, 11, 0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,16,30), (27,44,30), (4,5,20), (28,28,33)]
                 },
             'L1495A':
                {'center_wcs': [(4,18,0), (28,23., 0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,28,23),(28,12,50),(4,16,23),(29,46,5)],
                 },
             'B213':
                {'center_wcs': [(4, 19, 0), (27, 15,0)],
                 'map': None,
                 'threshold': 4.75,
                 'box_wcs': [(4,22,27), (26,45,47),(4,5,25),(27,18,48)],
                },
             'B220':
                {'center_wcs': [(4, 41, 0.), (26,7,0)],
                 'map': None,
                 'threshold': 7,
                 'box_wcs': [(4,47,49),(25,31,13),(4,40,37),(27,31,17)],
                 },
             'L1527':
                {'center_wcs': [(4, 39, 0.), (25,47, 0)],
                 'map': None,
                 'threshold': 7,
                 'box_wcs': [(4,40,13), (24,46,38), (4,34,35), (25,56,7)],
                 },
             'B215':
                {'center_wcs': [(4, 23, 0), (25, 3, 0)],
                 'map': None,
                 'threshold': 3,
                 'box_wcs': [(4,24,51), (22,36,7), (4,20,54), (25,26,31)],
                 },
             'L1524':
                {'center_wcs': [(4,29,0.), (24,31.,0)],
                 'map': None,
                 'threshold': 3,
                 'box_wcs': [(4,31,0), (22,4,6), (4,25,33), (25,0,55)],
                 }
                }

    cores = convert_core_coordinates(cores, hi_header)

    if True:
        hsd_limits =[0.1,300]
        hisd_limits = [2,20]
        av_limits =[0.01,100]
        nhi_limits = [2,20]

        cores = load_ds9_region(cores,
                filename_base = region_dir + 'taurus_av_boxes_',
                header = hi_header)

        # save cores for later
        if 0:
            mask = np.zeros((cfa_mom2.shape))
            for core in cores:
                print('Calculating for core %s' % core)

                av_limits = [0.01,100]

                # Grab the mask
                xy = cores[core]['box_center_pix']
                box_width = cores[core]['box_width']
                box_height = cores[core]['box_height']
                box_angle = cores[core]['box_angle']
                mask += myg.get_rectangular_mask(cfa_mom2,
                        xy[0], xy[1],
                        width = box_width,
                        height = box_height,
                        angle = box_angle)

                cores[core]['box_vertices'] = myg.get_rect(
                            xy[0], xy[1],
                            width = box_width,
                            height = box_height,
                            angle = box_angle,)

                indices = np.where(mask == 1)

            mask[mask > 1] = 1
            cfa_mom2[mask == 0] = np.nan

        # currently have variance, need dispersion
        cfa_mom2 = cfa_mom2**0.5


        # Plot
        plot_mom2_image(mom2_image = cfa_mom2,
                header = cfa_header,
                boxes = False,
                #limits=[128,37,308,206],
                title = r'Perseus: $\sigma_{\rm CO}$ map with core ' + \
                        'boxed-regions.',
                savedir = figure_dir,
                filename='perseus_co_veldisp_map.png',
                show=True)
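mymath.calc_moment is not listed here. A sketch of an intensity-weighted second moment along the spectral axis is shown below, written in channel units since no velocity axis is passed above; whether the real helper works in channel or velocity units is an assumption.

import numpy as np

def calc_moment2(cube):
    """Intensity-weighted variance along axis 0 of a (nchan, ny, nx) cube."""
    cube = np.nan_to_num(cube)
    chan = np.arange(cube.shape[0])[:, None, None]   # channel coordinate

    total = np.sum(cube, axis=0)
    mom1 = np.sum(cube * chan, axis=0) / total
    mom2 = np.sum(cube * (chan - mom1[None, :, :])**2, axis=0) / total
    return mom2   # variance; the example above takes the square root afterwards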
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'

    # Load Data
    # ---------
    # Load global properties of cloud
    # global properties written from script
    # 'av/perseus_analysis_global_properties.txt'
    prop_file = 'perseus_global_properties'  # _' + av_data_type
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if props['use_binned_image']:
        bin_string = '_bin'
    else:
        bin_string = ''

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_planck_radiance_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'perseus_av_error_planck_radiance_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'perseus_av_planck_5arcmin' + bin_string + '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'perseus_av_error_planck_5arcmin' + bin_string + '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename +
                                            bin_string + '.fits',
                                            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres' + bin_string + '.fits',
            return_header=True)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'perseus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'perseus_av_boxes_',
                            header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    # Mask the images based on the Av threshold
    co_data_nonans = np.copy(co_data)
    co_data_nonans[np.isnan(co_data_nonans)] = 0.0
    co_mom0 = np.sum(co_data_nonans, axis=0)
    mask = ((av_image > props['av_threshold']['value']) & \
            (co_mom0 > props['co_threshold']['value']))

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask'])

    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_image_masked[mask == 1] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_error_masked[mask == 1] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan
    av_model_masked[mask == 1] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask == 1] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1, 2))

    # Derive CO spectrum
    co_data[:, region_mask == 1] = 0
    co_data[np.isnan(co_data)] = 0
    co_spectrum = np.mean(co_data, axis=(1, 2))

    # Plot
    figure_types = [
        'png',
    ]  # 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/perseus_av_model_map_' + \
                    av_data_type + '.%s' % figure_type
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/perseus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'perseus_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        print('\nSaving Av model image to \n' + filename)

        plot_av_model(
            av_image=av_image_masked,
            av_model=av_model_masked,
            header=av_header,
            results=props,
            hi_velocity_axis=velocity_axis,
            vel_range=vel_range,
            hi_spectrum=hi_spectrum,
            #hi_limits=[-15, 25, -1, 10],
            hi_limits=[-15, 25, None, None],
            co_spectrum=co_spectrum,
            co_velocity_axis=co_velocity_axis,
            limits=props['plot_limit']['pixel'],
            savedir=figure_dir + 'maps/av_models/',
            filename=filename,
            show=False)

        plot_avmod_vs_av(
            (av_model_masked, ),
            (av_image_masked, ),
            av_errors=(av_error_masked, ),
            #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
            limits=[0, 1.5, 0, 1.5],
            savedir=figure_dir + 'av/',
            gridsize=(10, 10),
            #scale=('log', 'log'),
            #scale=('linear', 'linear'),
            filename='perseus_avmod_vs_av.%s' % figure_type,
            show=False,
            std=0.22,
        )

        plot_power_spectrum(av_image_masked - av_model_masked,
            filename_prefix='perseus_av_resid_power_spectrum_' + \
                            '{0:s}'.format(av_data_type),
            filename_suffix='.{0:s}'.format(figure_type),
            savedir=figure_dir + 'power_spectra/',
            show=False)
def plot_nhi_maps(results_dict, limits=None, cube_data=None, header=None,
        load_synthetic_cube=False, show=False, velocity_range=[0, 500],
        save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG_BASE = DIR_FIG + 'nhi_map_data_synth'

    # Load HI Cube
    DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(cube=cube_data,
                                  velocity_axis=velocity_axis,
                                  velocity_range=velocity_range,
                                  )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_dict['velocity_axis']
    if not load_synthetic_cube:
        print('\nCreating synthetic cube...')
        cube_synthetic = create_synthetic_cube(results_dict, cube_data)

        np.save(FILENAME_CUBE_SYNTH, cube_synthetic)
    else:
        print('\nLoading synthetic cube...')
        cube_synthetic = np.load(FILENAME_CUBE_SYNTH)

    # Create N(HI) synthetic
    nhi_synthetic = myia.calculate_nhi(cube=cube_synthetic,
                                       velocity_axis=velocity_axis,
                                       velocity_range=velocity_range,
                                       )

    v_limits = [0, np.max(nhi_data)]
    v_limits = [-1, 41]

    if 0:
        import matplotlib.pyplot as plt
        plt.close(); plt.clf()
        fig, axes = plt.subplots(2,1)
        axes[0].imshow(nhi_data, origin='lower')
        axes[1].imshow(nhi_synthetic, origin='lower')
        plt.show()

    if save_pdf:
        ext = '.pdf'
    else:
        ext = '.png'
    filename_fig = FILENAME_FIG_BASE + ext
    print('\nPlotting N(HI) maps...')
    print(filename_fig)
    # Plot the maps together
    plot_nhi_maps(nhi_data,
                  nhi_synthetic,
                  header=header,
                  #limits=[278, -37, 282, -35],
                  limits=limits,
                  filename=filename_fig,
                  nhi_1_vlimits=v_limits,
                  nhi_2_vlimits=v_limits,
                  show=show,
                  vscale='linear',
                  )
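calculate_nhi itself is not listed in these examples. Under the usual optically thin assumption, N(HI) = 1.823e18 * integral(Tb dv) with Tb in K and dv in km/s, so a minimal version (returning values in 10^20 cm^-2, which appears consistent with the plot limits used above) could look like this:

import numpy as np

def calc_nhi_simple(cube, velocity_axis, velocity_range):
    """Optically thin N(HI) over a velocity range, in 10^20 cm^-2 (sketch)."""
    vmin, vmax = velocity_range          # assumes (vmin, vmax) ordering
    in_range = (velocity_axis >= vmin) & (velocity_axis <= vmax)
    dv = np.abs(velocity_axis[1] - velocity_axis[0])  # assume uniform channels
    integral = np.nansum(cube[in_range, :, :], axis=0) * dv   # K km/s
    return 1.823e18 * integral / 1e20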
def main(av_data_type='planck'):

    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from astropy.io import fits
    import matplotlib.pyplot as plt

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    likelihood_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)
    results_filename = 'taurus_likelihood_{0:s}'.format(av_data_type)

    # Threshold for converging DGR
    threshold_delta_dgr = 0.00005

    # define directory locations
    # --------------------------
    output_dir = '/home/ezbc/research/data/taurus/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/taurus/figures/hi_velocity_range/'
    av_dir = '/home/ezbc/research/data/taurus/av/'
    hi_dir = '/home/ezbc/research/data/taurus/hi/'
    co_dir = '/home/ezbc/research/data/taurus/co/'
    core_dir = '/home/ezbc/research/data/taurus/python_output/core_properties/'
    property_dir = '/home/ezbc/research/data/taurus/python_output/'
    region_dir = '/home/ezbc/research/data/taurus/python_output/ds9_regions/'
    likelihood_dir = '/home/ezbc/research/data/taurus/python_output/nhi_av/'


    # Load data
    # ---------
    av_data, av_header = fits.getdata(av_dir + \
                                       'taurus_av_planck_5arcmin.fits',
                                       header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'taurus_av_error_planck_5arcmin.fits',
            header=True)

    hi_data, hi_header = fits.getdata(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres.fits',
            header=True)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Velocity range over which to integrate HI
    vel_range = (0, 10)

    # Make Av model
    # -------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    #plt.clf(); plt.close()
    #plt.imshow(nhi_image, origin='lower')
    #plt.show()

    # Mask out nans and high-valued pixels
    mask = ((av_data > 30.0) | \
            np.isnan(av_data) | \
            np.isnan(av_data_error) | \
            (av_data_error == 0) | \
            np.isnan(nhi_image))

    # solve for DGR using linear least squares
    print('\nSolving for DGR...')

    delta_dgr = 1e10
    dgr = 1e10
    while delta_dgr > threshold_delta_dgr:
        A = np.array((np.ravel(nhi_image[~mask] / av_data_error[~mask]),))
        b = np.array((np.ravel(av_data[~mask] / av_data_error[~mask]),))
        A = np.matrix(A).T
        b = np.matrix(b).T
        #dgr = np.dot(np.linalg.pinv(A), b)
        dgr_new = (np.linalg.pinv(A) * b)[0, 0]
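        # Dividing both N(HI) and Av by the Av uncertainty makes this an
        # inverse-variance-weighted fit; for a single free parameter the
        # closed-form solution is dgr = sum(N*Av/sigma^2) / sum(N**2/sigma^2),
        # which is exactly what pinv(A) * b evaluates to.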

        # Create model with the DGR
        print('\nDGR = {0:.2} 10^20 cm^2 mag'.format(dgr_new))
        av_image_model = nhi_image * dgr_new

        residuals = av_data - av_image_model

        # Include only residuals which are white noise
        mask_new = get_residual_mask(residuals,
                resid_width_scale=2.0, plot_progress=0)

        # Mask non-white noise, i.e. correlated residuals.
        mask += mask_new

        npix = mask.size - np.sum(mask)
        print('Number of non-masked pixels = {0:.0f}'.format(npix))

        # Reset while loop conditions
        delta_dgr = np.abs(dgr - dgr_new)
        dgr = dgr_new

    plt.clf(); plt.close()
    nhi_image_copy = np.copy(nhi_image)
    nhi_image_copy[mask] = np.nan
    av_image_copy = np.copy(av_data)
    resid_image = av_image_copy - nhi_image_copy * dgr
    plt.imshow(resid_image, origin='lower')
    plt.title(r'$A_V$ Data - Model')
    plt.colorbar()
    plt.show()
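get_residual_mask, used in the masking loop above, is not shown in these examples. One plausible reading, assuming the white-noise width is estimated robustly from the residual distribution and pixels deviating by more than resid_width_scale times that width are masked, is sketched below; the original may well differ (e.g. by fitting the residual histogram or masking only positive excursions).

import numpy as np

def get_residual_mask_sketch(residuals, resid_width_scale=2.0):
    """Mask residuals inconsistent with white noise (a sketch, not the original)."""
    finite = residuals[np.isfinite(residuals)]
    center = np.median(finite)
    # robust width: median absolute deviation scaled to a Gaussian sigma
    sigma = 1.4826 * np.median(np.abs(finite - center))
    resid = np.where(np.isfinite(residuals), residuals, center)
    return np.abs(resid - center) > resid_width_scale * sigma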
def main(av_data_type='planck'):

    # Import external modules
    # -----------------------
    import numpy as np
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube
    #from astropy.io import fits
    import pyfits as fits
    import matplotlib.pyplot as plt

    # Set parameters
    # --------------
    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Name of HI noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise'

    # Threshold for converging DGR
    threshold_delta_dgr = 0.00005

    # Number of white noise standard deviations with which to fit the
    # residuals in iterative masking
    resid_width_scale = 3.0

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'

    # Likelihood axis resolutions
    vel_widths = np.arange(1, 30, 2*0.16667)
    dgrs = np.arange(0.01, 0.2, 1e-3)
    #vel_widths = np.arange(1, 50, 8*0.16667)
    #dgrs = np.arange(0.01, 0.2, 1e-2)

    # Velocity range over which to integrate HI for deriving the mask
    vel_range = (-20, 20)

    # Use binned image?
    use_binned_image = False

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = \
        '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'

    # Load data
    # ---------
    if use_binned_image:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Adjust filenames
    noise_cube_filename += bin_string
    likelihood_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                          bin_string
    results_filename = 'perseus_likelihood_{0:s}'.format(av_data_type) + \
                       bin_string

    av_data, av_header = fits.getdata(av_dir + \
                            'perseus_av_planck_5arcmin' + bin_string + '.fits',
                                      header=True)

    av_data_error, av_error_header = fits.getdata(av_dir + \
                'perseus_av_error_planck_5arcmin' + bin_string + '.fits',
            header=True)

    if use_binned_image:
        #av_data_error = (100 * 0.025**2) * np.ones(av_data_error.shape)
        av_data_error *= 5

    hi_data, hi_header = fits.getdata(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres' + bin_string + '.fits',
            header=True)

    # Load global properties
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    # Prepare data products
    # ---------------------
    # Change WCS coords to pixel coords of images
    global_props = convert_limit_coordinates(global_props, header=av_header)

    # make the velocity axes
    hi_vel_axis = make_velocity_axis(hi_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename + '.fits'):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename + '.fits')
    else:
        noise_cube, noise_header = fits.getdata(hi_dir +
                noise_cube_filename + '.fits',
            header=True)

    # Derive relevant region
    pix = global_props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block off region
    region_mask = np.logical_not(myg.get_polygon_mask(av_data, region_vertices))

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    # Derive mask by excluding correlated residuals
    # ---------------------------------------------
    nhi_image = calculate_nhi(cube=hi_data,
                              velocity_axis=hi_vel_axis,
                              velocity_range=vel_range,
                              return_nhi_error=False,
                              )

    av_model, mask, dgr = iterate_residual_masking(
                             nhi_image=nhi_image,
                             av_data=av_data,
                             av_data_error=av_data_error,
                             vel_range=vel_range,
                             threshold_delta_dgr=threshold_delta_dgr,
                             resid_width_scale=resid_width_scale,
                             init_mask=region_mask,
                             verbose=1,
                             plot_progress=0,
                             )

    # Combine region mask with new mask
    #mask += np.logical_not(region_mask)
    mask += region_mask

    if 1:
        import matplotlib.pyplot as plt
        plt.imshow(np.ma.array(av_data, mask=mask), origin='lower')
        plt.show()

    # Derive center velocity from hi
    # ------------------------------
    hi_spectrum = np.sum(hi_data[:, ~mask], axis=(1))
    vel_center = np.array((np.average(hi_vel_axis,
                           weights=hi_spectrum**2),))[0]
    print('\nVelocity center from HI = ' +\
            '{0:.2f} km/s'.format(vel_center))

    # Perform likelihood calculation of masked images
    # -----------------------------------------------
    # Define filename for plotting results
    results_filename = figure_dir + 'likelihood/'+ results_filename

    results = calc_likelihoods(
                     hi_cube=hi_data[:, ~mask],
                     hi_vel_axis=hi_vel_axis,
                     av_image=av_data[~mask],
                     av_image_error=av_data_error[~mask],
                     vel_center=vel_center,
                     vel_widths=vel_widths,
                     dgrs=dgrs,
                     results_filename='',
                     return_likelihoods=True,
                     likelihood_filename=None,
                     clobber=False,
                     conf=conf,
                     )

    # Unpack output of likelihood calculation
    (vel_range_confint, width_confint, dgr_confint, likelihoods,
            width_likelihood, dgr_likelihood, width_max, dgr_max,
            vel_range_max) = results

    print('\nHI velocity integration range:')
    print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                 vel_range_confint[1]))
    print('\nDGR:')
    print('%.1f x 10^-20 cm^2 mag' % (dgr_confint[0]))

    # Calulate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = \
            calculate_nhi(cube=hi_data,
                velocity_axis=hi_vel_axis,
                velocity_range=vel_range_max,
                noise_cube=noise_cube,
                return_nhi_error=True)
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = ((av_image_model == av_image_model) & \
               (av_data == av_data))
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask])**2 / \
            av_data_error[~mask]**2) / av_data[~mask].size

    print('\nTotal number of pixels in analysis, after masking = ' + \
            '{0:.0f}'.format(npix))

    print('\nReduced chi^2 = {0:.1f}'.format(chisq))

    # Write results to global properties
    global_props['dust2gas_ratio'] = {}
    global_props['dust2gas_ratio_error'] = {}
    global_props['hi_velocity_width'] = {}
    global_props['hi_velocity_width_error'] = {}
    global_props['dust2gas_ratio_max'] = {}
    global_props['hi_velocity_center_max'] = {}
    global_props['hi_velocity_width_max'] = {}
    global_props['hi_velocity_range_max'] =  {}
    global_props['av_threshold'] = {}
    global_props['co_threshold'] = {}
    global_props['hi_velocity_width']['value'] = width_confint[0]
    global_props['hi_velocity_width']['unit'] = 'km/s'
    global_props['hi_velocity_width_error']['value'] = width_confint[1:]
    global_props['hi_velocity_width_error']['unit'] = 'km/s'
    global_props['hi_velocity_range'] = vel_range_confint[0:2]
    global_props['hi_velocity_range_error'] = vel_range_confint[2:]
    global_props['dust2gas_ratio']['value'] = dgr_confint[0]
    global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
    global_props['dust2gas_ratio_max']['value'] = dgr_max
    global_props['hi_velocity_center_max']['value'] = vel_center
    global_props['hi_velocity_width_max']['value'] = width_max
    global_props['hi_velocity_range_max']['value'] = vel_range_max
    global_props['hi_velocity_range_conf'] = conf
    global_props['width_likelihood'] = width_likelihood.tolist()
    global_props['dgr_likelihood'] = dgr_likelihood.tolist()
    global_props['vel_centers'] = [vel_center,]
    global_props['vel_widths'] = vel_widths.tolist()
    global_props['dgrs'] = dgrs.tolist()
    global_props['likelihoods'] = likelihoods.tolist()
    global_props['av_threshold']['value'] = None
    global_props['av_threshold']['unit'] = 'mag'
    global_props['co_threshold']['value'] = None
    global_props['co_threshold']['unit'] = 'K km/s'
    global_props['chisq'] = chisq
    global_props['npix'] = npix
    global_props['mask'] = mask.tolist()
    global_props['use_binned_image'] = use_binned_image

    with open(property_dir + global_property_file, 'w') as f:
        json.dump(global_props, f)

    # Plot likelihood space
    print('\nWriting likelihood image to\n' + results_filename + '_wd.png')
    plot_likelihoods_hist(global_props,
                          plot_axes=('widths', 'dgrs'),
                          show=0,
                          returnimage=False,
                          filename=results_filename + '_wd.png',
                          contour_confs=contour_confs)

    if 0:
        plt.clf(); plt.close()
        nhi_image_copy = np.copy(nhi_image)
        nhi_image_copy[mask] = np.nan
        av_image_copy = np.copy(av_data)
        resid_image = av_image_copy - nhi_image_copy * dgr
        plt.imshow(resid_image, origin='lower')
        plt.title(r'$A_V$ Data - Model')
        plt.colorbar()
        plt.show()
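
# --------------------------------------------------------------------------
# Hedged sketch (added example, not part of the original script): the HI
# velocity centre above is the velocity axis averaged with weights equal to
# the squared HI spectrum.  A minimal synthetic demonstration of that step:
def _vel_center_demo():
    import numpy as np

    vel_axis = np.linspace(-50, 50, 201)                   # km/s
    spectrum = np.exp(-0.5 * ((vel_axis - 5.0) / 8.0)**2)  # peak at +5 km/s

    vel_center = np.average(vel_axis, weights=spectrum**2)

    return vel_center                                      # ~5 km/s
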
def main():
    ''' Executes script.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    reload(myg)

    # define directory locations
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/galfa/'
    core_dir = output_dir + 'core_arrays/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'

    # Load hi fits file
    hi_image, hi_header = pf.getdata(hi_dir + \
            'perseus_galfa_cube_bin_3.7arcmin.fits', header=True)
    h = hi_header

    # Load av fits file
    av_image, av_header = \
    pf.getdata('/d/bip3/ezbc/perseus/data/2mass/perseus_av_2mass_galfa_regrid.fits',
            header=True)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    # create nhi image
    nhi_image = calculate_nhi(hi_cube=hi_image,
            velocity_axis=velocity_axis, velocity_range=[-100,100])

    if False:
        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image, mask=av_image != av_image)

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                contour_image=av_image, contours=[5,10,15],
                savedir=figure_dir, filename='perseus_nhi_cores_map.png',
                show=True)

    cores = {'IC348':
                {'center_wcs': [(3, 44, 0), (32, 8, 0)],
                 'map': None,
                 'threshold': None,
                 'box_wcs': [(3,46,13), (26,3,24), (3,43,4), (32,25,41)],
                 },
             'NGC1333':
                {'center_wcs': [(3, 29, 11), (31, 16, 53)],
                 'map': None,
                 'threshold': None,
                 'box_wcs': None,
                 },
             'B4':
                {'center_wcs': [(3, 45, 50), (31, 42, 0)],
                 'map': None,
                 'threshold': None,
                 'box_wcs': None,
                 },
             'B5':
                {'center_wcs': [(3, 47, 34), (32, 48, 17)],
                 'map': None,
                 'threshold': None,
                 'box_wcs': None,
                 },
             #'':
             #   {'center_wcs': [],
             #    'map': None,
             #    'threshold': None,
             #    'box_wcs': None,
             #    },
            }

    cores = convert_core_coordinates(cores, h)

    if False:
        nhi_image = np.zeros(nhi_image.shape)

        for core in cores:
            core_image = np.load(core_dir + core + '.npy')
            core_indices = np.where(core_image == core_image)
            nhi_image[core_indices] += core_image[core_indices]

        nhi_image_trim =np.ma.array(nhi_image, mask=((av_image != av_image) &\
                (nhi_image == 0)))

        nhi_image_trim[nhi_image_trim == 0] = np.NaN

        read_ds9_region(av_dir + 'perseus_av_boxes.reg')

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
            savedir=figure_dir,
            cores=cores,
            filename='perseus_nhi_core_regions_map.png',
            show=True)

    if True:
        cores = load_ds9_region(cores,
                filename_base = region_dir + 'perseus_av_boxes_',
                header = h)

        # Grab the mask
        mask = np.zeros((nhi_image.shape))
        for core in cores:
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask += myg.get_rectangular_mask(nhi_image,
                                             xy[0], xy[1],
                                             width=box_width,
                                             height=box_height,
                                             angle=box_angle)

            cores[core]['box_vertices'] = myg.get_rect(xy[0], xy[1],
                                                       width=box_width,
                                                       height=box_height,
                                                       angle=box_angle)

        #print(cores[core]['box_vertices'])
        #print core, xy, box_width, box_height, box_angle

        mask[mask > 1] = 1

        #nhi_image[mask == 0] = np.nan

        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image,
                mask = (av_image != av_image))

        # Plot
        figure_types = ['pdf', 'png']
        for figure_type in figure_types:
            plot_nhi_image(nhi_image=nhi_image_trim,
                    header=hi_header,
                    contour_image=av_image,
                    contours=[2.5,5,8],
                    boxes=True,
                    cores = cores,
                    limits=[47,128,231,222,],
                    title='Perseus: N(HI) map with core boxed-regions.',
                    savedir=figure_dir,
                    filename='perseus_nhi_cores_map.%s' % figure_type,
                    show=False)
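
# --------------------------------------------------------------------------
# Hedged sketch (assumption, not the real mycoords module): make_velocity_axis
# is used throughout these examples but its source is not shown.  A plausible
# minimal version, assuming the spectral axis is the third FITS axis with
# CRVAL3 / CRPIX3 / CDELT3 given in m/s, could look like this.
def make_velocity_axis_sketch(header):
    import numpy as np

    naxis = header['NAXIS3']
    crval = header['CRVAL3']
    crpix = header['CRPIX3']
    cdelt = header['CDELT3']

    # FITS pixel indices are 1-based; build the axis, then convert m/s -> km/s
    channels = np.arange(1, naxis + 1)
    velocity_axis = (channels - crpix) * cdelt + crval

    return velocity_axis / 1000.
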
def main():
    ''' Executes script.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    reload(myg)

    # define directory locations
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/galfa/'
    core_dir = output_dir + 'core_arrays/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'

    # Load hi fits file
    hi_image, hi_header = pf.getdata(hi_dir + \
            'california_galfa_cube_bin_3.7arcmin.fits', header=True)
    h = hi_header

    # Load av fits file
    av_image, av_header = pf.getdata(av_dir + \
                'california_planck_av_regrid.fits',
            header=True)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    # create nhi image
    nhi_image = calculate_nhi(hi_cube=hi_image,
            velocity_axis=velocity_axis, velocity_range=[-100,100])

    if False:
        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image, mask=av_image != av_image)

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                contour_image=av_image, contours=[5,10,15],
                savedir=figure_dir, filename='california_nhi_cores_map.png',
                show=True)

    # define core properties
    cores = {'L1482':
                {'center_wcs': [(4, 29, 41), (35, 48, 41)],
                 'map': None,
                 'threshold': None,
                 },
              'L1483':
                {'center_wcs': [(4, 34, 57), (36, 18, 12)],
                 'map': None,
                 'threshold': None,
                 },
              'L1478':
                {'center_wcs': [(4, 25, 7), (37, 13, 0)],
                 'map': None,
                 'threshold': None,
                 },
              'L1434':
                {'center_wcs': [(3, 50, 51), (35, 15, 10)],
                 'map': None,
                 'threshold': None,
                 },
              'L1503':
                {'center_wcs': [(4, 40, 27), (29, 57, 12)],
                 'map': None,
                 'threshold': None,
                 },
              'L1507':
                {'center_wcs': [(4, 42, 51), (29, 44, 47)],
                 'map': None,
                 'threshold': None,
                 },
                }

    cores = convert_core_coordinates(cores, h)

    if False:
        nhi_image = np.zeros(nhi_image.shape)

        for core in cores:
            core_image = np.load(core_dir + core + '.npy')
            core_indices = np.where(core_image == core_image)
            nhi_image[core_indices] += core_image[core_indices]

        nhi_image_trim =np.ma.array(nhi_image, mask=((av_image != av_image) &\
                (nhi_image == 0)))

        nhi_image_trim[nhi_image_trim == 0] = np.NaN

        read_ds9_region(av_dir + 'california_av_boxes.reg')

        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
            savedir=figure_dir,
            cores=cores,
            filename='california_nhi_core_regions_map.png',
            show=True)

    if True:
        cores = load_ds9_region(cores,
                filename_base = region_dir + 'california_av_boxes_',
                header = h)

        # Grab the mask
        mask = np.zeros((nhi_image.shape))
        for core in cores:
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask += myg.get_rectangular_mask(nhi_image,
                                             xy[0], xy[1],
                                             width=box_width,
                                             height=box_height,
                                             angle=box_angle)

            cores[core]['box_vertices'] = myg.get_rect(xy[0], xy[1],
                                                       width=box_width,
                                                       height=box_height,
                                                       angle=box_angle)

        #print(cores[core]['box_vertices'])
        #print core, xy, box_width, box_height, box_angle

        mask[mask > 1] = 1

        #nhi_image[mask == 0] = np.nan

        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image,
                mask = (av_image != av_image))

        # Plot
        plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                contour_image=av_image, contours=[3,6,10],
                boxes=True, cores = cores, #limits=[128,37,308,206],
                title='California: N(HI) map with core boxed-regions.',
                savedir=figure_dir, filename='california_nhi_cores_map.pdf',
                show=True)
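
# --------------------------------------------------------------------------
# Hedged sketch (added example, not part of the original script): the core
# loops above sum overlapping 0/1 box masks and then clip values above 1.
# With boolean arrays the same result is a logical OR of the individual
# masks; the boxes below stand in for myg.get_rectangular_mask() output.
def _combine_box_masks_demo():
    import numpy as np

    shape = (100, 100)
    box_a = np.zeros(shape, dtype=bool)
    box_b = np.zeros(shape, dtype=bool)
    box_a[10:40, 20:60] = True
    box_b[30:70, 50:90] = True

    # equivalent to mask += box; mask[mask > 1] = 1
    mask = box_a | box_b

    return mask
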
def main():
    ''' Executes script.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    from os import system,path

    # define directory locations
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/' + \
            'taurus_perseus_comparison/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/'
    perseus_dir = '/d/bip3/ezbc/perseus/'
    taurus_dir = '/d/bip3/ezbc/taurus/'
    av_dir = 'data/av/'
    hi_dir = 'data/galfa/'
    core_dir = output_dir + 'core_arrays/'

    # Load hi fits file
    data_dict = {'perseus':{
    	            'hi_file': perseus_dir + hi_dir + \
    	                    'perseus.galfa.cube.bin.4arcmin.fits',
    	            'hi_noise_file': perseus_dir + hi_dir + \
    	                    'perseus.galfa.cube.bin.4arcmin.noise.fits',
    	            'av_file': perseus_dir + 'data/2mass/' + \
    	                    '2mass_av_lee12_nocal_regrid.fits',
    	            'hi_data': None,
                    'hi_header': None,
                    'hi_vel_axis': None,
                    'av_data': None,
                    'av_header': None,
                    'nhi_image': None,
                    'nhi_error_image': None,
                    'av_noise_region': [54,66,83,103]},

                'taurus':{
    	            'hi_file': taurus_dir + hi_dir + \
    	                    'taurus.galfa.cube.bin.4arcmin.fits',
    	            'hi_noise_file': taurus_dir + hi_dir + \
    	                    'taurus.galfa.cube.bin.4arcmin.noise.fits',
    	            'av_file': taurus_dir + av_dir + \
    	                    'taurus_av_k09_regrid.fits',
    	            'hi_data': None,
                    'hi_header': None,
                    'hi_vel_axis': None,
                    'av_data': None,
                    'av_header': None,
                    'nhi_image': None,
                    'nhi_error_image': None,
                    'av_noise_region': [179,233,195,260]}}

    for data in data_dict:
        # Get hi data
        data_dict[data]['hi_data'], data_dict[data]['hi_header'] = \
            pf.getdata(data_dict[data]['hi_file'], header=True)

        # Create hi velocity axis
        data_dict[data]['hi_vel_axis'] = \
                make_velocity_axis(data_dict[data]['hi_header'])

        # Calculate or load nhi noise cube
        noise_cube_filename = data_dict[data]['hi_noise_file']
        if not path.isfile(noise_cube_filename):
            noise_cube = calculate_noise_cube(cube = data_dict[data]['hi_data'],
                velocity_axis = data_dict[data]['hi_vel_axis'],
                velocity_noise_range=[-110,-90,90,110], Tsys=30.,
                filename = noise_cube_filename)
        else:
            noise_cube, h = load_fits(noise_cube_filename,
                return_header=True)

        # calculate N(HI) image
        data_dict[data]['nhi_image'], data_dict[data]['nhi_error_image'] = \
            calculate_nhi(cube = data_dict[data]['hi_data'],
                velocity_axis = data_dict[data]['hi_vel_axis'],
                velocity_range=[-5,15], Tsys=30., noise_cube = noise_cube)

        # get av data
        data_dict[data]['av_data'], data_dict[data]['av_header'] = \
            pf.getdata(data_dict[data]['av_file'], header=True)

    cores = {'L1495':
                {'wcs_position': [15*(4+14/60.), 28+11/60., 0],
                 'map': None,
                 'threshold': 4.75,
                 'box': [206,242,244,287]},
             'L1495A':
                {'wcs_position': [15*(4+18/60.), 28+23/60., 0],
                 'map': None,
                 'threshold': 4.75,
                 'box': [206,212,236,242]},
             'B213':
                {'wcs_position': [15*(4+19/60.), 27+15/60., 0],
                 'map': None,
                 'threshold': 4.75,
                 'box': [177,206,206,242]},
             'B220':
                {'wcs_position': [15*(4+41/60.), 26+7/60., 0],
                 'map': None,
                 'threshold': 7,
                 'box': [179,131,199,157]},
             'L1527':
                {'wcs_position': [15*(4+39/60.), 25+47/60., 0],
                 'map': None,
                 'threshold': 7,
                 'box': [165,152,179,172]},
             'B215':
                {'wcs_position': [15*(4+23/60.), 25+3/60., 0],
                 'map': None,
                 'threshold': 3,
                 'box': [143,209,177,243]},
             'L1524':
                {'wcs_position': [15*(4+29/60.), 24+31/60., 0],
                 'map': None,
                 'threshold': 3,
                 'box': [138,177,167,209]}}


    print_properties(data_dict)
    print_core_properties(data_dict, cores)

    return data_dict[data]['nhi_image']
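
# --------------------------------------------------------------------------
# Hedged sketch (assumption about calculate_noise_cube, whose internals are
# not shown here): the noise is presumably estimated from the line-free
# channels given by velocity_noise_range, e.g. the standard deviation of each
# profile in the |v| = 90-110 km/s windows.  A minimal per-pixel version:
def _noise_from_line_free_channels(cube, velocity_axis,
                                   vel_range=(-110, -90, 90, 110)):
    import numpy as np

    line_free = ((velocity_axis >= vel_range[0]) &
                 (velocity_axis <= vel_range[1])) | \
                ((velocity_axis >= vel_range[2]) &
                 (velocity_axis <= vel_range[3]))

    # std over the spectral axis in the line-free windows, one value per pixel
    noise_map = np.nanstd(cube[line_free, :, :], axis=0)

    return noise_map
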
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck',
         use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'taurus_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'taurus_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/hi/'
    co_dir = '/d/bip3/ezbc/taurus/data/co/'
    core_dir = '/d/bip3/ezbc/taurus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/taurus/data/python_output/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'taurus_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'taurus_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'taurus_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'taurus_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/taurus_analysis_global_properties.txt'
    if region is not None:
        # note: likelihood_filename and results_filename are assumed to be
        # defined earlier when a region is specified
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']
    if dgr is not None:
        props['dust2gas_ratio']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio']['value']

    # define core properties
    with open(core_dir + 'taurus_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'taurus_av_boxes_',
                            header=hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = False
    av_image_masked = np.copy(av_image)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_error_masked = np.copy(av_image_error)
    #av_image_masked[(mask == 1) & (region_mask == 1)] = np.nan

    av_model_masked = np.copy(av_model)
    #av_model_masked[(mask == 1) & (region_mask == 1)] = np.nan

    if mask_images:
        av_image_masked[mask] = np.nan
        av_error_masked[mask] = np.nan
        av_model_masked[mask] = np.nan

    indices = ((np.isnan(av_model_masked)) & \
               (np.isnan(av_image_masked)) & \
               (np.isnan(av_image_error)))

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    if 0:
        import matplotlib.pyplot as plt
        av_plot_data = np.copy(av_image)
        av_plot_data[mask] = np.nan
        plt.imshow(av_plot_data, origin='lower')
        plt.xlim(props['plot_limit_bin']['pixel'][0:3:2])
        plt.ylim(props['plot_limit_bin']['pixel'][1:4:2])
        plt.show()

    # Create HI spectrum
    hi_cube[hi_cube != hi_cube] = 0
    hi_cube[:, mask] = 0
    hi_spectrum = np.mean(hi_cube, axis=(1, 2))

    if not use_binned_images:
        # Derive CO spectrum
        co_data[:, mask] = 0
        co_data[np.isnan(co_data)] = 0
        co_spectrum = np.mean(co_data, axis=(1, 2))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'single_vel_range/taurus_av_model_map_' + \
                    av_data_type + bin_string
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/taurus_av_model_map_' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'taurus_av_model_map_region{0:.0f}'.format(region)

        print('\nSaving Av model image to \n' + figure_dir + filename + \
                '.' + figure_type)

        if 0:
            plot_av_model(av_image=av_image_masked,
                          av_model=av_model_masked,
                          header=av_header,
                          results=props,
                          limits=props['plot_limit' + bin_string]['pixel'],
                          savedir=figure_dir + 'maps/av_models/',
                          filename=filename + '.' + figure_type,
                          show=False)

        if 1:
            #if not use_binned_images:
            if 0:
                plot_av_model(
                    av_image=av_image_masked,
                    av_model=av_model_masked,
                    header=av_header,
                    results=props,
                    hi_velocity_axis=velocity_axis,
                    vel_range=vel_range,
                    hi_spectrum=hi_spectrum,
                    #hi_limits=[-15, 25, -1, 10],
                    hi_limits=[-15, 25, None, None],
                    co_spectrum=co_spectrum,
                    co_velocity_axis=co_velocity_axis,
                    limits=props['plot_limit' + bin_string]['pixel'],
                    savedir=figure_dir + 'maps/av_models/',
                    filename=filename + '_spectra' + '.' + figure_type,
                    show=False)

            plot_avmod_vs_av(
                (av_model_masked, ),
                (av_image_masked, ),
                av_errors=(av_error_masked, ),
                #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                limits=[0, 20, 0, 3],
                savedir=figure_dir + 'av/',
                gridsize=(10, 10),
                #scale=('log', 'log'),
                #scale=('linear', 'linear'),
                filename='taurus_avmod_vs_av%s.%s' % (bin_string, figure_type),
                show=False,
                std=0.22,
            )

        if 0:
            plot_power_spectrum(av_image_masked - av_model_masked,
                filename_prefix='taurus_av_resid_power_spectrum_' + \
                                '{0:s}'.format(av_data_type),
                filename_suffix='.{0:s}'.format(figure_type),
                savedir=figure_dir + 'power_spectra/',
                show=False)
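
# --------------------------------------------------------------------------
# Hedged sketch (assumption about calculate_nhi, which is imported from
# myimage_analysis and not shown here): for optically thin HI the column
# density is N(HI) = 1.823e18 cm^-2 * integral(T_B dv / (K km/s)).  A minimal
# version returning N(HI) in units of 10^20 cm^-2, the units used for the DGR
# above, assuming a regularly gridded velocity axis in km/s:
def _calculate_nhi_sketch(cube, velocity_axis, velocity_range):
    import numpy as np

    in_range = (velocity_axis >= velocity_range[0]) & \
               (velocity_axis <= velocity_range[1])
    dv = np.abs(velocity_axis[1] - velocity_axis[0])  # channel width [km/s]

    # integrate T_B over the velocity range, then scale to 10^20 cm^-2
    nhi_image = 1.823e-2 * np.nansum(cube[in_range, :, :], axis=0) * dv

    return nhi_image
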
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    #import pyfits as fits
    from astropy.io import fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system,path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        # note: likelihood_filename and results_filename are assumed to be
        # defined earlier when a region is specified
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=hi_header, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir + noise_cube_filename,
            header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0 ,0)),
                                  ((2, 30, 0), (37, 0, 0))
                                  )

    props['region_name_pos'] = {
             #'taurus 1' : {'wcs' : ((3, 50,  0),
             #                       (21.5, 0, 0)),
             #             },
             #'taurus 2' : {'wcs' : ((5, 10,  0),
             #                       (21.5, 0, 0)),
             #             },
             'taurus' : {'wcs' : ((4, 40,  0),
                                  (21, 0, 0)),
                          },
             'perseus' : {'wcs' : ((3, 30,  0),
                                   (26, 0, 0)),
                          },
             #'perseus 1' : {'wcs' : ((3, 0,  0),
             #                      (34, 0, 0)),
             #             },
             #'perseus 2' : {'wcs' : ((3, 10,  0),
             #                      (22.5, 0, 0)),
             #             },
             'california' : {'wcs' : ((4, 28,  0),
                                      (34, 0, 0)),
                             },
             }

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits',
                                              'plot_limit',
                                              'region_name_pos'))

    props['plot_limit']['wcs'] = [15*(5+20./60), 15*(2+30./60.), 17, 38.5]


    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]),
                       (pix[1], pix[2]),
                       (pix[3], pix[2]),
                       (pix[3], pix[0])
                       )

    # block offregion
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'multicloud_av_nhi_map' + \
                    '.%s' % figure_type
                    #av_data_type + \
                    #'dgr{0:.3f}_'.format(dgr) + \
                    #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                    #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/multicloud_av_model_map' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in xrange(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(vel_range[i, 0],
                                                              vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'multicloud_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        # note: this overrides the filename logic above and always writes the
        # plain Av map
        filename = 'av_map'
        filename = figure_dir + 'maps/' + filename + '.' + figure_type
        print('\nSaving Av model image to \n' + filename)

        plot_av_image(av_image=av_image,
                       header=av_header,
                       limits=[15*(5+20./60), 15*(2+30./60.), 17, 38.5],
                       limits_type='wcs',
                       regions=props['regions'],
                       props=props,
                       av_vlimits=(0,15.5),
                       filename=filename,
                       show=False)

        if 0:
            filename = 'av_nhi_map'
            filename = figure_dir + 'maps/' + filename + '.' + figure_type
            print('\nSaving NHI + Av maps to \n' + filename)
            plot_nhi_image(nhi_image=nhi_image,
                           header=av_header,
                           av_image=av_image,
                           limits=props['plot_limit']['wcs'],
                           limits_type='wcs',
                           regions=props['regions'],
                           props=props,
                           hi_vlimits=(0,20),
                           av_vlimits=(0,15.5),
                           #av_vlimits=(0.1,30),
                           filename=filename,
                           show=False)
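
# --------------------------------------------------------------------------
# Hedged sketch (assumption about convert_limit_coordinates, which is not
# shown here): the ((h, m, s), (d, m, s)) tuples used for 'region_limit' and
# 'plot_limit' presumably get converted to pixel coordinates through the image
# WCS, roughly as below (positive declinations assumed for simplicity).
def _wcs_tuple_to_pixel(ra_hms, dec_dms, header):
    from astropy.wcs import WCS

    ra_deg = 15. * (ra_hms[0] + ra_hms[1] / 60. + ra_hms[2] / 3600.)
    dec_deg = dec_dms[0] + dec_dms[1] / 60. + dec_dms[2] / 3600.

    # use only the celestial axes of the (possibly 3D) header
    wcs = WCS(header).celestial
    x, y = wcs.wcs_world2pix([ra_deg], [dec_deg], 0)

    return float(x[0]), float(y[0])
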
示例#29
0
    def test_scatter_contour():

        from astropy.io import fits
        from myimage_analysis import calculate_nhi
        import mygeometry as myg
        from mycoords import make_velocity_axis
        import numpy as np
        import matplotlib.pyplot as plt

        # Parameters
        # ----------
        levels = (0.99, 0.985, 0.7)
        levels = (
            0.999,
            0.998,
            0.96,
            0.86,
            0.58,
        )
        levels = 7
        levels = np.logspace(np.log10(0.995), np.log10(0.50), 5)
        log_counts = 0
        limits = [1, 10, -3, 30]
        limits = None

        # Begin test
        # ----------
        data_dir = '/d/bip3/ezbc/perseus/data/'
        av = fits.getdata(data_dir +
                          'av/perseus_av_planck_tau353_5arcmin.fits')
        hi, hi_header = fits.getdata(data_dir + \
                          'hi/perseus_hi_galfa_cube_regrid_planckres.fits',
                          header=True)

        hi_vel_axis = make_velocity_axis(hi_header)

        nhi = calculate_nhi(
            cube=hi,
            velocity_axis=hi_vel_axis,
            velocity_range=[0, 10],
        )

        # Drop the NaNs from the images
        indices = np.where((av == av) &\
                           (nhi == nhi)
                           )

        av_nonans = av[indices]
        nhi_nonans = nhi[indices]

        fig, ax = plt.subplots()

        if limits is None:
            xmin = np.min(nhi_nonans)
            ymin = np.min(av_nonans)
            xmax = np.max(nhi_nonans)
            ymax = np.max(av_nonans)
            xscalar = 0.25 * xmax
            yscalar = 0.25 * ymax
            limits = [
                xmin - xscalar, xmax + xscalar, ymin - yscalar, ymax + yscalar
            ]

        contour_range = ((limits[0], limits[1]), (limits[2], limits[3]))

        cmap = myplt.truncate_colormap(plt.cm.binary, 0.2, 1, 1000)

        l1 = myplt.scatter_contour(
            nhi_nonans.ravel(),
            av_nonans.ravel(),
            threshold=3,
            log_counts=log_counts,
            levels=levels,
            ax=ax,
            histogram2d_args=dict(bins=30, range=contour_range),
            plot_args=dict(marker='o',
                           linestyle='none',
                           color='black',
                           alpha=0.3,
                           markersize=2),
            contour_args=dict(
                #cmap=plt.cm.binary,
                cmap=cmap,
                #cmap=cmap,
            ),
        )

        scale = ['linear', 'linear']
        ax.set_xscale(scale[0], nonposx='clip')
        ax.set_yscale(scale[1], nonposy='clip')

        ax.set_xlim(limits[0], limits[1])
        ax.set_ylim(limits[2], limits[3])

        # Adjust asthetics
        ax.set_xlabel(r'$N($H$\textsc{i}) \times\,10^{20}$ cm$^{-2}$')
        ax.set_ylabel(r'$A_V$ [mag]')
        #ax.set_title(core_names[i])
        ax.legend(loc='lower right')

        plt.savefig('test_plots/test_scatter_contour.png')
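
# --------------------------------------------------------------------------
# Hedged sketch (assumption about myplt.truncate_colormap, which is not shown
# here): a common way to restrict a matplotlib colormap to part of its range
# is to resample it into a new LinearSegmentedColormap, e.g.:
def truncate_colormap_sketch(cmap, minval=0.0, maxval=1.0, n=256):
    import numpy as np
    from matplotlib import colors

    # sample the parent colormap over [minval, maxval] and rebuild
    new_colors = cmap(np.linspace(minval, maxval, n))
    name = 'trunc({0},{1:.2f},{2:.2f})'.format(cmap.name, minval, maxval)

    return colors.LinearSegmentedColormap.from_list(name, new_colors)
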
示例#30
0
def run_cloud_analysis(global_args, ):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
        'figure_dir': figure_dir,
        'cloud_name': cloud_name,
        'filename_base': filename_base,
        'plot_diagnostics': global_args['plot_diagnostics'],
        #'av_nhi_contour': av_nhi_contour,
        'av_nhi_contour': True,
        'av_nhi_limits': [0, 20, -1, 9],
        #'av_nhi_limits': None,
    }

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)

    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110,-90, 90,110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)

    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            g = Gaussian2DKernel(std)  # kernel takes a standard deviation, not the FWHM

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in xrange(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in xrange(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')

        # note: hi_data_co_res is only defined when
        # global_args['smooth_hi_to_co_res'] is True
        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        velocity_range_dr1 = [-5, 15]
        gauss_fits = None
        gauss_fits_dr1 = None
        comp_num = None
        comp_num_dr1 = None
        hi_range_error = None

    hi_range_kwargs = {
        'velocity_range': velocity_range,
        'gauss_fits': gauss_fits,
        'comp_num': comp_num,
        'hi_range_error': hi_range_error,
        'vel_range': velocity_range,
        'gauss_fit_kwargs': gauss_fit_kwargs,
    }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_spectrum,
        gauss_fits=gauss_fits,
        comp_num=comp_num,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_dr1_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_dr1_spectrum,
        gauss_fits=gauss_fits_dr1,
        comp_num=comp_num_dr1,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range_dr1,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask pixels that are negative (erroneous) in both N(HI) maps
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot residuals between nhi maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(
        nhi_image=nhi_image / nhi_image_dr1,
        header=hi_header,
        limits=[65, 45, 25, 35],
        filename=filename,
        show=0,
        cb_text='DR2 / DR1',
        #hi_vlimits=[0.91, 0.93],
    )
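# For reference, a minimal sketch of the column-density integral that
# calculate_nhi is assumed to perform above (N(HI) = 1.8224e18 * integral of
# T_B dv, with T_B in K and dv in km/s); the helper name below is hypothetical
# and not part of the author's myimage_analysis module.
import numpy as np

def nhi_from_cube_sketch(cube, vel_axis, vel_range):
    # Integrate brightness temperature over the velocity range and return the
    # column density in units of 10^20 cm^-2.
    vel_axis = np.asarray(vel_axis)
    in_range = (vel_axis >= vel_range[0]) & (vel_axis <= vel_range[1])
    dv = np.abs(vel_axis[1] - vel_axis[0])               # channel width, km/s
    mom0 = np.nansum(cube[in_range, :, :], axis=0) * dv  # K km/s
    return 1.8224e-2 * mom0                              # 1.8224e18 / 1e20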
Example #31
def plot_hi_spectrum(cloud_results, plot_co=1):

    filename_base = \
            cloud_results['figure_dir'] + 'diagnostics/' + \
            cloud_results['filename_extension'] + '_hi_spectrum'

    from astropy.io import fits
    from mycoords import make_velocity_axis
    from myimage_analysis import bin_image
    from myio import check_file

    cloud = cloud_results['cloud']

    if plot_co:

        co_filename = cloud.co_filename

        if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
            co_filename = co_filename.replace('.fits', '_bin.fits')

        exists = \
            check_file(co_filename, clobber=False)

        if not exists:
            co_data, co_header = fits.getdata(co_filename, header=True)
            cloud.co_data, cloud.co_header = \
                bin_image(co_data,
                          binsize=(1, cloud.binsize, cloud.binsize),
                          header=co_header,
                          statistic=np.nanmean)

            fits.writeto(cloud.co_filename.replace('.fits', '_bin.fits'),
                         cloud.co_data,
                         cloud.co_header,
                         )
        else:
            cloud.co_data, cloud.co_header = \
                fits.getdata(co_filename, header=True)

        cloud.co_vel_axis = make_velocity_axis(cloud.co_header)

    # Derive relevant region
    if cloud_results['args']['bin_procedure'] in ('all', 'mle'):
        av_filename = cloud.av_filename_bin
        hi_filename = cloud.hi_filename_bin
    else:
        av_filename = cloud.av_filename
        hi_filename = cloud.hi_filename
    hi_mask = cloud.region_mask
    av_data, av_header = fits.getdata(av_filename, header=True)
    cloud.hi_data, cloud.hi_header = \
            fits.getdata(hi_filename, header=True)
    cloud.load_region(cloud.region_filename, header=av_header)
    cloud._derive_region_mask(av_data=av_data)
    co_mask = cloud.region_mask
    hi_mask = co_mask

    import matplotlib.pyplot as plt
    plt.close(); plt.clf();
    co = np.copy(cloud.co_data[30,:,:])
    co[co_mask] = np.nan
    plt.imshow(co, origin='lower')
    plt.savefig('/usr/users/ezbc/Desktop/comap_' + cloud.region + '.png')

    # check that the HI and CO cubes match the region mask spatially
    assert cloud.hi_data.shape[1:] == cloud.co_data.shape[1:] == \
           cloud.region_mask.shape

    cloudpy.plot_hi_spectrum(cloud,
                      filename=filename_base + '.png',
                      limits=[-50, 30, -10, 70],
                      plot_co=plot_co,
                      hi_mask=hi_mask,
                      co_mask=co_mask,
                      )
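# cloudpy.plot_hi_spectrum above receives the region masks derived in this
# function; a rough, hypothetical sketch of how a masked, spatially collapsed
# spectrum could be computed (the author's cloudpy may do this differently),
# assuming the mask is True outside the region of interest:
import numpy as np

def masked_spectrum_sketch(cube, mask, statistic=np.nanmedian):
    # Blank pixels outside the region, then collapse the two spatial axes so
    # one value per velocity channel remains.
    cube = np.copy(cube)
    cube[:, mask] = np.nan
    return statistic(cube, axis=(1, 2))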
Example #32
    def test_scatter_contour():

        from astropy.io import fits
        from myimage_analysis import calculate_nhi
        import mygeometry as myg
        from mycoords import make_velocity_axis
        import numpy as np
        import matplotlib.pyplot as plt
        import myplt


        # Parameters
        # ----------
        levels = (0.99, 0.985, 0.7)
        levels = (0.999, 0.998, 0.96, 0.86, 0.58,)
        levels = 7
        levels = np.logspace(np.log10(0.995), np.log10(0.50), 5)
        log_counts = 0
        limits = [1, 10, -3, 30]
        limits = None

        # Begin test
        # ----------
        data_dir = '/d/bip3/ezbc/perseus/data/'
        av = fits.getdata(data_dir + 'av/perseus_av_planck_tau353_5arcmin.fits')
        hi, hi_header = fits.getdata(data_dir + \
                          'hi/perseus_hi_galfa_cube_regrid_planckres.fits',
                          header=True)

        hi_vel_axis = make_velocity_axis(hi_header)

        nhi = calculate_nhi(cube=hi,
                            velocity_axis=hi_vel_axis,
                            velocity_range=[0, 10],
                            )

        # Drop the NaNs from the images
        indices = np.where((av == av) &\
                           (nhi == nhi)
                           )

        av_nonans = av[indices]
        nhi_nonans = nhi[indices]

        fig, ax = plt.subplots()

        if limits is None:
            xmin = np.min(nhi_nonans)
            ymin = np.min(av_nonans)
            xmax = np.max(nhi_nonans)
            ymax = np.max(av_nonans)
            xscalar = 0.25 * xmax
            yscalar = 0.25 * ymax
            limits = [xmin - xscalar, xmax + xscalar,
                      ymin - yscalar, ymax + yscalar]

        contour_range = ((limits[0], limits[1]),
                         (limits[2], limits[3]))

        cmap = myplt.truncate_colormap(plt.cm.binary, 0.2, 1, 1000)

        l1 = myplt.scatter_contour(nhi_nonans.ravel(),
                             av_nonans.ravel(),
                             threshold=3,
                             log_counts=log_counts,
                             levels=levels,
                             ax=ax,
                             histogram2d_args=dict(bins=30,
                                                   range=contour_range),
                             plot_args=dict(marker='o',
                                            linestyle='none',
                                            color='black',
                                            alpha=0.3,
                                            markersize=2),
                             contour_args=dict(
                                               #cmap=plt.cm.binary,
                                               cmap=cmap,
                                               #cmap=cmap,
                                               ),
                             )

        scale = ['linear', 'linear']
        ax.set_xscale(scale[0], nonposx = 'clip')
        ax.set_yscale(scale[1], nonposy = 'clip')

        ax.set_xlim(limits[0],limits[1])
        ax.set_ylim(limits[2],limits[3])

        # Adjust aesthetics
        ax.set_xlabel(r'$N($H$\textsc{i}) \times\,10^{20}$ cm$^{-2}$')
        ax.set_ylabel(r'$A_V$ [mag]')
        #ax.set_title(core_names[i])
        ax.legend(loc='lower right')

        plt.savefig('test_plots/test_scatter_contour.png')
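# myplt.scatter_contour and myplt.truncate_colormap are the author's plotting
# helpers; a rough, hypothetical equivalent of the scatter-plus-density-contour
# idea using only numpy and matplotlib might look like this:
import numpy as np
import matplotlib.pyplot as plt

def scatter_contour_sketch(x, y, bins=30, levels=5, ax=None):
    # Scatter the points and overlay contours of their 2D histogram density.
    if ax is None:
        fig, ax = plt.subplots()
    ax.plot(x, y, 'o', color='black', alpha=0.3, markersize=2)
    counts, xedges, yedges = np.histogram2d(x, y, bins=bins)
    xcenters = 0.5 * (xedges[:-1] + xedges[1:])
    ycenters = 0.5 * (yedges[:-1] + yedges[1:])
    ax.contour(xcenters, ycenters, counts.T, levels=levels, cmap=plt.cm.binary)
    return ax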
def main():
    ''' Executes script.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    reload(myg)

    # define directory locations
    output_dir = '/d/bip3/ezbc/taurus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/taurus/figures/maps/'
    av_dir = '/d/bip3/ezbc/taurus/data/av/'
    hi_dir = '/d/bip3/ezbc/taurus/data/galfa/'
    core_dir = output_dir + 'core_arrays/'
    region_dir = '/d/bip3/ezbc/taurus/data/python_output/ds9_regions/'

    # Load hi fits file
    hi_image, hi_header = pf.getdata(hi_dir + \
            'taurus_galfa_cube_bin_3.7arcmin.fits', header=True)
    h = hi_header

    # Load av fits file
    av_image, av_header = pf.getdata(av_dir + 'taurus_av_k09_regrid.fits',
                                     header=True)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    # create nhi image
    nhi_image = calculate_nhi(hi_cube=hi_image,
                              velocity_axis=velocity_axis,
                              velocity_range=[-100, 100])

    if False:
        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image, mask=av_image != av_image)

        plot_nhi_image(nhi_image=nhi_image_trim,
                       header=hi_header,
                       contour_image=av_image,
                       contours=[5, 10, 15],
                       savedir=figure_dir,
                       filename='taurus_nhi_cores_map.png',
                       show=True)

    cores = {
        'L1495': {
            'center_wcs': [(4, 14, 0), (28, 11, 0)],
            'map': None,
            'threshold': 4.75,
            'box_wcs': [(4, 16, 30), (27, 44, 30), (4, 5, 20), (28, 28, 33)]
        },
        'L1495A': {
            'center_wcs': [(4, 18, 0), (28, 23., 0)],
            'map': None,
            'threshold': 4.75,
            'box_wcs': [(4, 28, 23), (28, 12, 50), (4, 16, 23), (29, 46, 5)],
        },
        'B213': {
            'center_wcs': [(4, 19, 0), (27, 15, 0)],
            'map': None,
            'threshold': 4.75,
            'box_wcs': [(4, 22, 27), (26, 45, 47), (4, 5, 25), (27, 18, 48)],
        },
        'B220': {
            'center_wcs': [(4, 41, 0.), (26, 7, 0)],
            'map': None,
            'threshold': 7,
            'box_wcs': [(4, 47, 49), (25, 31, 13), (4, 40, 37), (27, 31, 17)],
        },
        'L1527': {
            'center_wcs': [(4, 39, 0.), (25, 47, 0)],
            'map': None,
            'threshold': 7,
            'box_wcs': [(4, 40, 13), (24, 46, 38), (4, 34, 35), (25, 56, 7)],
        },
        'B215': {
            'center_wcs': [(4, 23, 0), (25, 3, 0)],
            'map': None,
            'threshold': 3,
            'box_wcs': [(4, 24, 51), (22, 36, 7), (4, 20, 54), (25, 26, 31)],
        },
        'L1524': {
            'center_wcs': [(4, 29, 0.), (24, 31., 0)],
            'map': None,
            'threshold': 3,
            'box_wcs': [(4, 31, 0), (22, 4, 6), (4, 25, 33), (25, 0, 55)],
        }
    }

    cores = convert_core_coordinates(cores, h)

    if False:
        nhi_image = np.zeros(nhi_image.shape)

        for core in cores:
            core_image = np.load(core_dir + core + '.npy')
            core_indices = np.where(core_image == core_image)
            nhi_image[core_indices] += core_image[core_indices]

        nhi_image_trim = np.ma.array(nhi_image, mask=((av_image != av_image) &
                                                       (nhi_image == 0)))

        nhi_image_trim[nhi_image_trim == 0] = np.NaN

        read_ds9_region(av_dir + 'taurus_av_boxes.reg')

        plot_nhi_image(nhi_image=nhi_image_trim,
                       header=hi_header,
                       savedir=figure_dir,
                       cores=cores,
                       filename='taurus_nhi_core_regions_map.png',
                       show=True)

    if True:
        cores = load_ds9_region(cores,
                                filename_base=region_dir + 'taurus_av_boxes_',
                                header=h)

        # Grab the mask
        mask = np.zeros((nhi_image.shape))
        for core in cores:
            xy = cores[core]['box_center_pix']
            box_width = cores[core]['box_width']
            box_height = cores[core]['box_height']
            box_angle = cores[core]['box_angle']
            mask += myg.get_rectangular_mask(nhi_image,
                                             xy[0],
                                             xy[1],
                                             width=box_width,
                                             height=box_height,
                                             angle=box_angle)

            cores[core]['box_vertices'] = myg.get_rect(
                xy[0],
                xy[1],
                width=box_width,
                height=box_height,
                angle=box_angle,
            )

        #print(cores[core]['box_vertices'])
        #print core, xy, box_width, box_height, box_angle

        mask[mask > 1] = 1

        #nhi_image[mask == 0] = np.nan

        # trim hi_image to av_image size
        nhi_image_trim = np.ma.array(nhi_image, mask=(av_image != av_image))

        # Plot
        figure_types = ['pdf', 'png']
        for figure_type in figure_types:
            plot_nhi_image(nhi_image=nhi_image_trim, header=hi_header,
                    contour_image=av_image, contours=[5,10,15],
                    boxes=True, cores = cores, limits=[128,37,308,206],
                    title='Taurus: N(HI) map with core boxed-regions.',
                    savedir=figure_dir, filename='taurus_nhi_cores_map.%s' % \
                            figure_type,
                    show=False)
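# myg.get_rectangular_mask and myg.get_rect are the author's geometry helpers;
# a rough, axis-aligned stand-in (ignoring the rotation angle those helpers
# support) could be sketched as follows, purely for illustration:
import numpy as np

def rectangular_mask_sketch(image, x_center, y_center, width, height):
    # Return 1 inside an axis-aligned box centered on (x_center, y_center)
    # and 0 elsewhere, on the same grid as `image`.
    yy, xx = np.indices(image.shape)
    inside = (np.abs(xx - x_center) <= width / 2.0) & \
             (np.abs(yy - y_center) <= height / 2.0)
    return inside.astype(int)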
def main():

    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube

    global hi_cube
    global hi_velocity_axis
    global hi_noise_cube
    global av_image
    global av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or by likelihood correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = "fine"
    grid_res = "course"

    # Use multithreading?
    multithread = False

    # Use Av+CO mask or only CO?
    av_and_co_mask = True

    # Derive CO mask? If co_thres = None, co_thres will be 2 * std(co)
    co_thres = 6.00  # K km/s

    # Threshold of Av below which we expect only atomic gas, in mag
    av_thres = 1.4

    # Results and fits filenames
    if av_and_co_mask:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_" + "av{0:.1f}mag".format(av_thres)
        results_filename = "taurus_likelihood_co_" + "av{0:.1f}mag".format(av_thres)
    else:
        likelihood_filename = "taurus_nhi_av_likelihoods_co_only"
        results_filename = "taurus_likelihood_co_only"

    # Name of property files results are written to
    global_property_file = "taurus_global_properties.txt"
    core_property_file = "taurus_core_properties.txt"

    # Name of noise cube
    noise_cube_filename = "taurus_hi_galfa_cube_regrid_planckres_noise.fits"

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += "_width_dgr_center"
        results_filename += "_width_dgr_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:
        if grid_res == "course":
            likelihood_filename += "_dgr_width_lowres"
            results_filename += "_dgr_width_lowres"
            velocity_centers = np.arange(-5, 10, 10 * 0.16667)
            velocity_widths = np.arange(1, 30, 10 * 0.16667)
            dgrs = np.arange(0.05, 0.7, 2e-2)
        elif grid_res == "fine":
            likelihood_filename += "_dgr_width_highres"
            results_filename += "_dgr_width_highres"
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 100, 0.16667)
            dgrs = np.arange(0.15, 0.4, 1e-3)
            velocity_widths = np.arange(1, 15, 0.16667)
            dgrs = np.arange(0.1, 0.9, 3e-3)
            # velocity_widths = np.arange(1, 40, 1)
            # dgrs = np.arange(0.15, 0.4, 1e-1)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width_center"
        results_filename += "_width_center"

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += "_width"
        results_filename += "_width"

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # define directory locations
    # --------------------------
    output_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"
    figure_dir = "/d/bip3/ezbc/taurus/figures/hi_velocity_range/"
    av_dir = "/d/bip3/ezbc/taurus/data/av/"
    hi_dir = "/d/bip3/ezbc/taurus/data/hi/"
    co_dir = "/d/bip3/ezbc/taurus/data/co/"
    core_dir = "/d/bip3/ezbc/taurus/data/python_output/core_properties/"
    property_dir = "/d/bip3/ezbc/taurus/data/python_output/"
    region_dir = "/d/bip3/ezbc/taurus/data/python_output/ds9_regions/"
    likelihood_dir = "/d/bip3/ezbc/taurus/data/python_output/nhi_av/"

    # load Planck Av and GALFA HI images, on same grid
    av_data, av_header = load_fits(av_dir + "taurus_av_planck_5arcmin.fits", return_header=True)

    av_data_error, av_error_header = load_fits(av_dir + "taurus_av_error_planck_5arcmin.fits", return_header=True)

    hi_data, h = load_fits(hi_dir + "taurus_hi_galfa_cube_regrid_planckres.fits", return_header=True)

    co_data, co_header = load_fits(co_dir + "taurus_co_cfa_cube_regrid_planckres.fits", return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)
    co_velocity_axis = make_velocity_axis(co_header)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(
            cube=hi_data,
            velocity_axis=velocity_axis,
            velocity_noise_range=[90, 110],
            header=h,
            Tsys=30.0,
            filename=hi_dir + noise_cube_filename,
        )
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename, return_header=True)

    # define core properties
    with open(core_dir + core_property_file, "r") as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, "r") as f:
        global_props = json.load(f)

    # Change WCS coords to pixel coords of images
    cores = convert_core_coordinates(cores, h)
    cores = load_ds9_region(cores, filename_base=region_dir + "taurus_av_boxes_", header=h)
    global_props = convert_limit_coordinates(global_props, header=av_header)

    print ("\nCalculating likelihoods globally")

    co_data_nonans = np.copy(co_data)
    co_data_nonans[np.isnan(co_data_nonans)] = 0.0

    # Set velocity center as CO peak
    if not center_vary:
        co_spectrum = np.sum(co_data_nonans, axis=(1, 2))
        co_avg_vel = np.average(co_velocity_axis, weights=co_spectrum)
        co_peak_vel = co_velocity_axis[co_spectrum == np.max(co_spectrum)]
        # velocity_centers = np.arange(co_peak_vel, co_peak_vel + 1, 1)
        velocity_centers = np.arange(co_avg_vel, co_avg_vel + 1, 1)

        print ("\nVelocity center from CO = " + "{0:.2f} km/s".format(velocity_centers[0]))

    # Create mask where CO is present
    core_mask = np.zeros(av_data.shape)
    # for core in cores:
    #    # Grab the mask
    #    core_mask += myg.get_polygon_mask(av_data,
    #            cores[core]['box_vertices_rotated'])

    # Calc moment 0 map of CO
    co_mom0 = np.sum(co_data_nonans, axis=0)

    # calc noise without any emission if CO threshold not already set
    if co_thres is None:
        co_noise = calc_co_noise(co_mom0, global_props)
        co_thres = 2.0 * co_noise

    # Derive relevant region
    pix = global_props["region_limit"]["pixel"]
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]), (pix[3], pix[0]))

    # block offregion
    region_mask = myg.get_polygon_mask(av_data, region_vertices)

    print ("\nRegion size = " + "{0:.0f} pix".format(region_mask[region_mask == 1].size))

    # Get indices which trace only atomic gas, i.e., no CO emission
    if av_and_co_mask:
        indices = ((co_mom0 < co_thres) & (av_data < av_thres)) & (region_mask == 1)
    elif not av_and_co_mask:
        indices = (co_mom0 < co_thres) & (region_mask == 1)
        av_thres = None

    # Write mask of pixels not used
    mask = ~indices

    # Mask global data with CO indices
    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data[indices])
    av_error_data_sub = np.copy(av_data_error[indices])

    # import matplotlib.pyplot as plt
    # av_plot_data = np.copy(av_data)
    # av_plot_data[~indices] = np.nan
    # plt.imshow(av_plot_data, origin='lower')
    # plt.contour(co_mom0, levels=(6, 12, 24), origin='lower')
    # plt.show()
    # plt.clf()
    # plt.close()

    # Plot the masked image
    av_data_masked = np.copy(av_data)
    av_data_masked[~indices] = np.nan
    figure_types = ["png"]
    for figure_type in figure_types:
        plot_av_image(
            av_image=av_data_masked,
            header=av_header,
            savedir=figure_dir + "../maps/",
            limits=global_props["region_limit"]["pixel"],
            filename="taurus_dgr_co_masked_map." + figure_type,
            show=0,
        )

    # Set global variables
    hi_cube = hi_data_sub
    hi_velocity_axis = velocity_axis
    hi_noise_cube = noise_cube_sub
    av_image = av_data_sub
    av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Compute the likelihood of Av given N(HI) over the velocity-range and DGR grids
    (vel_range_confint, dgr_confint, likelihoods, center_likelihood,
     width_likelihood, dgr_likelihood, center_max, width_max,
     dgr_max) = calc_likelihood_hi_av(
        dgrs=dgrs,
        velocity_centers=velocity_centers,
        velocity_widths=velocity_widths,
        return_likelihoods=True,
        plot_results=True,
        results_filename=results_filename,
        likelihood_filename=likelihood_dir + likelihood_filename + "_global.fits",
        clobber=clobber,
        conf=conf,
        contour_confs=contour_confs,
        multithread=multithread,
    )
    vel_range_max = (center_max - width_max / 2.0, center_max + width_max / 2.0)

    print ("\nHI velocity integration range:")
    print ("%.1f to %.1f km/s" % (vel_range_confint[0], vel_range_confint[1]))
    print ("\nDGR:")
    print ("%.1f x 10^-20 cm^2 mag" % (dgr_confint[0]))

    # Calculate chi^2 for best fit models
    # ----------------------------------
    nhi_image_temp, nhi_image_error = calculate_nhi(
        cube=hi_data, velocity_axis=hi_velocity_axis, velocity_range=vel_range_max, noise_cube=noise_cube
    )
    av_image_model = nhi_image_temp * dgr_max
    # avoid NaNs
    indices = (av_image_model == av_image_model) & (av_data == av_data)
    # add nan locations to the mask
    mask[~indices] = 1

    # count number of pixels used in analysis
    npix = mask[~mask].size

    # finally calculate chi^2
    chisq = np.sum((av_data[~mask] - av_image_model[~mask]) ** 2 / av_data_error[~mask] ** 2) / av_data[~mask].size

    print ("\nTotal number of pixels in analysis, after masking = "
           "{0:.0f}".format(npix) +
           "\nGiven a CO threshold of {0:.2f} K km/s".format(co_thres) +
           "\nand an Av threshold of {0:.2f} mag".format(av_thres))

    print ("\nReduced chi^2 = {0:.1f}".format(chisq))

    # Write results to global properties
    global_props["dust2gas_ratio"] = {}
    global_props["dust2gas_ratio_error"] = {}
    global_props["hi_velocity_width"] = {}
    global_props["dust2gas_ratio_max"] = {}
    global_props["hi_velocity_center_max"] = {}
    global_props["hi_velocity_width_max"] = {}
    global_props["hi_velocity_range_max"] = {}
    global_props["av_threshold"] = {}
    global_props["co_threshold"] = {}
    global_props["hi_velocity_width"]["value"] = vel_range_confint[1] - vel_range_confint[0]
    global_props["hi_velocity_width"]["unit"] = "km/s"
    global_props["hi_velocity_range"] = vel_range_confint[0:2]
    global_props["hi_velocity_range_error"] = vel_range_confint[2:]
    global_props["dust2gas_ratio"]["value"] = dgr_confint[0]
    global_props["dust2gas_ratio_error"]["value"] = dgr_confint[1:]
    global_props["dust2gas_ratio_max"]["value"] = dgr_max
    global_props["hi_velocity_center_max"]["value"] = center_max
    global_props["hi_velocity_width_max"]["value"] = width_max
    global_props["hi_velocity_range_max"]["value"] = vel_range_max
    global_props["hi_velocity_range_conf"] = conf
    global_props["center_likelihood"] = center_likelihood.tolist()
    global_props["width_likelihood"] = width_likelihood.tolist()
    global_props["dgr_likelihood"] = dgr_likelihood.tolist()
    global_props["vel_centers"] = velocity_centers.tolist()
    global_props["vel_widths"] = velocity_widths.tolist()
    global_props["dgrs"] = dgrs.tolist()
    global_props["likelihoods"] = likelihoods.tolist()
    global_props["av_threshold"]["value"] = av_thres
    global_props["av_threshold"]["unit"] = "mag"
    global_props["co_threshold"]["value"] = co_thres
    global_props["co_threshold"]["unit"] = "K km/s"
    global_props["chisq"] = chisq
    global_props["npix"] = npix
    global_props["mask"] = mask.tolist()

    with open(property_dir + global_property_file, "w") as f:
        json.dump(global_props, f)
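# calc_likelihood_hi_av is defined elsewhere in the author's code; as an
# illustration only, the kind of grid evaluation it is assumed to perform,
# comparing a model Av = DGR * N(HI) against the observed Av under Gaussian
# errors, could be sketched like this (all names hypothetical):
import numpy as np

def likelihood_grid_sketch(hi_cube, vel_axis, av, av_error,
                           center, widths, dgrs):
    # Evaluate a Gaussian log-likelihood on a (velocity width, DGR) grid.
    vel_axis = np.asarray(vel_axis)
    dv = np.abs(vel_axis[1] - vel_axis[0])
    logL = np.zeros((len(widths), len(dgrs)))
    for i, width in enumerate(widths):
        vel_range = (center - width / 2.0, center + width / 2.0)
        in_range = (vel_axis >= vel_range[0]) & (vel_axis <= vel_range[1])
        # N(HI) in 10^20 cm^-2 for this integration range
        nhi = np.nansum(hi_cube[in_range], axis=0) * dv * 1.8224e-2
        for j, dgr in enumerate(dgrs):
            av_model = dgr * nhi
            logL[i, j] = -0.5 * np.nansum((av - av_model)**2 / av_error**2)
    return logL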
def main():

    import grid
    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True
    velocity_centers = np.arange(-15, 30, 4)
    velocity_widths = np.arange(1, 80, 4)

    # Which likelihood fits should be performed?
    core_correlation = 0
    global_correlation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 100

    # Check if likelihood file already written, rewrite?
    likelihood_filename = 'california_nhi_av_likelihoods'
    clobber = 0
    hi_vel_range_conf = 0.50

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.22e-1  # override with a fixed DGR

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = h)

    if core_correlation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Correlate each core region Av and N(HI) for velocity ranges
            vel_range_confint, correlations, center_corr, width_corr = \
                    correlate_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgr=dgr,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_correlations=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    hi_vel_range_conf=hi_vel_range_conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                         vel_range_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_corr'] = center_corr.tolist()
            cores[core]['width_corr'] = width_corr.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_correlation:
        print('\nCalculating correlations globally')

        indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + 'california_logL_global.png'

        # Correlate each core region Av and N(HI) for velocity ranges
        vel_range_confint, correlations, center_corr, width_corr = \
                correlate_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgr=dgr,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_correlations=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + '_global.fits',
                                clobber=clobber,
                                hi_vel_range_conf=hi_vel_range_conf)

        '''
        fit_hi_vel_range(guesses=(0, 30),
                         av_image=av_data_sub,
                         av_image_error=av_error_data_sub,
                         hi_cube=hi_data_sub,
                         hi_velocity_axis=velocity_axis,
                         hi_noise_cube=noise_cube_sub,
                         dgr=dgr)
        '''

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                     vel_range_confint[1]))

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['hi_velocity_range_conf'] = hi_vel_range_conf
        global_props['center_corr'] = center_corr.tolist()
        global_props['width_corr'] = width_corr.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
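# The confidence bounds in vel_range_confint come from the author's
# correlate_hi_av; one common way to extract a ~68% interval from a
# marginalized 1D likelihood, sketched here only for illustration, is to rank
# grid cells by likelihood and accumulate probability until the requested
# mass is reached:
import numpy as np

def credible_interval_sketch(param_values, likelihood, conf=0.68):
    # Normalize the likelihood, keep the highest cells containing `conf` of
    # the probability mass, and report the parameter range they span.
    like = np.asarray(likelihood, dtype=float)
    like = like / like.sum()
    order = np.argsort(like)[::-1]
    cum = np.cumsum(like[order])
    n_keep = np.searchsorted(cum, conf) + 1
    kept = np.asarray(param_values)[order[:n_keep]]
    return kept.min(), kept.max()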
def main():

    from myimage_analysis import bin_image, calculate_nhi
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import numpy as np
    import os

    os.chdir('/d/bip3/ezbc/shield/749237_lowres/')

    # If true, deletes files to be written
    clobber = 1

    # Input cubes to bin
    in_images = ('749237_rebin_cube.fits',)

    # Bin each cube and write out the binned and error products
    out_images = []
    for in_image in in_images:

        print('Binning cube:\n' + in_image)

        cube, header = fits.getdata(in_image, header=True)

        # set freq0 setting
        header['FREQ0'] = 1.4204058E+09
        header['RESTFREQ'] = 1.4204058E+09
        header['CTYPE3'] = 'VELO'

        beamsize = header['BMAJ']
        cdelt = np.abs(header['CDELT1'])
        binsize = int(beamsize / cdelt)

        print('\tBinsize = ' + str(binsize))


        if 1:
            # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
            # cube flux calibration error = 10%
            # add errors quadratically
            cube_std = np.nanstd(cube[0, :, :])
            cube_error = ((0.1 * cube)**2 + cube_std**2)**0.5

        cube_bin, header_bin = bin_image(cube,
                                         binsize=(1, binsize, binsize),
                                         header=header,
                                         statistic=np.nanmean,
                                         )

        # cube measurement error = 700 uJy/Beam = 0.7 mJy/Beam
        # cube flux calibration error = 10%
        # add errors quadratically
        cube_bin_std = np.nanstd(cube_bin[0, :, :])
        cube_error_bin = ((0.1 * cube_bin)**2 + cube_bin_std**2)**0.5

        if 0:
            noise_func = lambda x: (1 / np.nansum(x**-2))**0.5
            cube_error_bin = bin_image(cube_error,
                                       binsize=(1, binsize, binsize),
                                       statistic=noise_func,
                                       )

        fits.writeto(in_image,
                     cube,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error'),
                     cube_error,
                     header,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube.fits', 'cube_regrid.fits'),
                     cube_bin,
                     header_bin,
                     clobber=clobber)

        fits.writeto(in_image.replace('cube', 'cube_error_regrid'),
                     cube_error_bin,
                     header_bin,
                     clobber=clobber)
        #else:
        #    cube_bin, header_bin = \
        #        fits.getdata(in_image.replace('cube.fits', 'cube_regrid.fits'),
        #                     clobber=clobber, header=True)

        # make nhi_image
        velocity_axis = make_velocity_axis(header_bin)

        # convert to T_B
        cube_bin_tb = 1.36 * 21**2 * cube_bin * 1000.0 / \
                      (header_bin['BMAJ'] * 3600.) / \
                      (3600. * header_bin['BMIN'])

        cube_tb = 1.36 * 21**2 * cube * 1000.0 / \
                      (header['BMAJ'] * 3600.) / \
                      (3600. * header['BMIN'])

        # convert moment zero images to column density units.
        #	Recall:  1 K = (7.354E-8)*[Bmaj(")*Bmin(")/lamda^2(m)] Jy/Bm

        #	Here, units of images are Jy/Bm m/s; cellsize = 2";
        #	    lambda = 0.211061140507 m

        #	Thus, for the 21 cm line of Hydrogen, we have:

        #	    1 K = Bmaj(")*Bmin(")/(6.057493205E5) Jy/Bm
        #			---- OR ----
        #	    1 Jy/Bm = (6.057493205E5)/[Bmaj(")*Bmin(")]

        #	Now, recall that: N_HI = (1.8224E18 cm^-2)*[T_b (K)]*int(dv)
        #		-- For moment maps in K km/sec, just input the values
        #		& multiply by coefficient.
        #	   -- Assure that units are Jy/Bm km/sec (i.e., divide by 1000)
        #	   Leave in units of 1E20 cm^-2 by dividing by 1E20:

        #	   For a x beam:
        #               N_HI (cm^-2) = (image) *
        #		[(6.057493205E5)/(*)] * (1/1000) * (1.8224E18 cm^-2) *
        #		(1/1E20)
        #		N_HI (cm^-2) = (image)*

        # write N(HI) maps for both the binned and full-resolution cubes
        nhi_image = calculate_nhi(cube_bin_tb,
                                  velocity_axis=velocity_axis,
                                  header=header_bin,
                                  fits_filename=in_image.replace('cube.fits',
                                                                 'nhi_regrid.fits'))
        nhi_image = calculate_nhi(cube_tb,
                                  velocity_axis=velocity_axis,
                                  header=header,
                                  fits_filename=in_image.replace('cube.fits',
                                                                 'nhi.fits'))
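# The factor 1.36 * 21**2 used above is the standard Jy/beam-to-Kelvin
# conversion, T_B [K] ~= 1.36 * lambda_cm**2 * S[mJy/beam] / (bmaj["] * bmin["]).
# A small sketch of the same conversion written as a standalone helper
# (hypothetical name; BMAJ/BMIN assumed to be in degrees as in the header above):
def jybeam_to_tb_sketch(cube_jybeam, header, wavelength_cm=21.106):
    # Convert a cube in Jy/beam to brightness temperature in K.
    bmaj_arcsec = header['BMAJ'] * 3600.0
    bmin_arcsec = header['BMIN'] * 3600.0
    flux_mjy = cube_jybeam * 1000.0
    return 1.36 * wavelength_cm**2 * flux_mjy / (bmaj_arcsec * bmin_arcsec)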
def calc_data(cloud_list):
    ''' Executes script.
    '''
    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import json

    data_dict = {}

    for i, cloud in enumerate(cloud_list):

        if cloud == 'taurus1' or cloud == 'taurus2':
            cloud_name = 'taurus'
        else:
            cloud_name = cloud

        # define directory locations
        output_dir = '/d/bip3/ezbc/%s/data/python_output/nhi_av/' % cloud_name
        figure_dir = '/d/bip3/ezbc/multicloud/figures/spectra/'
        av_dir = '/d/bip3/ezbc/%s/data/av/' % cloud_name
        hi_dir = '/d/bip3/ezbc/%s/data/hi/' % cloud_name
        co_dir = '/d/bip3/ezbc/%s/data/co/' % cloud_name
        core_dir = output_dir + 'core_arrays/'
        region_dir = '/d/bip3/ezbc/%s/data/' % cloud_name + \
                     'python_output/ds9_regions/'

        # global property filename
        global_property_filename = '{0}_global_properties'.format(cloud)
        property_dir = '/d/bip3/ezbc/{0}/data/python_output/'.format(
            cloud_name)

        av_data_type = 'planck'

        cloud_dict = {}

        # Load HI maps from Taurus, California, and Perseus
        hi_data, hi_header = fits.getdata(
                '/d/bip3/ezbc/{0}/data/hi/{0}_hi_galfa_'.format(cloud_name) + \
                        'cube_regrid_planckres_bin.fits',
                header=True)

        # Load CO maps from Taurus, California, and Perseus
        co_data, co_header = fits.getdata(
                    co_dir + '{0}_co_cfa_'.format(cloud_name) + \
                        'cube_regrid_planckres_bin.fits',
                header=True)
        av_data, av_header = \
                fits.getdata(av_dir + \
                             '{0}_av_planck_tau353_5arcmin_bin.fits'.format(cloud_name),
                             header=True)

        # Mask out region
        with open(property_dir + \
                  global_property_filename + '_' + av_data_type + \
                  '_scaled.txt', 'r') as f:
            global_props = json.load(f)

        # Load cloud division regions from ds9
        global_props = load_ds9_region(global_props,
                        filename='/d/bip3/ezbc/multicloud/data/' + \
                        'python_output/multicloud_divisions.reg',
                        #'python_output/multicloud_divisions_2.reg',
                                header=av_header)

        # Derive relevant region
        region_vertices = \
            np.array(global_props['regions'][cloud]['poly_verts']['pixel'])
        region_mask = np.logical_not(
            myg.get_polygon_mask(av_data, region_vertices))
        if 0:
            import matplotlib.pyplot as plt
            plt.imshow(np.ma.array(av_data, mask=region_mask), origin='lower')
            plt.colorbar()
            plt.show()
        hi_data[:, region_mask] = np.nan
        co_data[:, region_mask] = np.nan

        # sum along spatial axes
        cloud_dict['hi_spectrum'] = calc_global_spectrum(hi_cube=hi_data,
                                                         statistic='median')
        cloud_dict['hi_std'] = calc_global_spectrum(hi_cube=hi_data,
                                                    statistic='std')
        cloud_dict['co_spectrum'] = calc_global_spectrum(hi_cube=co_data,
                                                         statistic='median')
        vel_center = global_props['hi_velocity_center']['value']
        vel_width = global_props['hi_velocity_width']['value']
        #cloud_dict['hi_vel_range'] = (vel_center + vel_width / 2.0,
        #                              vel_center - vel_width / 2.0)
        #cloud_dict['hi_vel_range'] = global_props['hi_velocity_range_conf']
        cloud_dict['hi_vel_range'] = global_props['hi_velocity_range']
        print(global_props['hi_velocity_range'])

        # Calculate velocity
        cloud_dict['hi_velocity_axis'] = make_velocity_axis(hi_header)
        cloud_dict['co_velocity_axis'] = make_velocity_axis(co_header)

        data_dict[cloud] = cloud_dict

    return data_dict
Example #38
def main(dgr=None, vel_range=None, vel_range_type='single', region=None,
        av_data_type='planck', use_binned_images=False):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as pf
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json

    # Script parameters
    # -----------------
    if use_binned_images:
        bin_string = '_bin'
    else:
        bin_string = ''

    # Name of noise cube
    noise_cube_filename = \
            'california_hi_galfa_cube_regrid_planckres_noise' + bin_string + \
            '.fits'

    # Name of property files results are written to
    prop_file = 'california_global_properties_' + av_data_type + '_scaled'

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {'wcs' : (((5, 10, 0), (19, 0, 0)),
                                 ((4, 30, 0), (27, 0, 0))),
                          'pixel' : ()
                         }
    elif region == 2:
        region_limit = {'wcs' : (((4, 30, 0), (19, 0, 0)),
                                 ((3, 50, 0), (29, 0, 0))),
                          'pixel' : ()
                        }
    elif region == 3:
        region_limit = {'wcs' : (((4, 30, 0), (29, 0, 0)),
                                 ((3, 50, 0), (33, 0, 0))),
                          'pixel' : ()
                        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/av/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_lee12_2mass_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_lee12_iris_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'california_av_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'california_av_error_planck_5arcmin' + bin_string + \
                    '.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres' + bin_string + \
                '.fits',
            return_header=True)

    hi_noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    if not use_binned_images:
        co_data, co_header = load_fits(co_dir + \
                    'california_co_cfa_cube_regrid_planckres' + bin_string + \
                    '.fits',
                return_header=True)

    # Load global properties of cloud
    # global properties written from script
    # 'av/california_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nReading global parameter file\n' + prop_file + '.txt')
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_width = props['hi_velocity_width_max']['value']
        vel_center = np.array(props['hi_velocity_center']['value'])
        vel_center = -4.0
        vel_range = (vel_center - vel_width / 2.0,
                     vel_center + vel_width / 2.0)
    if dgr is not None:
        props['dust2gas_ratio_max']['value'] = dgr
    else:
        dgr = props['dust2gas_ratio_max']['value']
    # intercept is needed below regardless of whether dgr was passed in
    intercept = props['intercept_max']['value']

    fit_params = {}
    fit_params['dgr'] = dgr
    fit_params['intercept'] = intercept

    # define core properties
    with open(core_dir + 'california_core_properties.txt', 'r') as f:
        cores = json.load(f)

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)

    if not use_binned_images:
        # make velocity axis for co cube
        co_velocity_axis = make_velocity_axis(co_header)

    # Write core coordinates in pixels
    cores = convert_core_coordinates(cores, hi_header)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = hi_header)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
            velocity_axis=velocity_axis,
            velocity_range=vel_range,
            header=hi_header,
            noise_cube=hi_noise_cube)

    # create model av map
    av_model = nhi_image * dgr

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0],
                                     vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0],
                                         vel_range[i, 1]))

    print('\nDGR:')
    print('%.2f x 10^-20 cm^2 mag' % (dgr))

    print('\nIntercept:')
    print('%.2f mag' % (intercept))

    # Get mask and mask images
    mask = np.asarray(props['mask' + bin_string])

    mask_images = 1

    if mask_images:
        av_image[mask] = np.nan
        nhi_image[mask] = np.nan
        av_image_error[mask] = np.nan
        av_model[mask] = np.nan

    indices = ((np.isnan(av_model)) & \
               (np.isnan(av_image)) & \
               (np.isnan(av_image_error)))

    if 1:
        import matplotlib.pyplot as plt
        plt.imshow(av_image)
        plt.show()

    print('\nTotal number of pixels after masking = ' + str(props['npix']))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            filename = 'california_av_vs_nhi_' + av_data_type + bin_string

        filename = figure_dir + filename + '.' + figure_type

        print('\nSaving Av model image to \n' + filename)

        plot_av_vs_nhi(nhi_image,
                av_image,
                av_error=av_image_error,
                #limits=[10**-1, 10**1.9, 10**0, 10**1.7],
                fit_params=fit_params,
                limits=[5,40,-0.2,2],
                #limits=[0,30,0,10],
                gridsize=(10,10),
                #scale=('log', 'log'),
                #scale=('linear', 'linear'),
                filename=filename,
                contour_plot=not use_binned_images,
                std=0.22,
                )
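# plot_av_vs_nhi is the author's plotting routine; a rough, hypothetical
# stand-in using matplotlib's hexbin, with the fitted model overplotted as
# Av = DGR * N(HI) + intercept, might look like:
import numpy as np
import matplotlib.pyplot as plt

def plot_av_vs_nhi_sketch(nhi, av, dgr, intercept, filename=None):
    # 2D density of (N(HI), Av) pixels plus the best-fit linear model.
    good = np.isfinite(nhi) & np.isfinite(av)
    fig, ax = plt.subplots()
    ax.hexbin(nhi[good], av[good], gridsize=30, mincnt=1, cmap=plt.cm.Greys)
    x = np.linspace(np.nanmin(nhi), np.nanmax(nhi), 100)
    ax.plot(x, dgr * x + intercept, 'k--', label='model')
    ax.set_xlabel(r'$N$(HI) [$10^{20}$ cm$^{-2}$]')
    ax.set_ylabel(r'$A_V$ [mag]')
    ax.legend(loc='lower right')
    if filename is not None:
        plt.savefig(filename)
    return ax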
def calc_data(ra=None, dec=None):

    ''' Executes script.
    '''
    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    from astropy.io import fits
    import json

    cloud_list = ('california',)
    data_dict = {}

    for i, cloud in enumerate(cloud_list):

        if cloud == 'taurus1' or cloud == 'taurus2':
            cloud_name = 'taurus'
        else:
            cloud_name = cloud

        # define directory locations
        output_dir = '/d/bip3/ezbc/%s/data/python_output/nhi_av/' % cloud_name
        figure_dir = '/d/bip3/ezbc/multicloud/figures/spectra/'
        av_dir = '/d/bip3/ezbc/%s/data/av/' % cloud_name
        hi_dir = '/d/bip3/ezbc/%s/data/hi/' % cloud_name
        co_dir = '/d/bip3/ezbc/%s/data/co/' % cloud_name
        core_dir = output_dir + 'core_arrays/'
        region_dir = '/d/bip3/ezbc/%s/data/' % cloud_name + \
                     'python_output/ds9_regions/'

        # global property filename
        global_property_filename = '{0}_global_properties'.format(cloud)
        property_dir = '/d/bip3/ezbc/{0}/data/python_output/'.format(cloud_name)

        av_data_type = 'planck'

        cloud_dict = {}

        # Load HI maps from Taurus, California, and Perseus
        hi_data, hi_header = fits.getdata(
                '/d/bip3/ezbc/{0}/data/hi/{0}_hi_galfa_'.format(cloud_name) + \
                        'cube_regrid_planckres.fits',
                header=True)

        # Load CO maps from Taurus, California, and Perseus
        co_data, co_header = fits.getdata(
                    co_dir + '{0}_co_cfa_'.format(cloud_name) + \
                        'cube_regrid_planckres.fits',
                header=True)
        av_data, av_header = \
                fits.getdata(av_dir + \
                             '{0}_av_planck_5arcmin.fits'.format(cloud_name),
                             header=True)

        # Mask out region
        with open(property_dir + \
                  global_property_filename + '_' + av_data_type + \
                  '_scaled.txt', 'r') as f:
            global_props = json.load(f)

        # Load cloud division regions from ds9
        global_props = load_ds9_region(global_props,
                        filename='/d/bip3/ezbc/multicloud/data/' + \
                        'python_output/multicloud_divisions.reg',
                                header=av_header)

        # Derive relevant region
        region_vertices = \
            np.array(global_props['regions'][cloud]['poly_verts']['pixel'])
        region_mask = np.logical_not(myg.get_polygon_mask(av_data,
                                                          region_vertices))
        if 0:
            import matplotlib.pyplot as plt
            plt.imshow(np.ma.array(av_data,
                                   mask=region_mask), origin='lower')
            plt.colorbar()
            plt.show()
        #hi_data[:, region_mask] = np.nan
        co_data[:, region_mask] = np.nan

        pix = get_pix_coords(ra=ra, dec=dec, header=av_header)
        print(pix, av_data.shape, hi_data.shape)

        if 0:
            import matplotlib.pyplot as plt
            plt.plot(hi_data[:, pix[1], pix[0]])
            plt.show()

        # sum along spatial axes
        cloud_dict['hi_spectrum'] = hi_data[:, pix[1], pix[0]]
        cloud_dict['hi_std'] = hi_data[:, pix[1], pix[0]]
        cloud_dict['co_spectrum'] = calc_global_spectrum(
                                        hi_cube=co_data,
                                        statistic='median'
                                        )

        vel_center = global_props['hi_velocity_center']['value']
        vel_width = global_props['hi_velocity_width']['value']
        #cloud_dict['hi_vel_range'] = (vel_center + vel_width / 2.0,
        #                              vel_center - vel_width / 2.0)
        #cloud_dict['hi_vel_range'] = global_props['hi_velocity_range_conf']
        cloud_dict['hi_vel_range'] = global_props['hi_velocity_range']
        #print global_props['hi_velocity_range']

        # Calculate velocity
        cloud_dict['hi_velocity_axis'] = make_velocity_axis(
                                        hi_header)
        cloud_dict['co_velocity_axis'] = make_velocity_axis(
                                        co_header)

        data_dict[cloud] = cloud_dict

    return data_dict
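# get_pix_coords above is the author's helper; converting an (RA, Dec) pair to
# pixel coordinates can be sketched with astropy's WCS, assuming here that the
# coordinates are already in degrees:
import numpy as np
from astropy.wcs import WCS

def get_pix_coords_sketch(ra_deg, dec_deg, header):
    # World-to-pixel conversion; use only the celestial part of the WCS so any
    # spectral axis in the header is ignored.
    wcs = WCS(header).celestial
    x, y = wcs.wcs_world2pix(ra_deg, dec_deg, 0)
    return np.array([x, y])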
def main():

    import grid
    import numpy as np
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or by likelihood correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = 'course'
    grid_res = 'fine'

    # Results and fits filenames
    likelihood_filename = 'california_nhi_av_likelihoods'
    results_filename = 'california_likelihood'

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += '_width_dgr_center'
        results_filename += '_width_dgr_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:

        if grid_res == 'course':
            likelihood_filename += '_dgr_width_lowres'
            results_filename += '_dgr_width_lowres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 80, 1)
            dgrs = np.arange(1e-2, 1, 2e-2)
        elif grid_res == 'fine':
            likelihood_filename += '_dgr_width_highres'
            results_filename += '_dgr_width_highres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 40, 0.16667)
            dgrs = np.arange(0.05, 0.5, 1e-3)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width_center'
        results_filename += '_width_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width'
        results_filename += '_width'

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # Which likelihood fits should be performed?
    core_likelihoodelation = 0
    global_likelihoodelation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 1

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Plot NHI vs. Av for a given velocity range
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.2e-1

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'california_av_boxes_',
            header = h)

    if core_likelihoodelation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Calculate likelihoods of each core region's Av and N(HI)
            # over the velocity ranges
            vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
                width_likelihood, dgr_likelihood = \
                    calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgrs=dgrs,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_likelihoods=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    conf=conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                         vel_range_confint[1]))
            print('DGR:')
            print('%.2f to %.2f' % (dgr_confint[0],
                                     dgr_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_likelihood'] = center_likelihood.tolist()
            cores[core]['width_likelihood'] = width_likelihood.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_likelihoodelation:
        print('\nCalculating likelihoods globally')


        mask = np.zeros(av_data_planck.shape)
        for core in cores:
            # Grab the mask
            mask += myg.get_polygon_mask(av_data_planck,
                    cores[core]['box_vertices_rotated'])

        indices = ((mask == 0) &\
                   (av_data_planck < av_threshold))


        #indices = ((av_data_planck < av_threshold))
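        # 'indices' is a 2D boolean mask over the map; indexing the
        # (velocity, y, x) cubes with it below flattens the spatial axes,
        # giving (n_velocity, n_pixel) arrays, while the Av images become 1D.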

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + results_filename

        # Calculate the global likelihoods of Av and N(HI)
        # over the velocity ranges
        vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
            width_likelihood, dgr_likelihood = \
                calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgrs=dgrs,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_likelihoods=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + \
                                        '_global.fits',
                                clobber=clobber,
                                conf=conf,
                                contour_confs=contour_confs)

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range_confint[0],
                                     vel_range_confint[1]))
        print('DGR:')
        print('%.2f to %.2f' % (dgr_confint[0],
                                dgr_confint[1]))

        global_props['dust2gas_ratio'] = {}
        global_props['dust2gas_ratio_error'] = {}

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['dust2gas_ratio']['value'] = dgr_confint[0]
        global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
        global_props['hi_velocity_range_conf'] = conf
        global_props['center_likelihood'] = center_likelihood.tolist()
        global_props['width_likelihood'] = width_likelihood.tolist()
        global_props['dgr_likelihood'] = dgr_likelihood.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()
        global_props['dgrs'] = dgrs.tolist()
        global_props['likelihoods'] = likelihoods.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
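
# The calc_likelihood_hi_av() call above is built on the linear dust model
# Av_model = DGR * N(HI). A minimal sketch of the likelihood for a single
# (velocity range, DGR) grid point, assuming Gaussian errors on Av, is given
# below; the actual calc_likelihood_hi_av() is not shown in this snippet and
# may differ in detail.
def sketch_log_likelihood(av_image, av_image_error, nhi_image, dgr):
    ''' Log-likelihood of a DGR given 1D arrays of Av, Av error, and N(HI)
    (the latter integrated over one trial velocity range). Illustrative only.
    '''
    import numpy as np

    # Model Av from the HI column density and the trial dust-to-gas ratio
    av_model = dgr * nhi_image
    chisq = np.nansum((av_image - av_model)**2 / av_image_error**2)
    return -0.5 * chisq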
def plot_cluster_nhi_panels(results_ref=None, colors=None, limits=None,
        cube=None, header=None, load_synthetic_cube=False, show=False,
        velocity_range=[0,500], save_pdf=False):

    from mycoords import make_velocity_axis
    from localmodule import plot_nhi_map_panels, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    DIR_FIG = '/d/bip3/ezbc/magellanic_stream/figures/'
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG = DIR_FIG + 'nhi_maps_components.png'
    if save_pdf:
        FILENAME_FIG = FILENAME_FIG.replace('.png','.pdf')

    # Load HI Cube
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH_BASE = DIR_HI + 'cube_synth_comp'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(cube=cube,
                                  velocity_axis=velocity_axis,
                                  velocity_range=velocity_range,
                                  )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_ref['velocity_axis']

    # get number of unique components
    component_colors = np.unique(colors)
    n_components = len(component_colors)

    nhi_list = []
    nhi_max = 0.0
    for i in xrange(n_components):
        if not load_synthetic_cube:
            print('\n\tCreating synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))

            # get the relevant parameters
            indices = np.where(colors == component_colors[i])[0]
            pix_positions = results_ref['pos_pix'][indices]
            fit_params_list = results_ref['data'][indices, 2:]

            print('\n\t\tNumber of components in cube: ' + \
                  '{0:.0f}'.format(len(fit_params_list)))

            cube_synthetic = \
                create_synthetic_cube(pix_positions=pix_positions,
                                      velocity_axis=velocity_axis,
                                      fit_params_list=fit_params_list,
                                      cube_data=cube,
                                      )

            np.save(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy', cube_synthetic)
        else:
            print('\n\tLoading synthetic cube ' + str(i+1) + ' of ' + \
                   str(n_components))
            cube_synthetic = np.load(FILENAME_CUBE_SYNTH_BASE + str(i) + '.npy')

        # Create N(HI) synthetic
        nhi_synthetic = myia.calculate_nhi(cube=cube_synthetic,
                                           velocity_axis=velocity_axis,
                                           velocity_range=velocity_range,
                                           )

        nhi_list.append(nhi_synthetic)

        nhi_max_temp = np.max(nhi_synthetic)
        if nhi_max_temp > nhi_max:
            nhi_max = nhi_max_temp

    v_limits = [0, nhi_max]

    # crop to highest valued cubes
    n_left = 4
    n_left = len(nhi_list)
    sum_list = []
    for nhi in nhi_list:
        sum_list.append(np.nansum(nhi))
    sort_indices = np.argsort(sum_list)[::-1]
    new_list = []
    for i in xrange(n_left):
        new_list.append(nhi_list[sort_indices[i]])
    nhi_list = new_list

    # Plot the maps together

    plot_nhi_map_panels(nhi_list,
                        header=header,
                        #limits=[278, -37, 282, -35],
                        limits=limits,
                        filename=FILENAME_FIG,
                        nhi_vlimits=[-0.1, 15],
                        show=show,
                        vscale='linear',
                        )
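
# Hypothetical usage sketch (not part of the original source): the function
# above expects a Gaussian-decomposition results dictionary containing
# 'velocity_axis', 'pos_pix', and 'data' entries, a per-component color
# array, and the HI cube with its header, e.g.:
#
#   from astropy.io import fits
#   cube, header = fits.getdata('/d/bip3/ezbc/multicloud/data_products/hi/'
#                               'perseus_hi_galfa_cube_sub_regrid.fits',
#                               header=True)
#   plot_cluster_nhi_panels(results_ref=results, colors=colors, cube=cube,
#                           header=header, velocity_range=[-20, 20],
#                           load_synthetic_cube=True, show=False)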
def main():

    import grid
    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from multiprocessing import Pool

    global _hi_cube
    global _hi_velocity_axis
    global _hi_noise_cube
    global _av_image
    global _av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Include only pixels within core regions for analysis?
    core_mask = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Results and fits filenames
    likelihood_filename = 'perseus_nhi_av_likelihoods_mcmc_co_av'
    results_filename = 'perseus_likelihood_mcmc_co_av'
    global _progress_filename
    _progress_filename = 'perseus_mcmc_samples.dat'

    # Define ranges of parameters
    global _av_thres_range
    _av_thres_range = (1.0, 1.1)
    _av_thres_range = (0.1, 2.0)
    global _vel_width_range
    _vel_width_range = (0.0, 80.0)
    global _dgr_range
    _dgr_range = (0.01, 0.4)
    global _velocity_center
    _velocity_center = 5.0  # km/s

    # MCMC parameters
    global _ndim
    _ndim = 3
    global _nwalkers
    _nwalkers = 100
    global _niter
    _niter = 1000
    global _init_guesses
    _init_guesses = np.array((10, 0.10, 1.0))
    global _init_spread
    _init_spread = np.array((0.1, 0.01, 0.01))
    global _mc_threads
    _mc_threads = 10
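    # These parameters presumably feed an emcee-style ensemble sampler inside
    # calc_likelihood(); the sampler setup itself is not shown in this
    # snippet. A hypothetical sketch of how they would typically be wired up:
    #
    #   import emcee
    #   pos0 = [_init_guesses + _init_spread * np.random.randn(_ndim)
    #           for _ in range(_nwalkers)]
    #   sampler = emcee.EnsembleSampler(_nwalkers, _ndim, lnprob,
    #                                   threads=_mc_threads)
    #   sampler.run_mcmc(pos0, _niter)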

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'
    core_property_file = 'perseus_core_properties.txt'

    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

    # Define limits for plotting the map
    prop_dict = {}
    prop_dict['limit_wcs'] = (((3, 58, 0), (27, 6, 0)),
                              ((3, 20, 0), (35, 0, 0)))
    prop_dict['limit_wcs'] = (((3, 58, 0), (26, 6, 0)),
                              ((3, 0, 0), (35, 0, 0)))

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    global _likelihood_dir
    _likelihood_dir = likelihood_dir

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            return_header=True)
    prop_dict['av_header'] = av_header

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'perseus_av_boxes_',
                            header=h)

    print('\nCalculating likelihoods globally')

    mask = np.zeros(av_data_planck.shape)
    for core in cores:
        # Grab the mask
        mask += myg.get_polygon_mask(av_data_planck,
                                     cores[core]['wedge_vertices_rotated'])

    co_mom0 = np.sum(co_data, axis=0)
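    # co_mom0 is the CO cube summed over velocity (a zeroth-moment proxy);
    # below, only pixels fainter than twice the standard deviation of this
    # map are kept, i.e. regions expected to be CO-faint.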

    # Mask images
    core_mask = 0
    if core_mask:
        indices = ((mask == 1) & \
                   (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0))
        mask_type = '_core_mask'
    else:
        indices = (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)]) * 2.0)
        mask_type = ''

    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data_planck[indices])
    av_error_data_sub = np.copy(av_error_data_planck[indices])

    # Set global variables
    _hi_cube = hi_data_sub
    _hi_velocity_axis = velocity_axis
    _hi_noise_cube = noise_cube_sub
    _av_image = av_data_sub
    _av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Calculate the global likelihoods of Av and N(HI) over the velocity ranges
    vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
        width_likelihood, dgr_likelihood = \
            calc_likelihood(return_likelihoods=True,
                            plot_results=True,
                            results_filename=results_filename + mask_type,
                            likelihood_filename=likelihood_dir + \
                                    likelihood_filename + \
                                    mask_type + '.npy',
                            clobber=clobber,
                            conf=conf,
                            contour_confs=contour_confs)
    '''
示例#43
0
def main(dgr=None,
         vel_range=(-5, 15),
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    import pyfits as fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use Planck dust Av map or Kainulainen 2009 optical extinction Av map?
    # options are 'planck' or 'lee12'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Which cores to include in analysis?
    cores_to_keep = [  # taur
        'L1495',
        'L1495A',
        'B213',
        'L1498',
        'B215',
        'B18',
        'B217',
        'B220-1',
        'B220-2',
        'L1521',
        'L1524',
        'L1527-1',
        'L1527-2',
        # Calif
        'L1536',
        'L1483-1',
        'L1483-2',
        'L1482-1',
        'L1482-2',
        'L1478-1',
        'L1478-2',
        'L1456',
        'NGC1579',
        #'L1545',
        #'L1517',
        #'L1512',
        #'L1523',
        #'L1512',
        # Pers
        'B5',
        'IC348',
        'B1E',
        'B1',
        'NGC1333',
        'B4',
        'B3',
        'L1455',
        'L1448',
    ]

    # Regions, regions to edit the global properties with
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_radiance_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_radiance_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)

    print('\nLoading global property file {0:s}.txt'.format(prop_file))
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    # Define velocity range
    props['hi_velocity_range'] = vel_range

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # Mask for the selected region (1 inside the region polygon)
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in xrange(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    cloud_dict = {
        'taurus': {},
        'perseus': {},
        'california': {},
    }

    # Load core properties and DS9 core regions for each cloud
    for cloud in cloud_dict:

        print('\nLoading core properties for {0:s}'.format(cloud))

        file_dir = '/d/bip3/ezbc/{0:s}/data/av/'.format(cloud)

        # define core properties
        with open('/d/bip3/ezbc/{0:s}/data/python_output/'.format(cloud) + \
                  'core_properties/{0:s}_core_properties.txt'.format(cloud),
                  'r') as f:
            cores = json.load(f)

        # Load core regions from DS9 files
        if cloud == 'aldobaran':
            region_cloud = 'california'
        else:
            region_cloud = cloud
        core_filename = region_dir.replace('multicloud',region_cloud) + \
                        '/ds9_regions/{0:s}_av_poly_cores'.format(region_cloud)

        cores = load_ds9_core_region(cores,
                                     filename_base=core_filename,
                                     header=av_header)

        cores = convert_core_coordinates(cores, av_header)

        # Remove cores
        cores_to_remove = []
        for core in cores:
            if core not in cores_to_keep:
                cores_to_remove.append(core)
        for core_to_remove in cores_to_remove:
            del cores[core_to_remove]

        cloud_dict[cloud]['cores'] = cores

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        filename = 'multicloud_av_cores_map' + \
                   '.{0:s}'.format(figure_type)

        print('\nSaving Av cores map to \n' + filename)

        plot_cores_map(
            header=av_header,
            av_image=av_image,
            limits=props['plot_limit']['pixel'],
            regions=props['regions'],
            cloud_dict=cloud_dict,
            cores_to_keep=cores_to_keep,
            props=props,
            hi_vlimits=(0, 20),
            av_vlimits=(0, 16),
            #av_vlimits=(0.1,30),
            savedir=figure_dir + 'maps/',
            filename=filename,
            show=False)
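
# Hypothetical usage sketch for the script above; per the docstring, dgr and
# vel_range fall back to the best-fit values in the property file when left
# as None:
#
#   if __name__ == '__main__':
#       main(dgr=None, vel_range=(-5, 15), region=None,
#            av_data_type='planck')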
def main():

    import grid
    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_correlation = True
    velocity_centers = np.arange(-15, 30, 4)
    velocity_widths = np.arange(1, 80, 4)

    # Which likelihood fits should be performed?
    core_correlation = 0
    global_correlation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 100

    # Check if likelihood file already written, rewrite?
    likelihood_filename = 'california_nhi_av_likelihoods'
    clobber = 0
    hi_vel_range_conf = 0.50

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.22e-1

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'california_av_boxes_',
                            header=h)

    if core_correlation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                                        cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Correlate each core region Av and N(HI) for velocity ranges
            vel_range_confint, correlations, center_corr, width_corr = \
                    correlate_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgr=dgr,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_correlations=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    hi_vel_range_conf=hi_vel_range_conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' %
                  (vel_range_confint[0], vel_range_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_corr'] = center_corr.tolist()
            cores[core]['width_corr'] = width_corr.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_correlation:
        print('\nCalculating correlations globally')

        indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + 'california_logL_global.png'

        # Correlate each core region Av and N(HI) for velocity ranges
        vel_range_confint, correlations, center_corr, width_corr = \
                correlate_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgr=dgr,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_correlations=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + '_global.fits',
                                clobber=clobber,
                                hi_vel_range_conf=hi_vel_range_conf)
        '''
        fit_hi_vel_range(guesses=(0, 30),
                         av_image=av_data_sub,
                         av_image_error=av_error_data_sub,
                         hi_cube=hi_data_sub,
                         hi_velocity_axis=velocity_axis,
                         hi_noise_cube=noise_cube_sub,
                         dgr=dgr)
        '''

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' %
              (vel_range_confint[0], vel_range_confint[1]))

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
        global_props['hi_velocity_range_conf'] = hi_vel_range_conf
        global_props['center_corr'] = center_corr.tolist()
        global_props['width_corr'] = width_corr.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
def main():

    import grid
    import numpy as np
    import numpy
    from os import system,path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    from multiprocessing import Pool

    global _hi_cube
    global _hi_velocity_axis
    global _hi_noise_cube
    global _av_image
    global _av_image_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 1

    # Include only pixels within core regions for analysis?
    core_mask = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Results and fits filenames
    likelihood_filename = 'perseus_nhi_av_likelihoods_mcmc_co_av'
    results_filename = 'perseus_likelihood_mcmc_co_av'
    global _progress_filename
    _progress_filename = 'perseus_mcmc_samples.dat'

    # Define ranges of parameters
    global _av_thres_range
    _av_thres_range = (1.0, 1.1)
    _av_thres_range = (0.1, 2.0)
    global _vel_width_range
    _vel_width_range = (0.0, 80.0)
    global _dgr_range
    _dgr_range = (0.01, 0.4)
    global _velocity_center
    _velocity_center = 5.0 # km/s

    # MCMC parameters
    global _ndim
    _ndim = 3
    global _nwalkers
    _nwalkers = 100
    global _niter
    _niter = 1000
    global _init_guesses
    _init_guesses = np.array((10, 0.10, 1.0))
    global _init_spread
    _init_spread = np.array((0.1, 0.01, 0.01))
    global _mc_threads
    _mc_threads = 10

    # Name of property files results are written to
    global_property_file = 'perseus_global_properties.txt'
    core_property_file = 'perseus_core_properties.txt'

    # Name of noise cube
    noise_cube_filename = 'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

    # Define limits for plotting the map
    prop_dict = {}
    prop_dict['limit_wcs'] = (((3, 58, 0), (27, 6, 0)),
                              ((3, 20, 0), (35, 0, 0)))
    prop_dict['limit_wcs'] = (((3, 58, 0), (26, 6, 0)),
                              ((3, 0, 0), (35, 0, 0)))

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/perseus/data/av/'
    hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
    co_dir = '/d/bip3/ezbc/perseus/data/co/'
    core_dir = '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
    region_dir = '/d/bip3/ezbc/perseus/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    global _likelihood_dir
    _likelihood_dir = likelihood_dir

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'perseus_av_planck_5arcmin.fits',
            return_header=True)
    prop_dict['av_header'] = av_header

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'perseus_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'perseus_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'perseus_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                velocity_axis=velocity_axis,
                velocity_noise_range=[90,110], header=h, Tsys=30.,
                filename=hi_dir + noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
            return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
            filename_base = region_dir + 'perseus_av_boxes_',
            header = h)

    print('\nCalculating likelihoods globally')

    mask = np.zeros(av_data_planck.shape)
    for core in cores:
        # Grab the mask
        mask += myg.get_polygon_mask(av_data_planck,
                cores[core]['wedge_vertices_rotated'])

    co_mom0 = np.sum(co_data, axis=0)

    # Mask images
    core_mask = 0
    if core_mask:
        indices = ((mask == 1) & \
                   (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0))
        mask_type = '_core_mask'
    else:
        indices = (co_mom0 < np.std(co_mom0[~np.isnan(co_mom0)])*2.0)
        mask_type = ''

    hi_data_sub = np.copy(hi_data[:, indices])
    noise_cube_sub = np.copy(noise_cube[:, indices])
    av_data_sub = np.copy(av_data_planck[indices])
    av_error_data_sub = np.copy(av_error_data_planck[indices])

    # Set global variables
    _hi_cube = hi_data_sub
    _hi_velocity_axis = velocity_axis
    _hi_noise_cube = noise_cube_sub
    _av_image = av_data_sub
    _av_image_error = av_error_data_sub

    # Define filename for plotting results
    results_filename = figure_dir + results_filename

    # Calculate the global likelihoods of Av and N(HI) over the velocity ranges
    vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
        width_likelihood, dgr_likelihood = \
            calc_likelihood(return_likelihoods=True,
                            plot_results=True,
                            results_filename=results_filename + mask_type,
                            likelihood_filename=likelihood_dir + \
                                    likelihood_filename + \
                                    mask_type + '.npy',
                            clobber=clobber,
                            conf=conf,
                            contour_confs=contour_confs)

    '''
示例#46
0
def calculate_nhi(cube = None, velocity_axis = None, velocity_range = [],
        return_nhi_error = True, noise_cube = None,
        velocity_noise_range=[90,100], Tsys = 30., header = None,
        fits_filename = None, fits_error_filename = None, verbose = True):

    ''' Calculates an N(HI) image given a velocity range within which to
    include a SpectralGrid's components.

    Parameters
    ----------
    cube : array-like, optional
        Three dimensional array with velocity axis as 0th axis. Must specify
        a velocity_axis if cube is used.
    velocity_axis : array-like, optional
        One dimensional array containing velocities corresponding to
    fits_filename : str
        If specified, and a header is provided, the nhi image will be written.
    header : pyfits.Header
        Header from cube.

    '''

    import numpy as np
    import pyfits as pf
    from mycoords import make_velocity_axis

    if velocity_axis is None:
        velocity_axis = make_velocity_axis(header)

    # Calculate NHI from cube if set
    if cube is not None and velocity_axis is not None:
        image = np.empty((cube.shape[1],
                          cube.shape[2]))
        image[:,:] = np.NaN
        indices = np.where((velocity_axis > velocity_range[0]) & \
                (velocity_axis < velocity_range[1]))[0]
        image[:,:] = cube[indices,:,:].sum(axis=0)
        # Calculate image error
        if return_nhi_error:
            image_error = np.empty((cube.shape[1],
                              cube.shape[2]))
            image_error[:,:] = np.NaN
            image_error[:,:] = (noise_cube[indices,:,:]**2).sum(axis=0)**0.5

    # NHI in units of 1e20 cm^-2
    nhi_image = np.ma.array(image,mask=np.isnan(image)) * 1.823e-2


    if fits_filename is not None and header is not None:
        if verbose:
            print('Writing N(HI) image to FITS file %s' % fits_filename)
        header['BUNIT'] = '1e20 cm^-2'
        header.remove('CDELT3')
        header.remove('CRVAL3')
        header.remove('CRPIX3')
        header.remove('CTYPE3')
        header.remove('NAXIS3')
        header['NAXIS'] = 2

        pf.writeto(fits_filename, image*1.823e-2, header = header, clobber =
                True, output_verify = 'fix')

    if fits_error_filename is not None and header is not None:
        if verbose:
            print('Writing N(HI) error image to FITS file %s' %
                  fits_error_filename)

        pf.writeto(fits_error_filename, image_error * 1.823e-2, header =
                header, clobber = True, output_verify = 'fix')

    if return_nhi_error:
        nhi_image_error = np.ma.array(image_error,
                mask=np.isnan(image_error)) * 1.823e-2
        return nhi_image, nhi_image_error
    else:
        return nhi_image
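
# Usage sketch (hypothetical filenames). The factor 1.823e-2 above corresponds
# to N(HI) = 1.823e18 cm^-2 per K km/s expressed in units of 1e20 cm^-2, so
# the plain channel sum implicitly assumes ~1 km/s channels (or a cube already
# scaled by its channel width).
#
#   import pyfits as pf
#   from mycoords import make_velocity_axis
#   cube, header = pf.getdata('hi_cube.fits', header=True)
#   noise_cube = pf.getdata('hi_noise_cube.fits')
#   velocity_axis = make_velocity_axis(header)
#   nhi, nhi_error = calculate_nhi(cube=cube, velocity_axis=velocity_axis,
#                                  velocity_range=[-5, 15],
#                                  noise_cube=noise_cube, header=header)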
示例#47
0
def main():

    import grid
    import numpy as np
    import numpy
    from os import system, path
    import mygeometry as myg
    from mycoords import make_velocity_axis
    import json
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error

    # parameters used in script
    # -------------------------
    # HI velocity integration range
    # Determine HI integration velocity by CO or correlation with Av?
    hi_av_likelihoodelation = True

    center_vary = False
    width_vary = True
    dgr_vary = True

    # Check if likelihood file already written, rewrite?
    clobber = 0

    # Confidence of parameter errors
    conf = 0.68
    # Confidence of contour levels
    contour_confs = (0.68, 0.95)

    # Coarse, large grid or fine, small grid?
    grid_res = 'course'
    grid_res = 'fine'

    # Results and fits filenames
    likelihood_filename = 'california_nhi_av_likelihoods'
    results_filename = 'california_likelihood'

    # Define ranges of parameters
    if center_vary and width_vary and dgr_vary:
        likelihood_filename += '_width_dgr_center'
        results_filename += '_width_dgr_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1e-2, 1, 2e-2)
    elif not center_vary and width_vary and dgr_vary:

        if grid_res == 'course':
            likelihood_filename += '_dgr_width_lowres'
            results_filename += '_dgr_width_lowres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 80, 1)
            dgrs = np.arange(1e-2, 1, 2e-2)
        elif grid_res == 'fine':
            likelihood_filename += '_dgr_width_highres'
            results_filename += '_dgr_width_highres'
            velocity_centers = np.arange(5, 6, 1)
            velocity_widths = np.arange(1, 40, 0.16667)
            dgrs = np.arange(0.05, 0.5, 1e-3)
    elif center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width_center'
        results_filename += '_width_center'

        velocity_centers = np.arange(-15, 30, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)
    elif not center_vary and width_vary and not dgr_vary:
        likelihood_filename += '_width'
        results_filename += '_width'

        velocity_centers = np.arange(5, 6, 1)
        velocity_widths = np.arange(1, 80, 1)
        dgrs = np.arange(1.1e-1, 1.2e-1, 0.1e-1)

    # Which likelihood fits should be performed?
    core_likelihoodelation = 0
    global_likelihoodelation = 1

    # Name of property files results are written to
    global_property_file = 'california_global_properties.txt'
    core_property_file = 'california_core_properties.txt'

    # Threshold of Av below which we expect only atomic gas, in mag
    av_threshold = 1

    # Name of noise cube
    noise_cube_filename = 'california_hi_galfa_cube_regrid_planckres_noise.fits'

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/california/figures/hi_velocity_range/'
    av_dir = '/d/bip3/ezbc/california/data/av/'
    hi_dir = '/d/bip3/ezbc/california/data/hi/'
    co_dir = '/d/bip3/ezbc/california/data/co/'
    core_dir = '/d/bip3/ezbc/california/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/california/data/python_output/'
    region_dir = '/d/bip3/ezbc/california/data/python_output/ds9_regions/'
    likelihood_dir = '/d/bip3/ezbc/california/data/python_output/nhi_av/'

    # load Planck Av and GALFA HI images, on same grid
    av_data_planck, av_header = load_fits(av_dir + \
                'california_av_planck_5arcmin.fits',
            return_header=True)

    av_error_data_planck, av_error_header = load_fits(av_dir + \
                'california_av_error_planck_5arcmin.fits',
            return_header=True)

    hi_data, h = load_fits(hi_dir + \
                'california_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = make_velocity_axis(h)

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        noise_cube = calculate_noise_cube(cube=hi_data,
                                          velocity_axis=velocity_axis,
                                          velocity_noise_range=[90, 110],
                                          header=h,
                                          Tsys=30.,
                                          filename=hi_dir +
                                          noise_cube_filename)
    else:
        noise_cube, noise_header = load_fits(hi_dir + noise_cube_filename,
                                             return_header=True)

    # define core properties
    with open(core_dir + core_property_file, 'r') as f:
        cores = json.load(f)
    with open(property_dir + global_property_file, 'r') as f:
        global_props = json.load(f)

    dgr = global_props['dust2gas_ratio']['value']
    dgr = 1.2e-1

    cores = convert_core_coordinates(cores, h)

    cores = load_ds9_region(cores,
                            filename_base=region_dir + 'california_av_boxes_',
                            header=h)

    if core_likelihoodelation:
        for core in cores:
            print('\nCalculating for core %s' % core)

            # Grab the mask
            mask = myg.get_polygon_mask(av_data_planck,
                                        cores[core]['box_vertices_rotated'])

            indices = ((mask == 0) &\
                       (av_data_planck < av_threshold))

            hi_data_sub = np.copy(hi_data[:, indices])
            noise_cube_sub = np.copy(noise_cube[:, indices])
            av_data_sub = np.copy(av_data_planck[indices])
            av_error_data_sub = np.copy(av_error_data_planck[indices])

            # Define filename for plotting results
            results_filename = figure_dir + 'california_logL_%s.png' % core

            # Calculate likelihoods of each core region's Av and N(HI)
            # over the velocity ranges
            vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
                width_likelihood, dgr_likelihood = \
                    calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                    hi_velocity_axis=velocity_axis,
                                    hi_noise_cube=noise_cube_sub,
                                    av_image=av_data_sub,
                                    av_image_error=av_error_data_sub,
                                    dgrs=dgrs,
                                    velocity_centers=velocity_centers,
                                    velocity_widths=velocity_widths,
                                    return_likelihoods=True,
                                    plot_results=True,
                                    results_filename=results_filename,
                                    likelihood_filename=likelihood_dir + \
                                            likelihood_filename + \
                                            '{0:s}.fits'.format(core),
                                    clobber=clobber,
                                    conf=conf)

            print('HI velocity integration range:')
            print('%.1f to %.1f km/s' %
                  (vel_range_confint[0], vel_range_confint[1]))
            print('DGR:')
            print('%.2f to %.2f' %
                  (dgr_confint[0], dgr_confint[1]))

            cores[core]['hi_velocity_range'] = vel_range_confint[0:2]
            cores[core]['hi_velocity_range_error'] = vel_range_confint[2:]
            cores[core]['center_likelihood'] = center_likelihood.tolist()
            cores[core]['width_likelihood'] = width_likelihood.tolist()
            cores[core]['vel_centers'] = velocity_centers.tolist()
            cores[core]['vel_widths'] = velocity_widths.tolist()

        with open(core_dir + core_property_file, 'w') as f:
            json.dump(cores, f)

    if global_likelihoodelation:
        print('\nCalculating likelihoods globally')

        mask = np.zeros(av_data_planck.shape)
        for core in cores:
            # Grab the mask
            mask += myg.get_polygon_mask(av_data_planck,
                                         cores[core]['box_vertices_rotated'])

        indices = ((mask == 0) &\
                   (av_data_planck < av_threshold))

        #indices = ((av_data_planck < av_threshold))

        hi_data_sub = np.copy(hi_data[:, indices])
        noise_cube_sub = np.copy(noise_cube[:, indices])
        av_data_sub = np.copy(av_data_planck[indices])
        av_error_data_sub = np.copy(av_error_data_planck[indices])

        # Define filename for plotting results
        results_filename = figure_dir + results_filename

        # Calculate likelihoods of the global Av and N(HI) correlation over
        # the velocity ranges
        vel_range_confint, dgr_confint, likelihoods, center_likelihood,\
            width_likelihood, dgr_likelihood = \
                calc_likelihood_hi_av(hi_cube=hi_data_sub,
                                hi_velocity_axis=velocity_axis,
                                hi_noise_cube=noise_cube_sub,
                                av_image=av_data_sub,
                                av_image_error=av_error_data_sub,
                                dgrs=dgrs,
                                velocity_centers=velocity_centers,
                                velocity_widths=velocity_widths,
                                return_likelihoods=True,
                                plot_results=True,
                                results_filename=results_filename,
                                likelihood_filename=likelihood_dir + \
                                        likelihood_filename + \
                                        '_global.fits',
                                clobber=clobber,
                                conf=conf,
                                contour_confs=contour_confs)

        print('HI velocity integration range:')
        print('%.1f to %.1f km/s' %
              (vel_range_confint[0], vel_range_confint[1]))
        print('DGR:')
        print('%.2f to %.2f' % (dgr_confint[0], dgr_confint[1]))

        global_props['dust2gas_ratio'] = {}
        global_props['dust2gas_ratio_error'] = {}

        global_props['hi_velocity_range'] = vel_range_confint[0:2]
        global_props['hi_velocity_range_error'] = vel_range_confint[2:]
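        # dgr_confint[0] is taken as the best-fit DGR and dgr_confint[1:] as
        # its confidence-interval errors (ordering inferred from the slicing
        # below, mirroring vel_range_confint)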
        global_props['dust2gas_ratio']['value'] = dgr_confint[0]
        global_props['dust2gas_ratio_error']['value'] = dgr_confint[1:]
        global_props['hi_velocity_range_conf'] = conf
        global_props['center_likelihood'] = center_likelihood.tolist()
        global_props['width_likelihood'] = width_likelihood.tolist()
        global_props['dgr_likelihood'] = dgr_likelihood.tolist()
        global_props['vel_centers'] = velocity_centers.tolist()
        global_props['vel_widths'] = velocity_widths.tolist()
        global_props['dgrs'] = dgrs.tolist()
        global_props['likelihoods'] = likelihoods.tolist()

        with open(property_dir + global_property_file, 'w') as f:
            json.dump(global_props, f)
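
# A minimal sketch (not from the original source) of reading back the global
# properties written above; the directory and file name in the usage note are
# illustrative assumptions and should match whatever was passed to json.dump.
import json

def load_global_props(property_dir, global_property_file):
    """Load the global-property JSON written by the likelihood run."""
    with open(property_dir + global_property_file, 'r') as f:
        return json.load(f)

# Hypothetical usage:
# props = load_global_props('/d/bip3/ezbc/california/data/python_output/',
#                           'california_global_properties.txt')
# print(props['hi_velocity_range'], props['dust2gas_ratio']['value'])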
Example #48
0
def main(dgr=None,
         vel_range=None,
         vel_range_type='single',
         region=None,
         av_data_type='planck'):
    ''' Executes script.

    Parameters
    ----------
    dgr : float
        If None, pulls best-fit value from properties.
    vel_range : tuple
        If None, pulls best-fit value from properties.
    '''

    # import external modules
    #import pyfits as fits
    from astropy.io import fits
    import numpy as np
    from mycoords import make_velocity_axis
    import mygeometry as myg
    from myimage_analysis import calculate_nhi, calculate_noise_cube, \
        calculate_sd, calculate_nh2, calculate_nh2_error
    import json
    from os import system, path

    # Script parameters
    # -----------------
    # Name of noise cube
    noise_cube_filename = 'multicloud_hi_galfa_cube_regrid_planckres_noise.fits'

    # Use the Planck dust Av map or the Lee+12 extinction Av maps?
    # options handled below: 'planck' (default), 'planck_rad',
    # 'lee12_2mass', 'lee12_iris'
    #av_data_type = 'lee12'
    #av_data_type = 'planck'

    # Global parameter file
    prop_file = 'multicloud_global_properties'

    # Region limits used to restrict the global properties
    if region == 1:
        region_limit = {
            'wcs': (((5, 10, 0), (19, 0, 0)), ((4, 30, 0), (27, 0, 0))),
            'pixel': ()
        }
    elif region == 2:
        region_limit = {
            'wcs': (((4, 30, 0), (19, 0, 0)), ((3, 50, 0), (29, 0, 0))),
            'pixel': ()
        }
    elif region == 3:
        region_limit = {
            'wcs': (((4, 30, 0), (29, 0, 0)), ((3, 50, 0), (33, 0, 0))),
            'pixel': ()
        }
    else:
        region_limit = None

    # define directory locations
    # --------------------------
    output_dir = '/d/bip3/ezbc/multicloud/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/multicloud/data/av/'
    hi_dir = '/d/bip3/ezbc/multicloud/data/hi/'
    co_dir = '/d/bip3/ezbc/multicloud/data/co/'
    core_dir = '/d/bip3/ezbc/multicloud/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    # load Planck Av and GALFA HI images, on same grid
    if av_data_type == 'lee12_2mass':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_2mass_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'lee12_iris':
        print('\nLoading Lee+12 data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_lee12_iris_regrid_planckres.fits',
                return_header=True)
        av_image_error = 0.1 * np.ones(av_image.shape)
    elif av_data_type == 'planck_rad':
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)
        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)
    else:
        print('\nLoading Planck data...')
        av_image, av_header = load_fits(av_dir + \
                    'multicloud_av_planck_tau353_5arcmin.fits',
                return_header=True)

        av_image_error, av_error_header = load_fits(av_dir + \
                    'multicloud_av_error_planck_tau353_5arcmin.fits',
                return_header=True)

    hi_cube, hi_header = load_fits(hi_dir + \
                'multicloud_hi_galfa_cube_regrid_planckres.fits',
            return_header=True)

    co_data, co_header = load_fits(co_dir + \
                'multicloud_co_cfa_cube_regrid_planckres.fits',
            return_header=True)

    # Prepare data products
    # ---------------------
    # Load global properties of cloud
    # global properties written from script
    # 'av/multicloud_analysis_global_properties.txt'
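    # NOTE: likelihood_filename and results_filename are assumed to be defined
    # at module level; they are not created in this function, so appending the
    # region suffix below would otherwise raise a NameError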
    if region is not None:
        likelihood_filename += '_region{0:.0f}'.format(region)
        results_filename += '_region{0:.0f}'.format(region)
    with open(property_dir + prop_file + '.txt', 'r') as f:
        props = json.load(f)

    if vel_range is not None:
        props['hi_velocity_range'] = vel_range
    else:
        vel_range = props['hi_velocity_range']

    # make velocity axis for hi cube
    velocity_axis = make_velocity_axis(hi_header)
    # make velocity axis for co cube
    co_velocity_axis = make_velocity_axis(co_header)
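    # (make_velocity_axis is assumed to build the spectral axis in km/s from
    #  the cube header's CRVAL3/CDELT3/CRPIX3 keywords)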

    # Load the HI noise cube if it exists, else make it
    if not path.isfile(hi_dir + noise_cube_filename):
        hi_noise_cube = calculate_noise_cube(cube=hi_cube,
                                             velocity_axis=velocity_axis,
                                             velocity_noise_range=[90, 110],
                                             header=hi_header,
                                             Tsys=30.,
                                             filename=hi_dir +
                                             noise_cube_filename)
    else:
        hi_noise_cube, noise_header = fits.getdata(hi_dir +
                                                   noise_cube_filename,
                                                   header=True)

    # create nhi image
    nhi_image = calculate_nhi(cube=hi_cube,
                              velocity_axis=velocity_axis,
                              velocity_range=vel_range,
                              header=hi_header,
                              noise_cube=hi_noise_cube)
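    # Assumed behavior of calculate_nhi: integrate the brightness temperature
    # over vel_range and scale to a column density; in the optically thin
    # limit N(HI) [cm^-2] ~= 1.823e18 * integral(T_B dv) [K km/s], with pixels
    # below the noise cube presumably masked.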

    props['plot_limit']['wcs'] = (((5, 20, 0), (19, 0, 0)), ((2, 30, 0),
                                                             (37, 0, 0)))

    props['region_name_pos'] = {
        #'taurus 1' : {'wcs' : ((3, 50,  0),
        #                       (21.5, 0, 0)),
        #             },
        #'taurus 2' : {'wcs' : ((5, 10,  0),
        #                       (21.5, 0, 0)),
        #             },
        'taurus': {
            'wcs': ((4, 40, 0), (21, 0, 0)),
        },
        'perseus': {
            'wcs': ((3, 30, 0), (26, 0, 0)),
        },
        #'perseus 1' : {'wcs' : ((3, 0,  0),
        #                      (34, 0, 0)),
        #             },
        #'perseus 2' : {'wcs' : ((3, 10,  0),
        #                      (22.5, 0, 0)),
        #             },
        'california': {
            'wcs': ((4, 28, 0), (34, 0, 0)),
        },
    }

    # Change WCS coords to pixel coords of images
    props = convert_limit_coordinates(props,
                                      header=av_header,
                                      coords=('region_limit',
                                              'co_noise_limits', 'plot_limit',
                                              'region_name_pos'))

    props['plot_limit']['wcs'] = [
        15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5
    ]
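    # Plot limits in degrees: RA limits (hours converted at 15 deg per hour
    # of RA) followed by Dec limits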

    # Load cloud division regions from ds9
    props = load_ds9_region(props,
                            filename=region_dir + 'multicloud_divisions.reg',
                            header=av_header)

    # Derive relevant region
    pix = props['region_limit']['pixel']
    region_vertices = ((pix[1], pix[0]), (pix[1], pix[2]), (pix[3], pix[2]),
                       (pix[3], pix[0]))

    # Create a mask of the analysis region
    region_mask = myg.get_polygon_mask(av_image, region_vertices)

    print('\nRegion size = ' + \
          '{0:.0f} pix'.format(region_mask[region_mask == 1].size))

    if vel_range_type == 'single':
        print('\nHI velocity integration range:')
        print('%.1f to %.1f km/s' % (vel_range[0], vel_range[1]))
    elif vel_range_type == 'multiple':
        print('\nHI velocity integration ranges:')
        for i in range(0, vel_range.shape[0]):
            print('%.1f to %.1f km/s' % (vel_range[i, 0], vel_range[i, 1]))

    # Plot
    figure_types = ['png', 'pdf']
    for figure_type in figure_types:
        if region is None:
            if vel_range_type == 'single':
                filename = 'multicloud_av_nhi_map' + \
                    '.%s' % figure_type
                #av_data_type + \
                #'dgr{0:.3f}_'.format(dgr) + \
                #'{0:.1f}to{1:.1f}kms'.format(vel_range[0], vel_range[1]) + \
                #'_' + \
            elif vel_range_type == 'multiple':
                filename = 'multiple_vel_range/multicloud_av_model_map' + \
                           'dgr{0:.3f}'.format(dgr)
                for i in range(0, vel_range.shape[0]):
                    filename += '_{0:.1f}to{1:.1f}kms'.format(
                        vel_range[i, 0], vel_range[i, 1])
                filename += '.%s' % figure_type
        else:
            filename = 'multicloud_av_model_map_region{0:.0f}'.format(region) + \
                       '.{0:s}'.format(figure_type)

        filename = 'av_map'
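        # NOTE: the filenames constructed above are discarded; the fixed
        # 'av_map' name is used for the output figure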
        filename = figure_dir + 'maps/' + filename + '.' + figure_type
        print('\nSaving Av model image to \n' + filename)

        plot_av_image(
            av_image=av_image,
            header=av_header,
            limits=[15 * (5 + 20. / 60), 15 * (2 + 30. / 60.), 17, 38.5],
            limits_type='wcs',
            regions=props['regions'],
            props=props,
            av_vlimits=(0, 15.5),
            filename=filename,
            show=False)

        if 0:
            filename = 'av_nhi_map'
            filename = figure_dir + 'maps/' + filename + '.' + figure_type
            print('\nSaving NHI + Av maps to \n' + filename)
            plot_nhi_image(
                nhi_image=nhi_image,
                header=av_header,
                av_image=av_image,
                limits=props['plot_limit']['wcs'],
                limits_type='wcs',
                regions=props['regions'],
                props=props,
                hi_vlimits=(0, 20),
                av_vlimits=(0, 15.5),
                #av_vlimits=(0.1,30),
                filename=filename,
                show=False)
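
    # Sketch of how this script might be invoked (not in the original source);
    # with dgr and vel_range left as None, the best-fit values are pulled from
    # the global property file, as described in the docstring:
    #
    #     if __name__ == '__main__':
    #         main(vel_range_type='single', av_data_type='planck')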