def run_analysis(cloud_name):

    import numpy as np
    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import myio

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    likelihood_dir = \
            '/d/bip3/ezbc/' + cloud_name + '/data/python_output/nhi_av/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    av_planck_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_error_filename = av_dir + \
       cloud_name + '_av_error_planck_tau353_5arcmin.fits'
    av_k09_filename = av_dir + \
       cloud_name + '_av_k09_regrid_planckres.fits'
    if cloud_name == 'perseus':
        av_lee12_filename = av_dir + \
           cloud_name + '_av_lee12_2mass_regrid_planckres.fits'
    else:
        av_lee12_filename = None

    av_error = 0.4

    av_background = 0.0

    # Get data
    av_data_pl, av_header_pl = fits.getdata(av_planck_filename, header=True)
    av_data_k09, av_header_k09 = fits.getdata(av_k09_filename, header=True)
    if cloud_name == 'perseus':
        av_data_lee12, av_header_lee12 = fits.getdata(av_lee12_filename,
                                                      header=True)
    else:
        av_data_lee12, av_header_lee12 = None, None

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data_pl,
                                   av_header_pl,
                                   region_name=cloud_name)

    av_data_pl[region_mask] = np.nan
    av_data_k09[region_mask] = np.nan
    if cloud_name == 'perseus':
        av_data_lee12[region_mask] = np.nan

    # debugging: quick look at the masked Planck A_V map
    if 0:
        import matplotlib.pyplot as plt
        plt.close()
        plt.clf()
        plt.imshow(av_data_pl, origin='lower')
        plt.savefig('/usr/users/ezbc/Desktop/avmap.png')

    #hi_data, hi_header = fits.getdata(hi_filename, header=True)
    #co_data, co_header = fits.getdata(co_filename, header=True)
    #hi_vel_axis = make_velocity_axis(hi_header)

    filename = figure_dir + 'av/' + cloud_name + '_planck_vs_k09.png'
    plot_planck_vs_2mass(
        av_data_k09,
        av_data_pl,
        filename=filename,
        title=cloud_name,
        fit_labels=[r'$A_{V,{\rm Planck}}$', r'$A_{V,{\rm K+09}}$'],
        labels=[
            r'$A_{V,{\rm K+09\ 2MASS}}$ [mag]', r'$A_{V,{\rm Planck}}$ [mag]'
        ],
        limits=[-1, 20, -1, 20])
    if cloud_name == 'perseus':
        filename = figure_dir + 'av/' + cloud_name + '_lee12_vs_k09.png'
        plot_planck_vs_2mass(
            av_data_k09,
            av_data_lee12,
            filename=filename,
            title=cloud_name,
            fit_labels=[r'$A_{V,{\rm Lee+12}}$', r'$A_{V,{\rm K+09}}$'],
            labels=[
                r'$A_{V,{\rm K+09\ 2MASS}}$ [mag]',
                r'$A_{V,{\rm Lee+12\ 2MASS}}$ [mag]'
            ],
            limits=[-1, 12, -1, 12])

        filename = figure_dir + 'av/' + cloud_name + '_planck_vs_lee12.png'
        plot_planck_vs_2mass(
            av_data_lee12,
            av_data_pl,
            filename=filename,
            title=cloud_name,
            fit_labels=[r'$A_{V,{\rm Lee+12}}$',
                        r'$A_{V,{\rm Planck}}$'][::-1],
            labels=[
                r'$A_{V,{\rm Planck}}$ [mag]',
                r'$A_{V,{\rm Lee+12\ 2MASS}}$ [mag]'
            ][::-1],
            limits=[-1, 12, -1, 12])
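

# A minimal usage sketch (not part of the original script): drive run_analysis
# over the clouds listed in main()'s CLOUD_NAMES.
def run_all_clouds(cloud_names=('california', 'perseus', 'taurus')):
    for cloud_name in cloud_names:
        run_analysis(cloud_name)
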
def main():

    from astropy.io import fits
    from myimage_analysis import calc_region_mask

    # define constants
    DIR_FIGURES = '/d/bip3/ezbc/multicloud/figures/'
    DIR_RESULTS = '/d/bip3/ezbc/multicloud/data/python_output/bootstrap_results/'
    DIR_AV = '/d/bip3/ezbc/multicloud/data/av/'
    DIR_BETA = '/d/bip3/ezbc/multicloud/data/dust_temp/'
    DIR_REGION = '/d/bip3/ezbc/multicloud/data/python_output/regions/'

    FILENAME_EXT = '_planck_noint_gaussrange_isotropic_bootstrap_results.pickle'
    FILENAME_PLOT_BASE = DIR_FIGURES + 'dust/av_vs_beta'
    PLOT_FILETYPES = ['png', 'pdf']
    CLOUD_NAMES = ['california', 'perseus', 'taurus']

    beta, beta_header = fits.getdata(DIR_BETA +
                                     'multicloud_dust_beta_5arcmin.fits',
                                     header=True)

    temp, temp_header = fits.getdata(DIR_BETA +
                                     'multicloud_dust_temp_5arcmin.fits',
                                     header=True)
    av, av_header = fits.getdata(DIR_AV +
                                 #'multicloud_av_planck_5arcmin.fits',
                                 'multicloud_av_k09_nan_regrid_planckres.fits',
                                 header=True)

    region_filename = DIR_REGION + 'multicloud_divisions.reg'

    # get the data needed to plot
    plot_dict = {}
    for cloud_name in CLOUD_NAMES:
        plot_dict[cloud_name] = {}
        cloud_dict = plot_dict[cloud_name]

        # get mask for data
        region_mask = calc_region_mask(region_filename,
                                       av,
                                       av_header,
                                       region_name=cloud_name)

        # load the analysis
        results = load_results(DIR_RESULTS + cloud_name + FILENAME_EXT)

        hi_sd = results['data_products']['hi_sd']
        h2_sd = results['data_products']['h2_sd']

        cloud_dict['contour_image'] = None  # hi_sd
        cloud_dict['contours'] = [4, 8]
        cloud_dict['h2_sd'] = h2_sd
        cloud_dict['header'] = results['data']['av_header']
        cloud_dict['beta'] = beta[~region_mask]
        cloud_dict['av'] = av[~region_mask]

        if cloud_name == 'california':
            plot_dict[cloud_name]['limits'] = [75, 60, 30, 38]
        if cloud_name == 'perseus':
            plot_dict[cloud_name]['limits'] = [61, 45, 24, 36]
        if cloud_name == 'taurus':
            plot_dict[cloud_name]['limits'] = [75, 57, 20, 33]

    # plot the 3-panel A_V vs. dust beta grid (one panel per cloud)
    for filetype in PLOT_FILETYPES:
        plot_av_vs_beta_grid(plot_dict,
                             filename=FILENAME_PLOT_BASE + '.' + filetype,
                             poly_fit=True,
                             #vlimits=[-0.1, 50],
                             )
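

# Conventional entry point (an assumption; the excerpt does not show how
# main() is invoked).
if __name__ == '__main__':
    main()
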
def run_analysis(cloud_name):

    import numpy as np
    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import myio

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    likelihood_dir = \
            '/d/bip3/ezbc/' + cloud_name + '/data/python_output/nhi_av/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    av_planck_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_error_filename = av_dir + \
       cloud_name + '_av_error_planck_tau353_5arcmin.fits'
    av_k09_filename = av_dir + \
       cloud_name + '_av_k09_regrid_planckres.fits'
    if cloud_name == 'perseus':
        av_lee12_filename = av_dir + \
           cloud_name + '_av_lee12_2mass_regrid_planckres.fits'
    else:
        av_lee12_filename = None

    av_error = 0.4

    av_background = 0.0

    # Get data
    av_data_pl, av_header_pl = fits.getdata(av_planck_filename, header=True)
    av_data_k09, av_header_k09 = fits.getdata(av_k09_filename, header=True)
    if cloud_name == 'perseus':
        av_data_lee12, av_header_lee12 = fits.getdata(av_lee12_filename,
                                                      header=True)
    else:
        av_data_lee12, av_header_lee12 = None, None

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data_pl,
                                   av_header_pl,
                                   region_name=cloud_name)

    av_data_pl[region_mask] = np.nan
    av_data_k09[region_mask] = np.nan
    if cloud_name == 'perseus':
        av_data_lee12[region_mask] = np.nan

    # debugging: quick look at the masked Planck A_V map
    if 0:
        import matplotlib.pyplot as plt
        plt.close(); plt.clf()
        plt.imshow(av_data_pl, origin='lower')
        plt.savefig('/usr/users/ezbc/Desktop/avmap.png')

    #hi_data, hi_header = fits.getdata(hi_filename, header=True)
    #co_data, co_header = fits.getdata(co_filename, header=True)
    #hi_vel_axis = make_velocity_axis(hi_header)

    filename = figure_dir + 'av/' + cloud_name + '_planck_vs_k09.png'
    plot_planck_vs_2mass(av_data_k09, av_data_pl,
                         filename=filename,
                         title=cloud_name,
                         fit_labels=[r'$A_{V,{\rm Planck}}$',
                                     r'$A_{V,{\rm K+09}}$'],
                         labels=[r'$A_{V,{\rm K+09\ 2MASS}}$ [mag]',
                                 r'$A_{V,{\rm Planck}}$ [mag]'],
                         limits=[-1, 20, -1, 20])
    if cloud_name == 'perseus':
        filename = figure_dir + 'av/' + cloud_name + '_lee12_vs_k09.png'
        plot_planck_vs_2mass(av_data_k09, av_data_lee12,
                             filename=filename,
                             title=cloud_name,
                             fit_labels=[r'$A_{V,{\rm Lee+12}}$',
                                         r'$A_{V,{\rm K+09}}$'],
                             labels=[r'$A_{V,{\rm K+09\ 2MASS}}$ [mag]',
                                     r'$A_{V,{\rm Lee+12\ 2MASS}}$ [mag]'],
                             limits=[-1, 12, -1, 12])

        filename = figure_dir + 'av/' + cloud_name + '_planck_vs_lee12.png'
        plot_planck_vs_2mass(av_data_lee12, av_data_pl,
                             filename=filename,
                             title=cloud_name,
                             fit_labels=[r'$A_{V,{\rm Lee+12}}$',
                                 r'$A_{V,{\rm Planck}}$'][::-1],
                             labels=[r'$A_{V,{\rm Planck}}$ [mag]',
                                 r'$A_{V,{\rm Lee+12\ 2MASS}}$ [mag]'][::-1],
                             limits=[-1, 12, -1, 12])
Example #4
def run_cloud_analysis(global_args):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import numpy as np
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
        'figure_dir': figure_dir,
        'cloud_name': cloud_name,
        'filename_base': filename_base,
        'plot_diagnostics': global_args['plot_diagnostics'],
        #'av_nhi_contour': av_nhi_contour,
        'av_nhi_contour': True,
        'av_nhi_limits': [0, 20, -1, 9],
        #'av_nhi_limits': None,
    }

    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])

    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)

    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110, -90, 90, 110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)

    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            g = Gaussian2DKernel(std)
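            # Worked numbers (a check, not in the original): co_res = 8.4/5
            # = 1.68 pix, so width = sqrt(1.68**2 - 1**2) ~= 1.35 pix and
            # std = 1.35 / 2.355 ~= 0.57 pix.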

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in range(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in range(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')
        else:
            # without smoothing, use the native-resolution cubes
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        gauss_fits = None
        comp_num = None
        hi_range_error = None  # no fit, so no range uncertainty
        velocity_range_dr1 = velocity_range
        gauss_fits_dr1 = None
        comp_num_dr1 = None

    hi_range_kwargs = {
        'velocity_range': velocity_range,
        'gauss_fits': gauss_fits,
        'comp_num': comp_num,
        'hi_range_error': hi_range_error,
        'vel_range': velocity_range,
        'gauss_fit_kwargs': gauss_fit_kwargs,
    }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_spectrum,
        gauss_fits=gauss_fits,
        comp_num=comp_num,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(
        hi_dr1_spectrum,
        hi_vel_axis,
        hi_std_spectrum=hi_std_dr1_spectrum,
        gauss_fits=gauss_fits_dr1,
        comp_num=comp_num_dr1,
        co_spectrum=co_spectrum,
        co_vel_axis=co_vel_axis,
        vel_range=velocity_range_dr1,
        filename=filename,
        limits=[-50, 30, -10, 70],
    )

    # override the derived ranges with a fixed window for the N(HI) maps
    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask pixels that are negative (erroneous) in both maps
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot the ratio of the DR2 to DR1 N(HI) maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(
        nhi_image=nhi_image / nhi_image_dr1,
        header=hi_header,
        limits=[65, 45, 25, 35],
        filename=filename,
        show=0,
        cb_text='DR2 / DR1',
        #hi_vlimits=[0.91, 0.93],
    )
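

# Illustrative only: run_cloud_analysis is driven by a global_args dict built
# elsewhere in this module. The keys below are the ones read in the excerpt
# above, with placeholder values; create_filename_base() and
# get_gauss_fit_kwargs() may require additional keys.
example_global_args = {
    'cloud_name': 'perseus',
    'region': None,
    'region_name': 'perseus',
    'load': False,
    'data_type': 'planck',
    'background_subtract': False,
    'plot_diagnostics': False,
    'clobber_hi_error': False,
    'clobber_spectra': False,
    'smooth_hi_to_co_res': True,
    'hi_range_calc': 'gaussian',
}
#run_cloud_analysis(example_global_args)
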
def run_cloud_analysis(global_args):

    from astropy.io import fits
    from myimage_analysis import calculate_nhi, calc_region_mask
    import myimage_analysis as myia
    from mycoords import make_velocity_axis
    from mystats import calc_symmetric_error, calc_logL
    import numpy as np
    import os
    import myio
    import pickle
    import mystats

    cloud_name = global_args['cloud_name']
    region = global_args['region']
    load = global_args['load']
    data_type = global_args['data_type']
    background_subtract = global_args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/multicloud/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    dust_temp_dir = '/d/bip3/ezbc/' + cloud_name + '/data/dust_temp/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/regions/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    results_dir = '/d/bip3/ezbc/multicloud/data/python_output/'

    av_filename = av_dir + \
       cloud_name + '_av_planck_tau353_5arcmin.fits'
    av_data, av_header = fits.getdata(av_filename, header=True)

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_dr1_filename = hi_dir + \
       cloud_name + '_hi_galfa_dr1_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    # Get the filename base to differentiate between different parameters
    filename_base, global_args = create_filename_base(global_args)

    # set up plotting variables
    plot_kwargs = {
                   'figure_dir': figure_dir,
                   'cloud_name': cloud_name,
                   'filename_base': filename_base,
                   'plot_diagnostics': global_args['plot_diagnostics'],
                   #'av_nhi_contour': av_nhi_contour,
                   'av_nhi_contour': True,
                   'av_nhi_limits': [0, 20, -1, 9],
                   #'av_nhi_limits': None,
                    }


    # mask data
    region_filename = region_dir + 'multicloud_divisions.reg'
    region_mask = calc_region_mask(region_filename,
                                   av_data,
                                   av_header,
                                   region_name=global_args['region_name'])


    # Load HI and CO cubes
    hi_data, hi_header = fits.getdata(hi_filename, header=True)
    hi_dr1_data, hi_dr1_header = fits.getdata(hi_dr1_filename, header=True)
    co_data, co_header = fits.getdata(co_filename, header=True)


    #hi_data[:, region_mask] = np.nan
    #hi_dr1_data[:, region_mask] = np.nan
    #co_data[:, region_mask] = np.nan

    hi_vel_axis = make_velocity_axis(hi_header)
    co_vel_axis = make_velocity_axis(co_header)

    # Load HI error
    if global_args['clobber_hi_error']:
        print('\n\tCalculating HI noise cube...')
        os.system('rm -rf ' + hi_error_filename)
        hi_data_error = \
            myia.calculate_noise_cube(cube=hi_data,
                                      velocity_axis=hi_vel_axis,
                                      velocity_noise_range=[-110, -90, 90, 110],
                                      Tsys=30.0,
                                      filename=hi_error_filename)
    else:
        hi_data_error = fits.getdata(hi_error_filename)


    # Derive N(HI)
    # -------------------------------------------------------------------------
    # get fit kwargs
    gauss_fit_kwargs, ncomps_in_cloud = get_gauss_fit_kwargs(global_args)

    # derive spectra or load
    spectra_filename = results_dir + 'spectra/' + global_args['cloud_name'] + \
            '_spectra.pickle'
    spectra_dr1_filename = results_dir + 'spectra/' + \
                           global_args['cloud_name'] + \
                           '_spectra_dr1.pickle'
    load_spectra = myio.check_file(spectra_filename,
                                   clobber=global_args['clobber_spectra'])
    if load_spectra:
        hi_spectrum, hi_std_spectrum, co_spectrum = \
                myio.load_pickle(spectra_filename)
        hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum = \
                myio.load_pickle(spectra_dr1_filename)
    else:
        print('\n\tCalculating spectra...')
        if global_args['smooth_hi_to_co_res']:
            from astropy.convolution import Gaussian2DKernel, convolve
            # Create kernel
            # one pix = 5 arcmin, need 8.4 arcmin for CO res
            # The beamsize is the FWHM. The convolution kernel needs the
            # standard deviation
            hi_res = 1.0
            co_res = 8.4 / 5.0
            width = (co_res**2 - hi_res**2)**0.5
            std = width / 2.355
            g = Gaussian2DKernel(std)

            # Convolve data
            hi_data_co_res = np.zeros(hi_data.shape)
            for i in range(hi_data.shape[0]):
                hi_data_co_res[i, :, :] = \
                    convolve(hi_data[i, :, :], g, boundary='extend')

            hi_dr1_data_co_res = np.zeros(hi_dr1_data.shape)
            for i in range(hi_dr1_data.shape[0]):
                hi_dr1_data_co_res[i, :, :] = \
                    convolve(hi_dr1_data[i, :, :], g, boundary='extend')
        else:
            # without smoothing, use the native-resolution cubes
            hi_data_co_res = hi_data
            hi_dr1_data_co_res = hi_dr1_data

        hi_spectrum = myia.calc_spectrum(hi_data_co_res)
        hi_std_spectrum = myia.calc_spectrum(hi_data_co_res,
                                             statistic=np.nanstd)
        hi_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res)
        hi_std_dr1_spectrum = myia.calc_spectrum(hi_dr1_data_co_res,
                                                 statistic=np.nanstd)
        co_spectrum = myia.calc_spectrum(co_data)
        myio.save_pickle(spectra_filename,
                         (hi_spectrum, hi_std_spectrum, co_spectrum))
        myio.save_pickle(spectra_dr1_filename,
                         (hi_dr1_spectrum, hi_std_dr1_spectrum, co_spectrum))

    if global_args['hi_range_calc'] == 'gaussian':
        velocity_range, gauss_fits, comp_num, hi_range_error = \
                calc_hi_vel_range(hi_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
        global_args['vel_range_error'] = hi_range_error
        velocity_range_dr1, gauss_fits_dr1, comp_num_dr1, hi_range_error_dr1 = \
                calc_hi_vel_range(hi_dr1_spectrum,
                                  hi_vel_axis,
                                  gauss_fit_kwargs,
                                  co_spectrum=co_spectrum,
                                  co_vel_axis=co_vel_axis,
                                  ncomps=ncomps_in_cloud,
                                  )
    else:
        velocity_range = [-5, 15]
        gauss_fits = None
        comp_num = None
        hi_range_error = None  # no fit, so no range uncertainty
        velocity_range_dr1 = velocity_range
        gauss_fits_dr1 = None
        comp_num_dr1 = None

    hi_range_kwargs = {
                       'velocity_range': velocity_range,
                       'gauss_fits': gauss_fits,
                       'comp_num': comp_num,
                       'hi_range_error': hi_range_error,
                       'vel_range': velocity_range,
                       'gauss_fit_kwargs': gauss_fit_kwargs,
                       }

    # plot the results
    # --------------------------------------------------------------------------
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr2.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_spectrum,
                 gauss_fits=gauss_fits,
                 comp_num=comp_num,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # DR1 data
    filename = plot_kwargs['figure_dir'] + \
               'spectra/' + plot_kwargs['filename_base'] + \
               '_spectra_dr1.png'
    print('Saving\neog ' + filename + ' &')
    plot_spectra(hi_dr1_spectrum,
                 hi_vel_axis,
                 hi_std_spectrum=hi_std_dr1_spectrum,
                 gauss_fits=gauss_fits_dr1,
                 comp_num=comp_num_dr1,
                 co_spectrum=co_spectrum,
                 co_vel_axis=co_vel_axis,
                 vel_range=velocity_range_dr1,
                 filename=filename,
                 limits=[-50, 30, -10, 70],
                 )

    # override the derived ranges with a fixed window for the N(HI) maps
    velocity_range = [0, 15]
    velocity_range_dr1 = [0, 15]
    # use the vel range to derive N(HI)
    nhi_image, nhi_image_error = \
        calculate_nhi(cube=hi_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range,
                      noise_cube=hi_data_error,
                      return_nhi_error=True,
                      )
    # use the vel range to derive N(HI)
    nhi_image_dr1 = \
        calculate_nhi(cube=hi_dr1_data,
                      velocity_axis=hi_vel_axis,
                      velocity_range=velocity_range_dr1,
                      )

    # mask pixels that are negative (erroneous) in both maps
    mask_nhi = (nhi_image < 0) & (nhi_image_dr1 < 0)
    nhi_image[mask_nhi] = np.nan
    nhi_image_dr1[mask_nhi] = np.nan

    # Plot the ratio of the DR2 to DR1 N(HI) maps
    filename = plot_kwargs['figure_dir'] + \
               'maps/' + plot_kwargs['filename_base'] + \
               '_nhi_dr2_dr1_residuals.png'
    print('Saving\neog ' + filename + ' &')
    plot_nhi_image(nhi_image=nhi_image / nhi_image_dr1,
                   header=hi_header,
                   limits=[65, 45, 25, 35],
                   filename=filename,
                   show=0,
                   cb_text='DR2 / DR1',
                   #hi_vlimits=[0.91, 0.93],
                   )