Example 1
            def test_mle_derivation(self):

                from numpy.testing import assert_almost_equal
                from myimage_analysis import calculate_nhi

                dgr = 0.1  # cm^2 10^-20 mag
                intercept = 1  # mag
                width = 20  # km/s

                vel_range = (self.cloud.vel_center - width / 2.0,
                             self.cloud.vel_center + width / 2.0)

                nhi_image = calculate_nhi(cube=self.cloud.hi_data,
                                          velocity_axis=self.cloud.hi_vel_axis,
                                          velocity_range=vel_range)

                # Create mock Av data and rerun the full analysis (toggle to 1
                # to regenerate the saved cloud); otherwise load the pickled cloud.
                if 0:
                    av_data_mock = dgr * nhi_image + intercept

                    self.cloud.av_data = av_data_mock

                    self.cloud.run_analysis(
                        region_filename=self.region_filename,
                        region=self.region)

                    print('\nSaving cloud...')
                    cloudpy.save(self.cloud, self.cloud_filename)
                else:
                    self.cloud = cloudpy.load(self.cloud_filename)

                dgr_mle = self.cloud.props['dust2gas_ratio_max']['value']
                intercept_mle = self.cloud.props['intercept_max']['value']
                width_mle = self.cloud.props['hi_velocity_width_max']['value']

                assert_almost_equal(dgr_mle, dgr, decimal=1)
                assert_almost_equal(intercept_mle, intercept, decimal=1)
                assert_almost_equal(width_mle, width, decimal=-1)
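The test above builds mock Av data from the linear relation Av = DGR * N(HI) + intercept and checks that the analysis recovers the input parameters. As a minimal illustration of why that recovery works (not cloudpy's actual likelihood grid search), a plain least-squares fit on similar mock data returns the same DGR and intercept; the array names below are hypothetical stand-ins for the test's maps:

import numpy as np

# Mock N(HI) map and the corresponding Av map, using the test's values
# dgr = 0.1 and intercept = 1.
rng = np.random.RandomState(0)
nhi_mock = rng.uniform(1.0, 30.0, size=1000)   # N(HI) in 10^20 cm^-2
av_mock = 0.1 * nhi_mock + 1.0                 # Av in mag

# A degree-1 polynomial fit recovers the slope (DGR) and intercept.
dgr_fit, intercept_fit = np.polyfit(nhi_mock, av_mock, deg=1)
print(dgr_fit, intercept_fit)                  # ~0.1, ~1.0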
Example 2
            def test_mle_derivation(self):

                from numpy.testing import assert_almost_equal
                from myimage_analysis import calculate_nhi

                dgr = 0.1  # cm^2 10^-20 mag
                intercept = 1  # mag
                width = 20  # km/s

                vel_range = (self.cloud.vel_center - width / 2.0,
                             self.cloud.vel_center + width / 2.0)

                nhi_image = calculate_nhi(cube=self.cloud.hi_data,
                                          velocity_axis=self.cloud.hi_vel_axis,
                                          velocity_range=vel_range)

                # Create mock Av data and rerun the full analysis (toggle to 1
                # to regenerate the saved cloud); otherwise load the pickled cloud.
                if 0:
                    av_data_mock = dgr * nhi_image + intercept

                    self.cloud.av_data = av_data_mock

                    self.cloud.run_analysis(region_filename=self.region_filename,
                                            region=self.region)

                    print('\nSaving cloud...')
                    cloudpy.save(self.cloud, self.cloud_filename)
                else:
                    self.cloud = cloudpy.load(self.cloud_filename)

                dgr_mle = self.cloud.props['dust2gas_ratio_max']['value']
                intercept_mle = self.cloud.props['intercept_max']['value']
                width_mle = self.cloud.props['hi_velocity_width_max']['value']

                assert_almost_equal(dgr_mle, dgr, decimal=1)
                assert_almost_equal(intercept_mle, intercept, decimal=1)
                assert_almost_equal(width_mle, width, decimal=-1)
Example 3
            def test_run_analysis(self):

                self.cloud.run_analysis(region_filename=self.region_filename,
                                        region=self.region)

                props, iter_vars = self.cloud.props, self.cloud.iter_vars

                print('\nSaving props...')
                cloudpy.save(props,
                             '/d/bip3/ezbc/multicloud/data/props.pickle')
                cloudpy.save(iter_vars,
                             '/d/bip3/ezbc/multicloud/data/iter_vars.pickle')

                print('\nSaving cloud...')
                cloudpy.save(self.cloud, self.cloud_filename)
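The pickles saved above can presumably be read back with cloudpy.load, as the other tests in this listing do; a minimal sketch using the same paths (assumes cloudpy is importable and the files exist):

# Reload the saved properties and iteration variables in a later session.
props = cloudpy.load('/d/bip3/ezbc/multicloud/data/props.pickle')
iter_vars = cloudpy.load('/d/bip3/ezbc/multicloud/data/iter_vars.pickle')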
Example 4
            def test_run_analysis(self):

                self.cloud.run_analysis(region_filename=self.region_filename,
                                        region=self.region)

                props, iter_vars = self.cloud.props, self.cloud.iter_vars

                print('\nSaving props...')
                cloudpy.save(props,
                             '/d/bip3/ezbc/multicloud/data/props.pickle')
                cloudpy.save(iter_vars,
                             '/d/bip3/ezbc/multicloud/data/iter_vars.pickle')

                print('\nSaving cloud...')
                cloudpy.save(self.cloud, self.cloud_filename)
Example 5
def run_cloud_analysis(args,
                       cloud_name='perseus',
                       region=None,
                       load=False,
                       data_type='planck'):

    # The args dict always overrides the keyword defaults above
    if 1:
        cloud_name = args['cloud_name']
        region = args['region']
        load = args['load']
        data_type = args['data_type']
        background_subtract = args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/' + cloud_name + '/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    likelihood_dir = \
            '/d/bip3/ezbc/' + cloud_name + '/data/python_output/nhi_av/'

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    if cloud_name == 'perseus' and data_type == 'lee12':
        av_filename = av_dir + \
           cloud_name + '_av_lee12_iris_regrid_planckres.fits'
        av_error_filename = None
        av_error = 0.1
        if background_subtract:
            av_background = 0.5
        else:
            av_background = None
    if data_type == 'planck':
        av_filename = av_dir + \
           cloud_name + '_av_planck_tau353_5arcmin.fits'
        av_error_filename = av_dir + \
           cloud_name + '_av_error_planck_tau353_5arcmin.fits'
        av_error = None
        if 0:
            av_error_filename = None
            av_error = 1
        av_background = None
    if cloud_name == 'perseus' and data_type == 'planck_lee12mask':
        av_filename = av_dir + \
           cloud_name + '_av_planck_tau353_5arcmin_lee12mask.fits'
        av_error_filename = av_dir + \
           cloud_name + '_av_error_planck_tau353_5arcmin.fits'
        av_error = None
        av_background = None
    if data_type == 'k09':
        av_filename = av_dir + \
           cloud_name + '_av_k09_regrid_planckres.fits'

        av_error_filename = None
        av_error = 0.4

        av_background = 0.0

    # Build filename-extension components describing the analysis configuration
    if background_subtract:
        background_name = '_backsub'
    else:
        background_name = ''

    if args['bin_image']:
        bin_name = '_binned'
        args['bin_procedure'] = 'all'
    else:
        bin_name = ''
        args['bin_procedure'] = 'none'
    if args['fixed_width'] is None:
        width_name = ''
        init_vel_width = args['init_vel_width']
        vel_center_gauss_fit_kwargs = None
    else:
        if args['fixed_width'] == 'gaussfit':
            if args['cloud_name'] == 'perseus':
                guesses = (28, 3, 5,
                           2, -20, 20)
                ncomps = 2
            elif args['cloud_name'] == 'taurus':
                guesses = (28, 3, 5,
                           5, -30, 20,
                           3, -15, 5,
                           )
                ncomps = 3
            elif args['cloud_name'] == 'california':
                guesses = (50, 3, 5,
                           20, -10, 10,
                           3, -45, 10,
                           #2, -20, 20,
                           )
                ncomps = 3
            vel_center_gauss_fit_kwargs = {'guesses': guesses,
                                           'ncomps': ncomps,
                                           #'width_scale': 2,
                                           }
        else:
            vel_center_gauss_fit_kwargs = None
        width_name = '_fixedwidth'
        init_vel_width = args['fixed_width']
    if args['use_weights']:
        weights_name = '_weights'
        weights_filename = av_dir + \
           cloud_name + '_bin_weights.fits'
    else:
        weights_name = ''
        weights_filename = None
    if args['region'] is None:
        region_name = ''
        region = cloud_name
    else:
        region_name = '_region' + args['region']
        region = cloud_name + args['region']
    if args['av_mask_threshold'] is not None:
        avthres_name = '_avthres'
    else:
        avthres_name = ''
    if not args['use_intercept']:
        intercept_name = '_noint'
    else:
        intercept_name = ''
    if args['recalculate_likelihoods']:
        error_name = '_errorrecalc'
    else:
        error_name = ''
    if args['subtract_comps']:
        compsub_name = '_compsub'
    else:
        compsub_name = ''

    filename_extension = cloud_name + '_' + data_type + background_name + \
            bin_name + weights_name + '_' + args['likelihood_resolution'] + \
            'res' + region_name + width_name + avthres_name + \
            intercept_name + error_name + compsub_name

    # Plot args
    residual_hist_filename_base = figure_dir + 'diagnostics/residuals/' + \
                                  filename_extension + '_residual_hist'
    residual_map_filename_base = figure_dir + 'diagnostics/residuals/' + \
                                  filename_extension + '_residual_map'
    likelihood_filename_base = figure_dir + 'diagnostics/likelihoods/' + \
                                  filename_extension + '_likelihood'
    av_bin_map_filename_base = figure_dir + 'diagnostics/maps/' + \
                                  filename_extension + '_bin_map'

    plot_args = {
            'residual_hist_filename_base': residual_hist_filename_base,
            'residual_map_filename_base': residual_map_filename_base,
            'likelihood_filename_base': likelihood_filename_base,
            'av_bin_map_filename_base' : av_bin_map_filename_base,
            }

    if 0:
        import os
        os.system('rm -rf ' + hi_error_filename)

    region_filename = region_dir + 'multicloud_divisions.reg'


    cloud_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '.pickle'
    cloud_likelihood_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '_likelihoods.npy'
    props_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '_props.pickle'

    # background filenames
    background_filename = av_dir + filename_extension + '_background.fits'
    background_region_filename = background_region_dir + cloud_name + \
                                 '_background.reg'

    mask_filename = av_dir + filename_extension + '_mask.fits'

    # Lee+12 testing
    if args['fixed_width'] == 20 and args['data_type'] == 'lee12':
        vel_center = 5
    else:
        vel_center = None

    diagnostic_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/diagnostics/' + \
            filename_extension + '_diagnostic.txt'

    if args['likelihood_resolution'] == 'fine':
        if args['fixed_width'] is not None:
            width_grid = np.array((args['fixed_width'],))
        else:
            width_grid = np.arange(1, 70, 2*0.16667)
        #width_grid = np.arange(30, 70, 2*0.16667)
        dgr_grid = np.arange(0.0, 0.2, 2e-4)
        #dgr_grid = np.arange(0.05, 0.15, 2e-4)
        intercept_grid = np.arange(-1, 1, 0.005)
        #intercept_grid = np.arange(-1, 0., 0.005)
    elif args['likelihood_resolution'] == 'coarse':
        if args['fixed_width'] is not None:
            width_grid = np.array((args['fixed_width'],))
        else:
            width_grid = np.arange(1, 50, 2*0.16667)
        #width_grid = np.arange(100, 101, 1)
        dgr_grid = np.arange(0.000, 0.6, 3e-3)
        if args['use_intercept']:
            intercept_grid = np.arange(-4, 4, 0.1)
        else:
            intercept_grid = np.array((0,))

        #dgr_grid = np.arange(0.1, 0.5, 3e-3)
        #intercept_grid = np.arange(-5, 1, 0.1)
        #intercept_grid = np.array((0.9,))
    else:
        raise ValueError('likelihood_resolution should be either ' + \
                         '"coarse" or "fine"')

    # Number of native (5 arcmin) pixels per binned pixel:
    # bin size in degrees * 60 arcmin/degree / 5 arcmin/pixel = 0.5 * 60 / 5 = 6
    binsize = 0.5 * 60.0 / 5.0

    if load:
        try:
            if not args['load_props']:
                print('\nAttempting to load cloud from file \n' + \
                      cloud_filename)
                cloud = cloudpy.load(
                    cloud_filename,
                    binary_likelihood_filename=cloud_likelihood_filename,
                    load_fits=True)
                cloudpy.save(cloud.props, props_filename)
                props = cloud.props
            else:
                print('\nAttempting to load cloud props from file \n' + \
                      props_filename)
                cloud = None
                props = cloudpy.load(props_filename)
            run_analysis = False
        except (EOFError, IOError):
            print('\nLoading failed, performing analysis')
            run_analysis = True
    else:
        run_analysis = True

    if run_analysis:
        print('\n\nPerforming analysis on ' + cloud_name)

        if cloud_name == 'california':
            if args['background_subtract']:
                print('\nPerforming a background subtraction...')
                av_filename_back = av_filename.replace('.fits', '_bin.fits')
                av_background = perform_background_subtraction(
                    av_filename_back,
                    background_dim=2,
                    #background_init=0.9,
                    background_filename=background_filename,
                    background_region_filename=background_region_filename)
                #intercept_grid = np.arange(0, 1, 1)
                # Override the derived background level with a fixed value
                av_background = 0.9
                #results['av_background'] = av_background


        cloud = cloudpy.Cloud(av_filename,
                              hi_filename,
                              av_error_filename=av_error_filename,
                              av_error=av_error,
                              av_background=av_background,
                              mask_filename=mask_filename,
                              hi_error_filename=hi_error_filename,
                              cloud_prop_filename=prop_filename,
                              dgr_grid=dgr_grid,
                              intercept_grid=intercept_grid,
                              width_grid=width_grid,
                              residual_width_scale=3,
                              threshold_delta_dgr=0.001,
                              #threshold_delta_dgr=1,
                              hi_noise_vel_range=[90,110],
                              vel_range_diff_thres=2,
                              init_vel_width=init_vel_width,
                              vel_center=vel_center,
                              vel_center_gauss_fit_kwargs=\
                                      vel_center_gauss_fit_kwargs,
                              subtract_comps=args['subtract_comps'],
                              verbose=True,
                              clobber_likelihoods=True,
                              recalculate_likelihoods=\
                                      args['recalculate_likelihoods'],
                              binsize=binsize,
                              use_bin_weights=args['use_weights'],
                              use_only_binned_data=args['bin_image'],
                              bin_procedure=args['bin_procedure'],
                              pixel_mask_increase_fraction=0.05,
                              binned_data_filename_ext=\
                                args['binned_data_filename_ext'],
                              weights_filename=weights_filename,
                              #diagnostic_filename=diagnostic_filename,
                              av_mask_threshold=args['av_mask_threshold'],
                              plot_args=plot_args,
                              perform_parent_iterations=0,
                              )

        cloud.run_analysis(region_filename=region_filename,
                           region=region)


        print('\nSaving cloud to file \n' + cloud_filename)
        cloudpy.save(cloud,
                     cloud_filename,
                     binary_likelihood_filename=cloud_likelihood_filename,
                     write_fits=False)
        cloudpy.save(cloud.props, props_filename)
        props = cloud.props

    # Guard against the load_props path, where cloud is None
    if cloud is not None:
        cloud.co_filename = co_filename

    results = {}
    results['cloud'] = cloud
    results['cloud_name'] = cloud_name
    results['props'] = props
    results['args'] = args
    results['filename_extension'] = filename_extension
    results['figure_dir'] = figure_dir

    return results
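A minimal sketch of how run_cloud_analysis might be called. Because of the if 1: block at the top, the keyword defaults are always overridden by the args dict, so the dict has to carry every key the function reads; the values below are illustrative assumptions only, and the sketch assumes the module-level imports (np, cloudpy, perform_background_subtraction) from the original script:

# Hypothetical configuration; each key is one the function above reads.
args = {
    'cloud_name': 'perseus',
    'region': None,
    'load': False,
    'load_props': False,
    'data_type': 'planck',
    'background_subtract': False,
    'bin_image': False,
    'fixed_width': None,
    'init_vel_width': 20,
    'use_weights': False,
    'av_mask_threshold': None,
    'use_intercept': True,
    'recalculate_likelihoods': False,
    'subtract_comps': False,
    'likelihood_resolution': 'coarse',
    'binned_data_filename_ext': '_bin',
}

results = run_cloud_analysis(args)
cloud = results['cloud']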
Example 6
        def setup(self):

            # define directory locations
            # --------------------------
            self.output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
            self.figure_dir = \
                '/d/bip3/ezbc/perseus/figures/'
            self.av_dir = '/d/bip3/ezbc/perseus/data/av/'
            self.hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
            self.co_dir = '/d/bip3/ezbc/perseus/data/co/'
            self.core_dir = \
                    '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
            self.property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
            self.region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
            self.likelihood_dir = \
                    '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'

            # define filenames
            self.prop_filename = self.property_dir + \
                    'perseus_global_properties.txt'
            self.av_filename = self.av_dir + \
                    'perseus_av_planck_tau353_5arcmin.fits'
            self.av_error_filename = self.av_dir + \
                    'perseus_av_error_planck_tau353_5arcmin.fits'
            self.hi_filename = self.hi_dir + \
                    'perseus_hi_galfa_cube_regrid_planckres.fits'
            self.hi_error_filename = self.hi_dir + \
                    'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

            self.region = 'perseus'
            self.region_filename = self.region_dir + 'multicloud_divisions.reg'
            self.cloud_filename = \
                    '/d/bip3/ezbc/multicloud/data/cloud.pickle'

            # Plot args
            residual_hist_filename_base = self.figure_dir + \
                                          'diagnostics/residuals/' + \
                                          self.region + '_residual_hist'
            residual_map_filename_base = self.figure_dir + 'diagnostics/residuals/' + \
                                          self.region + '_residual_map'
            likelihood_filename_base = self.figure_dir + 'diagnostics/likelihoods/' + \
                                          self.region + '_likelihood'
            av_bin_map_filename_base = self.figure_dir + 'diagnostics/maps/' + \
                                          self.region + '_bin_map'

            plot_args = {
                'residual_hist_filename_base': residual_hist_filename_base,
                'residual_map_filename_base': residual_map_filename_base,
                'likelihood_filename_base': likelihood_filename_base,
                'av_bin_map_filename_base': av_bin_map_filename_base,
            }

            width_grid = np.arange(1, 75, 6 * 0.16667)
            dgr_grid = np.arange(0.001, 0.3, 5e-3)
            intercept_grid = np.arange(-2, 2, 0.1)

            self.width_grid = width_grid
            self.dgr_grid = dgr_grid
            self.intercept_grid = intercept_grid

            # Number of native (5 arcmin) pixels per binned pixel:
            # 1 deg * 60 arcmin/deg / 5 arcmin/pixel = 12
            self.binsize = 1.0 * 60.0 / 5.0

            self.cloud = cloudpy.Cloud(
                self.av_filename,
                self.hi_filename,
                av_error_filename=self.av_error_filename,
                hi_error_filename=self.hi_error_filename,
                cloud_prop_filename=self.prop_filename,
                dgr_grid=self.dgr_grid,
                intercept_grid=self.intercept_grid,
                width_grid=self.width_grid,
                residual_width_scale=3.0,
                threshold_delta_dgr=0.0001,
                hi_noise_vel_range=[90, 110],
                vel_range_diff_thres=10,
                init_vel_range=[-50, 50],
                verbose=True,
                clobber_likelihoods=True,
                binsize=self.binsize,
                plot_args=plot_args,
            )

            # Toggle to 1 to save the freshly initialized cloud to a pickle
            if 0:
                cloudpy.save(
                    self.cloud,
                    self.cloud_filename.replace('cloud.pickle',
                                                'cloud_init.pickle'))
Example 7
        def setup(self):

            # define directory locations
            # --------------------------
            self.output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
            self.figure_dir = \
                '/d/bip3/ezbc/perseus/figures/'
            self.av_dir = '/d/bip3/ezbc/perseus/data/av/'
            self.hi_dir = '/d/bip3/ezbc/perseus/data/hi/'
            self.co_dir = '/d/bip3/ezbc/perseus/data/co/'
            self.core_dir = \
                    '/d/bip3/ezbc/perseus/data/python_output/core_properties/'
            self.property_dir = '/d/bip3/ezbc/perseus/data/python_output/'
            self.region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
            self.likelihood_dir = \
                    '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'

            # define filenames
            self.prop_filename = self.property_dir + \
                    'perseus_global_properties.txt'
            self.av_filename = self.av_dir + \
                    'perseus_av_planck_tau353_5arcmin.fits'
            self.av_error_filename = self.av_dir + \
                    'perseus_av_error_planck_tau353_5arcmin.fits'
            self.hi_filename = self.hi_dir + \
                    'perseus_hi_galfa_cube_regrid_planckres.fits'
            self.hi_error_filename = self.hi_dir + \
                    'perseus_hi_galfa_cube_regrid_planckres_noise.fits'

            self.region = 'perseus'
            self.region_filename = self.region_dir + 'multicloud_divisions.reg'
            self.cloud_filename = \
                    '/d/bip3/ezbc/multicloud/data/cloud.pickle'

            # Plot args
            residual_hist_filename_base = self.figure_dir + \
                                          'diagnostics/residuals/' + \
                                          self.region + '_residual_hist'
            residual_map_filename_base = self.figure_dir + 'diagnostics/residuals/' + \
                                          self.region + '_residual_map'
            likelihood_filename_base = self.figure_dir + 'diagnostics/likelihoods/' + \
                                          self.region + '_likelihood'
            av_bin_map_filename_base = self.figure_dir + 'diagnostics/maps/' + \
                                          self.region + '_bin_map'

            plot_args = {
                    'residual_hist_filename_base': residual_hist_filename_base,
                    'residual_map_filename_base': residual_map_filename_base,
                    'likelihood_filename_base': likelihood_filename_base,
                    'av_bin_map_filename_base' : av_bin_map_filename_base,
                    }

            width_grid = np.arange(1, 75, 6*0.16667)
            dgr_grid = np.arange(0.001, 0.3, 5e-3)
            intercept_grid = np.arange(-2, 2, 0.1)

            self.width_grid = width_grid
            self.dgr_grid = dgr_grid
            self.intercept_grid = intercept_grid

            # Number of native (5 arcmin) pixels per binned pixel:
            # 1 deg * 60 arcmin/deg / 5 arcmin/pixel = 12
            self.binsize = 1.0 * 60.0 / 5.0

            self.cloud = cloudpy.Cloud(self.av_filename,
                                  self.hi_filename,
                                  av_error_filename=self.av_error_filename,
                                  hi_error_filename=self.hi_error_filename,
                                  cloud_prop_filename=self.prop_filename,
                                  dgr_grid=self.dgr_grid,
                                  intercept_grid=self.intercept_grid,
                                  width_grid=self.width_grid,
                                  residual_width_scale=3.0,
                                  threshold_delta_dgr=0.0001,
                                  hi_noise_vel_range=[90,110],
                                  vel_range_diff_thres=10,
                                  init_vel_range=[-50,50],
                                  verbose=True,
                                  clobber_likelihoods=True,
                                  binsize=self.binsize,
                                  plot_args=plot_args,
                                  )

            # Toggle to 1 to save the freshly initialized cloud to a pickle
            if 0:
                cloudpy.save(self.cloud,
                             self.cloud_filename.replace('cloud.pickle',
                                                         'cloud_init.pickle'))
Example 8
def run_cloud_analysis(args,
                       cloud_name='perseus',
                       region=None,
                       load=False,
                       data_type='planck'):

    # The args dict always overrides the keyword defaults above
    if 1:
        cloud_name = args['cloud_name']
        region = args['region']
        load = args['load']
        data_type = args['data_type']
        background_subtract = args['background_subtract']

    # define directory locations
    # --------------------------
    figure_dir = \
        '/d/bip3/ezbc/' + cloud_name + '/figures/'
    av_dir = '/d/bip3/ezbc/' + cloud_name + '/data/av/'
    hi_dir = '/d/bip3/ezbc/' + cloud_name + '/data/hi/'
    co_dir = '/d/bip3/ezbc/' + cloud_name + '/data/co/'
    core_dir = \
       '/d/bip3/ezbc/' + cloud_name + '/data/python_output/core_properties/'
    property_dir = '/d/bip3/ezbc/' + cloud_name + '/data/python_output/'
    region_dir = '/d/bip3/ezbc/multicloud/data/python_output/'
    background_region_dir = '/d/bip3/ezbc/' + cloud_name + \
                            '/data/python_output/ds9_regions/'
    likelihood_dir = \
            '/d/bip3/ezbc/' + cloud_name + '/data/python_output/nhi_av/'

    # define filenames
    prop_filename = property_dir + \
       cloud_name + '_global_properties.txt'
    hi_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres.fits'
    hi_error_filename = hi_dir + \
       cloud_name + '_hi_galfa_cube_regrid_planckres_noise.fits'
    co_filename = co_dir + \
       cloud_name + '_co_cfa_cube_regrid_planckres.fits'

    if cloud_name == 'perseus' and data_type == 'lee12':
        av_filename = av_dir + \
           cloud_name + '_av_lee12_iris_regrid_planckres.fits'
        av_error_filename = None
        av_error = 0.1
        if background_subtract:
            av_background = 0.5
        else:
            av_background = None
    if data_type == 'planck':
        av_filename = av_dir + \
           cloud_name + '_av_planck_tau353_5arcmin.fits'
        av_error_filename = av_dir + \
           cloud_name + '_av_error_planck_tau353_5arcmin.fits'
        av_error = None
        if 0:
            av_error_filename = None
            av_error = 1
        av_background = None
    if cloud_name == 'perseus' and data_type == 'planck_lee12mask':
        av_filename = av_dir + \
           cloud_name + '_av_planck_tau353_5arcmin_lee12mask.fits'
        av_error_filename = av_dir + \
           cloud_name + '_av_error_planck_tau353_5arcmin.fits'
        av_error = None
        av_background = None
    if data_type == 'k09':
        av_filename = av_dir + \
           cloud_name + '_av_k09_regrid_planckres.fits'

        av_error_filename = None
        av_error = 0.4

        av_background = 0.0

    # Build filename-extension components describing the analysis configuration
    if background_subtract:
        background_name = '_backsub'
    else:
        background_name = ''

    if args['bin_image']:
        bin_name = '_binned'
        args['bin_procedure'] = 'all'
    else:
        bin_name = ''
        args['bin_procedure'] = 'none'
    if args['fixed_width'] is None:
        width_name = ''
        init_vel_width = args['init_vel_width']
        vel_center_gauss_fit_kwargs = None
    else:
        if args['fixed_width'] == 'gaussfit':
            if args['cloud_name'] == 'perseus':
                guesses = (28, 3, 5, 2, -20, 20)
                ncomps = 2
            elif args['cloud_name'] == 'taurus':
                guesses = (28, 3, 5,
                           5, -30, 20,
                           3, -15, 5)
                ncomps = 3
            elif args['cloud_name'] == 'california':
                guesses = (50, 3, 5,
                           20, -10, 10,
                           3, -45, 10,
                           #2, -20, 20,
                           )
                ncomps = 3
            vel_center_gauss_fit_kwargs = {
                'guesses': guesses,
                'ncomps': ncomps,
                #'width_scale': 2,
            }
        else:
            vel_center_gauss_fit_kwargs = None
        width_name = '_fixedwidth'
        init_vel_width = args['fixed_width']
    if args['use_weights']:
        weights_name = '_weights'
        weights_filename = av_dir + \
           cloud_name + '_bin_weights.fits'
    else:
        weights_name = ''
        weights_filename = None
    if args['region'] is None:
        region_name = ''
        region = cloud_name
    else:
        region_name = '_region' + args['region']
        region = cloud_name + args['region']
    if args['av_mask_threshold'] is not None:
        avthres_name = '_avthres'
    else:
        avthres_name = ''
    if not args['use_intercept']:
        intercept_name = '_noint'
    else:
        intercept_name = ''
    if args['recalculate_likelihoods']:
        error_name = '_errorrecalc'
    else:
        error_name = ''
    if args['subtract_comps']:
        compsub_name = '_compsub'
    else:
        compsub_name = ''

    filename_extension = cloud_name + '_' + data_type + background_name + \
            bin_name + weights_name + '_' + args['likelihood_resolution'] + \
            'res' + region_name + width_name + avthres_name + \
            intercept_name + error_name + compsub_name

    # Plot args
    residual_hist_filename_base = figure_dir + 'diagnostics/residuals/' + \
                                  filename_extension + '_residual_hist'
    residual_map_filename_base = figure_dir + 'diagnostics/residuals/' + \
                                  filename_extension + '_residual_map'
    likelihood_filename_base = figure_dir + 'diagnostics/likelihoods/' + \
                                  filename_extension + '_likelihood'
    av_bin_map_filename_base = figure_dir + 'diagnostics/maps/' + \
                                  filename_extension + '_bin_map'

    plot_args = {
        'residual_hist_filename_base': residual_hist_filename_base,
        'residual_map_filename_base': residual_map_filename_base,
        'likelihood_filename_base': likelihood_filename_base,
        'av_bin_map_filename_base': av_bin_map_filename_base,
    }

    if 0:
        import os
        os.system('rm -rf ' + hi_error_filename)

    region_filename = region_dir + 'multicloud_divisions.reg'


    cloud_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '.pickle'
    cloud_likelihood_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '_likelihoods.npy'
    props_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/' + \
            filename_extension + \
            '_props.pickle'

    # background filenames
    background_filename = av_dir + filename_extension + '_background.fits'
    background_region_filename = background_region_dir + cloud_name + \
                                 '_background.reg'

    mask_filename = av_dir + filename_extension + '_mask.fits'

    # Lee+12 testing
    if args['fixed_width'] == 20 and args['data_type'] == 'lee12':
        vel_center = 5
    else:
        vel_center = None

    diagnostic_filename = \
            '/d/bip3/ezbc/multicloud/data/python_output/diagnostics/' + \
            filename_extension + '_diagnostic.txt'

    if args['likelihood_resolution'] == 'fine':
        if args['fixed_width'] is not None:
            width_grid = np.array((args['fixed_width'], ))
        else:
            width_grid = np.arange(1, 70, 2 * 0.16667)
        #width_grid = np.arange(30, 70, 2*0.16667)
        dgr_grid = np.arange(0.0, 0.2, 2e-4)
        #dgr_grid = np.arange(0.05, 0.15, 2e-4)
        intercept_grid = np.arange(-1, 1, 0.005)
        #intercept_grid = np.arange(-1, 0., 0.005)
    elif args['likelihood_resolution'] == 'coarse':
        if args['fixed_width'] is not None:
            width_grid = np.array((args['fixed_width'], ))
        else:
            width_grid = np.arange(1, 50, 2 * 0.16667)
        #width_grid = np.arange(100, 101, 1)
        dgr_grid = np.arange(0.000, 0.6, 3e-3)
        if args['use_intercept']:
            intercept_grid = np.arange(-4, 4, 0.1)
        else:
            intercept_grid = np.array((0, ))

        #dgr_grid = np.arange(0.1, 0.5, 3e-3)
        #intercept_grid = np.arange(-5, 1, 0.1)
        #intercept_grid = np.array((0.9,))
    else:
        raise ValueError('likelihood_resolution should be either ' + \
                         '"coarse" or "fine"')

    # Number of native (5 arcmin) pixels per binned pixel:
    # bin size in degrees * 60 arcmin/degree / 5 arcmin/pixel = 0.5 * 60 / 5 = 6
    binsize = 0.5 * 60.0 / 5.0

    if load:
        try:
            if not args['load_props']:
                print('\nAttempting to load cloud from file \n' + \
                      cloud_filename)
                cloud = cloudpy.load(
                    cloud_filename,
                    binary_likelihood_filename=cloud_likelihood_filename,
                    load_fits=True)
                cloudpy.save(cloud.props, props_filename)
                props = cloud.props
            else:
                print('\nAttempting to load cloud props from file \n' + \
                      props_filename)
                cloud = None
                props = cloudpy.load(props_filename)
            run_analysis = False
        except (EOFError, IOError):
            print('\nLoading failed, performing analysis')
            run_analysis = True
    else:
        run_analysis = True

    if run_analysis:
        print('\n\nPerforming analysis on ' + cloud_name)

        if cloud_name == 'california':
            if args['background_subtract']:
                print('\nPerforming a background subtraction...')
                av_filename_back = av_filename.replace('.fits', '_bin.fits')
                av_background = perform_background_subtraction(
                    av_filename_back,
                    background_dim=2,
                    #background_init=0.9,
                    background_filename=background_filename,
                    background_region_filename=background_region_filename)
                #intercept_grid = np.arange(0, 1, 1)
                # Override the derived background level with a fixed value
                av_background = 0.9
                #results['av_background'] = av_background


        cloud = cloudpy.Cloud(av_filename,
                              hi_filename,
                              av_error_filename=av_error_filename,
                              av_error=av_error,
                              av_background=av_background,
                              mask_filename=mask_filename,
                              hi_error_filename=hi_error_filename,
                              cloud_prop_filename=prop_filename,
                              dgr_grid=dgr_grid,
                              intercept_grid=intercept_grid,
                              width_grid=width_grid,
                              residual_width_scale=3,
                              threshold_delta_dgr=0.001,
                              #threshold_delta_dgr=1,
                              hi_noise_vel_range=[90,110],
                              vel_range_diff_thres=2,
                              init_vel_width=init_vel_width,
                              vel_center=vel_center,
                              vel_center_gauss_fit_kwargs=\
                                      vel_center_gauss_fit_kwargs,
                              subtract_comps=args['subtract_comps'],
                              verbose=True,
                              clobber_likelihoods=True,
                              recalculate_likelihoods=\
                                      args['recalculate_likelihoods'],
                              binsize=binsize,
                              use_bin_weights=args['use_weights'],
                              use_only_binned_data=args['bin_image'],
                              bin_procedure=args['bin_procedure'],
                              pixel_mask_increase_fraction=0.05,
                              binned_data_filename_ext=\
                                args['binned_data_filename_ext'],
                              weights_filename=weights_filename,
                              #diagnostic_filename=diagnostic_filename,
                              av_mask_threshold=args['av_mask_threshold'],
                              plot_args=plot_args,
                              perform_parent_iterations=0,
                              )

        cloud.run_analysis(region_filename=region_filename, region=region)

        print('\nSaving cloud to file \n' + cloud_filename)
        cloudpy.save(cloud,
                     cloud_filename,
                     binary_likelihood_filename=cloud_likelihood_filename,
                     write_fits=False)
        cloudpy.save(cloud.props, props_filename)
        props = cloud.props

    # Guard against the load_props path, where cloud is None
    if cloud is not None:
        cloud.co_filename = co_filename

    results = {}
    results['cloud'] = cloud
    results['cloud_name'] = cloud_name
    results['props'] = props
    results['args'] = args
    results['filename_extension'] = filename_extension
    results['figure_dir'] = figure_dir

    return results