def main():

    ''' Script to cluster the Gaussian components found by AGD.

    '''

    #os.chdir('/home/ezbc/research/magellanic_stream/scripts/gausspy_decomp/')

    import os
    import pickle

    import numpy as np

    from localmodule import get_decomposed_data, perform_PCA

    # Set the constants
    DIR_DECOMP = '/d/bip3/ezbc/multicloud/data/decomposition/'
    DIR_FIGURE = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_DATA = 'agd_multicloud_data.pickle'
    FILENAME_TRAIN = DIR_DECOMP + 'agd_multicloud_train.pickle'
    FILENAME_TRAIN_DECOMPOSED = \
        DIR_DECOMP + 'agd_multicloud_train_decomp.pickle'
    FILENAME_DECOMPOSED = DIR_DECOMP + 'agd_multicloud_decomp.pickle'
    FILENAME_DECOMPOSED_CROP = \
        DIR_DECOMP + 'agd_multicloud_decomp_crop.pickle'
    FILENAME_DECOMP_REFORMAT = \
        DIR_DECOMP + 'agd_multicloud_decomp_reformat.pickle'
    FILENAME_CLUSTERS = DIR_DECOMP + 'agd_multicloud_clusters.pickle'
    FILENAME_PPV_BASE = DIR_FIGURE + '/ppv/ppv'
    FILENAME_PLOT = DIR_FIGURE + 'pca.png'

    #CROP_LIMITS = [285, 290, -39, -35]
    CROP_LIMITS = [310, 270, -70, -20]
    #PLOT_LIMITS = [300, 270, -20, -60]
    #PLOT_LIMITS = [300, 270, -20, -0]
    PLOT_LIMITS = None
    #HI_VEL_RANGE = [100, 500]
    HI_VEL_RANGE = [-100, 100]
    PLOT_NHI = 1
    PLOT_VEL = 1
    SHOW_PLOTS = 0
    DATA_PLOT_FREQ = 50

    # Clustering constants
    N_PC = 5
    N_CLUSTERS = 5
    #CLUSTER_METHOD = 'kmeans'
    CLUSTER_METHOD = 'spectral'
    #CLUSTER_METHOD = 'dbscan'

    # Load cropped decomp data?
    LOAD_DECOMP_CROP = 0
    CLOBBER_CROPPED = 0
    LOAD_REFORMATTED_DATA = 0
    LOAD_SYNTHETIC_CUBE = 0
    LOAD_CLUSTERS = 0

    # Remove cropped file?
    if not LOAD_REFORMATTED_DATA:
        if CLOBBER_CROPPED:
            os.system('rm -rf ' + FILENAME_DECOMPOSED_CROP)

        # Load the results
        if os.path.isfile(FILENAME_DECOMPOSED_CROP) and LOAD_DECOMP_CROP:
            print('\nLoading cropped file...')
            results_dict = \
                get_decomposed_data(FILENAME_DATA,
                                    filename_decomposed=\
                                        FILENAME_DECOMPOSED_CROP,
                                    load=True,
                                    )
        elif LOAD_DECOMP_CROP:
            print('\nLoading full results to be cropped...')

            # crop decomposed data
            results_dict = \
                get_decomposed_data(FILENAME_DATA,
                                    filename_decomposed=FILENAME_DECOMPOSED,
                                    load=True,
                                    )

            if 1:
                reload_wcs_positions(results_dict)
                pickle.dump(results_dict, open(FILENAME_DATA, 'wb'))

            print('\nCreating cropped file...')
            results_dict = crop_results(results_dict,
                                        FILENAME_DECOMPOSED_CROP,
                                        limits=CROP_LIMITS)
        else:
            print('\nLoading full cube results...')
            # load entire dataset
            results_dict = \
                get_decomposed_data(FILENAME_DATA,
                                    filename_decomposed=FILENAME_DECOMPOSED,
                                    load=True,
                                    )

    if 0:
        FILENAME_DATA = DIR_DECOMP + 'agd_multicloud_data.pickle'
        data_dict = get_data(load=1, filename=FILENAME_DATA)
        plot_spectra(results_dict, data_dict)

    if LOAD_REFORMATTED_DATA and os.path.isfile(FILENAME_DECOMP_REFORMAT):
        results_ref = pickle.load(open(FILENAME_DECOMP_REFORMAT, 'rb'))
    else:
        results_ref = reformat_results(results_dict)

        pickle.dump(results_ref, open(FILENAME_DECOMP_REFORMAT, 'wb'))


    print('\nPerforming PCA...')
    results_ref['data_reduced'] = get_PCA(results_ref['data'],
                                          n_components=N_PC)

    print('\nNumber of components to be ' + \
          'clustered: {0:.0f}'.format(len(results_ref['data'])))
    if LOAD_CLUSTERS and os.path.isfile(FILENAME_CLUSTERS):
        results_ref['cluster_labels'] = \
            pickle.load(open(FILENAME_CLUSTERS, 'rb'))
    else:
        print('\nClustering reduced components...')
        results_ref['cluster_labels'] = \
                get_clusters(results_ref['data_reduced'],
                             n_clusters=N_CLUSTERS,
                             method=CLUSTER_METHOD)
        pickle.dump(results_ref['cluster_labels'],
                    open(FILENAME_CLUSTERS, 'wb'))

    print('\nNumber of unique clusters: ' + \
          '{0:.0f}'.format(len(np.unique(results_ref['cluster_labels']))))


    # Crop the data
    if 0:
        results_ref['data'] = \
            results_ref['data'][np.random.randint(results_ref['data'].shape[0],
                                                  size=500),
                                :]


    print('\nPlotting cluster analysis...')
    plot_cluster_data(results_ref['data_reduced'][::DATA_PLOT_FREQ],
                      colors=results_ref['cluster_labels'][::DATA_PLOT_FREQ],
                      filename=FILENAME_PLOT,
                      show=SHOW_PLOTS)

    print('\nPlotting component parameters...')
    if 0:
        plot_cluster_data(
            results_ref['data'][:, (0, 4, 3)],
            colors=results_ref['cluster_labels'],
            filename=FILENAME_PLOT.replace('.png', '_data.png'),
            #labels=['Glon [deg]', 'Glat [deg]', 'FWHM [km/s]',],
            labels=['Glon [deg]', 'Velocity [km/s]', 'FWHM [km/s]'],
            show_tick_labels=True,
            zlim=[-10, 100],
            show=SHOW_PLOTS)
    else:
        plot_cluster_data(
            results_ref['data'][:, (0, 1, 4)][::DATA_PLOT_FREQ],
            colors=results_ref['cluster_labels'][::DATA_PLOT_FREQ],
            filename=FILENAME_PLOT.replace('.png', '_data_glon_glat_vel.png'),
            #labels=['Glon [deg]', 'Glat [deg]', 'FWHM [km/s]',],
            labels=['Glon [deg]', 'Glat [deg]', 'Velocity [km/s]'],
            show_tick_labels=True,
            zlim=[-10, 400],
            show=SHOW_PLOTS)
        plot_cluster_data(
            results_ref['data'][:, (0, 3, 4)][::DATA_PLOT_FREQ],
            colors=results_ref['cluster_labels'][::DATA_PLOT_FREQ],
            filename=FILENAME_PLOT.replace('.png', '_data_glon_fwhm_vel.png'),
            #labels=['Glon [deg]', 'Glat [deg]', 'FWHM [km/s]',],
            labels=['Glon [deg]', 'FWHM [km/s]', 'Velocity [km/s]'],
            show_tick_labels=True,
            zlim=[-10, 400],
            show=SHOW_PLOTS)

    plot_ppv(results_ref['data'][:, (0,1,4)],
             colors=results_ref['cluster_labels'],
             filename_base=FILENAME_PPV_BASE,
             labels=['Glon [deg]','Glat [deg]','Velocity [km/s]',],
             show_tick_labels=True,
             #xlim=[240,320],
             #ylim=[-90,-10],
             #zlim=[-10,400],
             show=SHOW_PLOTS,
             )

    # Plot the decomposed HI map with the data
    if PLOT_NHI and not LOAD_REFORMATTED_DATA:
        print('\nPlotting N(HI) maps...')
        cube, header = get_cube()
        plot_nhi_maps(results_dict,
                      limits=PLOT_LIMITS,
                      cube_data=cube,
                      header=header,
                      show=SHOW_PLOTS,
                      velocity_range=HI_VEL_RANGE,
                      )

        plot_nhi_maps(results_dict,
                      limits=PLOT_LIMITS,
                      cube_data=cube,
                      header=header,
                      show=SHOW_PLOTS,
                      velocity_range=HI_VEL_RANGE,
                      save_pdf=True,
                      )

    if PLOT_NHI:
        print('\nPlotting N(HI) maps...')
        cube, header = get_cube()
        plot_cluster_nhi_panels(results_ref=results_ref,
                                colors=results_ref['cluster_labels'],
                                cube=cube,
                                header=header,
                                limits=PLOT_LIMITS,
                                load_synthetic_cube=LOAD_SYNTHETIC_CUBE,
                                velocity_range=HI_VEL_RANGE,
                                show=SHOW_PLOTS,
                                )
        plot_cluster_nhi_panels(results_ref=results_ref,
                                colors=results_ref['cluster_labels'],
                                cube=cube,
                                header=header,
                                limits=PLOT_LIMITS,
                                load_synthetic_cube=LOAD_SYNTHETIC_CUBE,
                                velocity_range=HI_VEL_RANGE,
                                show=SHOW_PLOTS,
                                save_pdf=True,
                                )
    if PLOT_VEL:
        cube, header = get_cube()
        plot_cluster_vel_panels(results_ref=results_ref,
                                colors=results_ref['cluster_labels'],
                                cube=cube,
                                header=header,
                                limits=PLOT_LIMITS,
                                load_synthetic_cube=LOAD_SYNTHETIC_CUBE,
                                velocity_range=HI_VEL_RANGE,
                                show=SHOW_PLOTS)
        plot_cluster_vel_panels(results_ref=results_ref,
                                colors=results_ref['cluster_labels'],
                                cube=cube,
                                header=header,
                                limits=PLOT_LIMITS,
                                load_synthetic_cube=LOAD_SYNTHETIC_CUBE,
                                velocity_range=HI_VEL_RANGE,
                                save_pdf=True,
                                show=SHOW_PLOTS)
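
# ---------------------------------------------------------------------------
# Illustrative sketch (assumption): get_PCA and get_clusters are helpers
# defined elsewhere in this module or in localmodule. The functions below
# show roughly how they could be implemented with scikit-learn; the names
# are hypothetical and nothing in main() calls them.
# ---------------------------------------------------------------------------
def _example_get_PCA(data, n_components=5):

    ''' Reduce the Gaussian-component table to its first principal
    components.

    '''

    from sklearn.decomposition import PCA

    pca = PCA(n_components=n_components)

    return pca.fit_transform(data)

def _example_get_clusters(data_reduced, n_clusters=5, method='spectral'):

    ''' Cluster the PCA-reduced components with the requested method. '''

    from sklearn import cluster

    if method == 'kmeans':
        estimator = cluster.KMeans(n_clusters=n_clusters)
    elif method == 'spectral':
        estimator = cluster.SpectralClustering(n_clusters=n_clusters)
    elif method == 'dbscan':
        estimator = cluster.DBSCAN()
    else:
        raise ValueError('Unknown clustering method: ' + method)

    return estimator.fit_predict(data_reduced)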
def plot_nhi_maps(results_dict, limits=None, cube_data=None, header=None,
        load_synthetic_cube=False, show=False, velocity_range=[0, 500],
        save_pdf=False):

    ''' Plot the observed N(HI) map beside the N(HI) map of the synthetic
    cube rebuilt from the fitted Gaussian components.

    '''

    from mycoords import make_velocity_axis
    # The imported localmodule.plot_nhi_maps shadows this wrapper's name
    # inside the function body; it performs the actual map plotting below.
    from localmodule import plot_nhi_maps, create_synthetic_cube
    import myimage_analysis as myia
    from astropy.io import fits

    # Plot names
    #DIR_FIG = '../../figures/'
    DIR_FIG = '/d/bip3/ezbc/multicloud/figures/decomposition/'
    FILENAME_FIG_BASE = DIR_FIG + 'nhi_map_data_synth'

    # Load HI Cube
    #DIR_HI = '../../data_products/hi/'
    DIR_HI = '/d/bip3/ezbc/multicloud/data_products/hi/'
    #FILENAME_CUBE = 'gass_280_-45_1450212515.fits'
    FILENAME_CUBE = 'perseus_hi_galfa_cube_sub_regrid.fits'
    FILENAME_CUBE_SYNTH = DIR_HI + 'cube_synth.npy'

    velocity_axis = make_velocity_axis(header)

    # Create N(HI) data
    nhi_data = myia.calculate_nhi(cube=cube_data,
                                  velocity_axis=velocity_axis,
                                  velocity_range=velocity_range,
                                  )

    # Create synthetic cube from fitted spectra
    velocity_axis = results_dict['velocity_axis']
    if not load_synthetic_cube:
        print('\nCreating synthetic cube...')
        cube_synthetic = create_synthetic_cube(results_dict, cube_data)

        np.save(FILENAME_CUBE_SYNTH, cube_synthetic)
    else:
        print('\nLoading synthetic cube...')
        cube_synthetic = np.load(FILENAME_CUBE_SYNTH)

    # Create N(HI) synthetic
    nhi_synthetic = myia.calculate_nhi(cube=cube_synthetic,
                                       velocity_axis=velocity_axis,
                                       velocity_range=velocity_range,
                                       )

    #v_limits = [0, np.max(nhi_data)]
    v_limits = [-1, 41]

    if 0:
        import matplotlib.pyplot as plt
        plt.close(); plt.clf()
        fig, axes = plt.subplots(2,1)
        axes[0].imshow(nhi_data, origin='lower')
        axes[1].imshow(nhi_synthetic, origin='lower')
        plt.show()

    if save_pdf:
        ext = '.pdf'
    else:
        ext = '.png'
    filename_fig = FILENAME_FIG_BASE + ext
    print('\nPlotting N(HI) maps...')
    print(filename_fig)
    # Plot the maps together
    plot_nhi_maps(nhi_data,
                  nhi_synthetic,
                  header=header,
                  #limits=[278, -37, 282, -35],
                  limits=limits,
                  filename=filename_fig,
                  nhi_1_vlimits=v_limits,
                  nhi_2_vlimits=v_limits,
                  show=show,
                  vscale='linear',
                  )
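
# ---------------------------------------------------------------------------
# Illustrative sketch (assumption): create_synthetic_cube is provided by
# localmodule. The helper below shows one plausible implementation, assuming
# each decomposed spectrum is described by arrays of Gaussian amplitudes,
# means, and FWHMs in channel units; the name and data layout are
# hypothetical and nothing above calls this function.
# ---------------------------------------------------------------------------
def _example_create_synthetic_cube(amps, means, fwhms, pixels, cube_shape):

    ''' Rebuild a PPV cube from per-pixel Gaussian fit parameters.

    amps, means, fwhms : lists of per-spectrum parameter arrays
    pixels : list of (y, x) positions matching the parameter lists
    cube_shape : (n_channels, ny, nx) shape of the output cube

    '''

    import numpy as np

    cube_synthetic = np.zeros(cube_shape)
    channels = np.arange(cube_shape[0])

    for (y, x), amp, mean, fwhm in zip(pixels, amps, means, fwhms):
        # Convert FWHM to Gaussian sigma
        sigma = np.atleast_1d(fwhm) / (2.0 * np.sqrt(2.0 * np.log(2.0)))

        # Sum each fitted component onto this pixel's spectrum
        for a, m, s in zip(np.atleast_1d(amp), np.atleast_1d(mean), sigma):
            cube_synthetic[:, y, x] += \
                a * np.exp(-(channels - m)**2 / (2.0 * s**2))

    return cube_synthetic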