import glob
import os

from gausspyplus.decompose import GaussPyDecompose

# Collect all subcubes that already have a finished decomposition ...
filenames = glob.glob(
    "decomposition/gpy_decomposed/smc_HI_cube_askap_sub_*_g+_fit_fin.pickle")
fileprefs = [f.split("_g+_fit")[0] for f in filenames]
fileprefs = [f.split("sub_")[1] for f in fileprefs]

# ... and all subcubes for which a component map has already been produced.
# Splitting on '_g+_component' leaves only the subcube index (e.g. '40').
donenames = glob.glob(
    "decomposition/gpy_maps/smc_HI_cube_askap_sub_*_g+_component_map_MW.fits")
doneprefs = [f.split("_g+_component")[0] for f in donenames]
doneprefs = [f.split("sub_")[1] for f in doneprefs]

# Only loop over the subcubes that still need to be processed.
prefs = [f for f in fileprefs if f not in doneprefs]

for i, filestr in enumerate(prefs):
    # Initialize the 'GaussPyDecompose' class and read in the parameter settings from 'gausspy+.ini'.
    decompose = GaussPyDecompose(config_file="gausspy+.ini")

    # Filepath to pickled dictionary of the prepared data.
    decompose.path_to_pickle_file = os.path.join(
        'decomposition', 'gpy_prepared',
        "smc_HI_cube_askap_sub_" + filestr + '.pickle')
    path_to_pickled_file = decompose.path_to_pickle_file

    # Filepath to pickled dictionary with the decomposition results
    path_to_decomp_pickle = os.path.join(
        "decomposition", "gpy_decomposed",
        "smc_HI_cube_askap_sub_" + filestr + "_g+_fit_fin.pickle")
    print(path_to_decomp_pickle)
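    # A sketch of a possible continuation of this loop, under the assumption
    # that the aim is to produce the component maps checked for via 'doneprefs'
    # above; the calls below are the same GaussPy+ methods used in the other
    # snippets, not the original remainder of this loop.
    decompose.load_final_results(path_to_decomp_pickle)
    decompose.produce_component_map()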
# Skip the subcubes that are excluded from the decomposition.
excluded_files = {
    'decomposition/gpy_prepared/smc_HI_cube_askap_sub_40.pickle',
    'decomposition/gpy_prepared/smc_HI_cube_askap_sub_16.pickle',
    'decomposition/gpy_prepared/smc_HI_cube_askap_sub_24.pickle',
    'decomposition/gpy_prepared/smc_HI_cube_askap_sub_32.pickle',
}
if filename not in excluded_files:
    # Extract the subcube index from the filename.
    filestr = filename.split(".pickle")[0]
    filestr = filestr.split("sub_")[1]

    # Initialize the 'GaussPyDecompose' class and read in the parameter settings from 'gausspy+.ini'.
    decompose = GaussPyDecompose(config_file="gausspy+.ini")
    # Filepath to pickled dictionary of the prepared data.
    decompose.path_to_pickle_file = filename
    # First smoothing parameter
    decompose.alpha1 = 1.13
    # Second smoothing parameter
    decompose.alpha2 = 2.06
    # We deliberately set the upper limit for the reduced chi-square to a low value to enforce the best possible fit for each individual spectrum.
    decompose.rchi2_limit = 2.0
    # Suffix for the filename of the pickled dictionary with the decomposition results.
    decompose.suffix = "_g+"

    # Start the decomposition.
    decompose.decompose()

    # (Optional) Produce a FITS image showing the number of fitted components
    decompose.produce_component_map()
def main():
    # Initialize the 'GaussPyDecompose' class and read in the parameter settings from 'gausspy+.ini'.
    decompose = GaussPyDecompose(config_file='gausspy+.ini')

    # The following lines will override the corresponding parameter settings defined in 'gausspy+.ini'.

    # Filepath to pickled dictionary of the prepared data.
    decompose.path_to_pickle_file = os.path.join(
        'decomposition_grs', 'gpy_prepared', 'grs-test_field.pickle')
    # First smoothing parameter
    decompose.alpha1 = 2.58
    # Second smoothing parameter
    decompose.alpha2 = 5.14
    # Suffix for the filename of the pickled dictionary with the decomposition results.
    decompose.suffix = '_g+'

    # Start the decomposition.
    decompose.decompose()

    # (Optional) Produce a FITS image showing the number of fitted components
    decompose.produce_component_map()
    # (Optional) Produce a FITS image showing the reduced chi-square values
    decompose.produce_rchi2_map()

    # (Optional) Plot some of the spectra and the decomposition results

    # Filepath to pickled dictionary of the prepared data.
    path_to_pickled_file = decompose.path_to_pickle_file
    # Filepath to pickled dictionary with the decomposition results
    path_to_decomp_pickle = os.path.join(
        'decomposition_grs', 'gpy_decomposed', 'grs-test_field_g+_fit_fin.pickle')
    # Directory in which the plots are saved.
    path_to_plots = os.path.join('decomposition_grs', 'gpy_plots')
    # Here we select a subregion of the data cube, whose spectra we want to plot.
    pixel_range = {'x': [30, 34], 'y': [25, 29]}
    plot_spectra(path_to_pickled_file,
                 path_to_plots=path_to_plots,
                 path_to_decomp_pickle=path_to_decomp_pickle,
                 signal_ranges=True,
                 pixel_range=pixel_range)
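# Minimal entry point, assuming this main() is run as a standalone script:
if __name__ == '__main__':
    main()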
# @Author: riener
# @Date: 2019-04-02T17:42:46+02:00
# @Filename: decompose--grs.py
# @Last modified by: riener
# @Last modified time: 31-05-2019

import os

from gausspyplus.decompose import GaussPyDecompose
from gausspyplus.plotting import plot_spectra

# Initialize the 'GaussPyDecompose' class and read in the parameter settings from 'gausspy+.ini'.
decompose = GaussPyDecompose(config_file='gausspy+.ini')

# The following lines will override the corresponding parameter settings defined in 'gausspy+.ini'.

# Filepath to pickled dictionary of the prepared data.
decompose.path_to_pickle_file = os.path.join(
    'decomposition_grs', 'gpy_prepared', 'grs-test_field.pickle')
# First smoothing parameter
decompose.alpha1 = 2.58
# Second smoothing parameter
decompose.alpha2 = 5.14
# Suffix for the filename of the pickled dictionary with the decomposition results.
decompose.suffix = '_g+'

# Start the decomposition.
decompose.decompose()

# (Optional) Produce a FITS image showing the number of fitted components
decompose.produce_component_map()
def main():
    # Initialize the 'SpatialFitting' class and read in the parameter settings from 'gausspy+.ini'.
    sp = SpatialFitting(config_file='gausspy+.ini')

    # The following lines will override the corresponding parameter settings defined in 'gausspy+.ini'.

    # Filepath to the pickled dictionary of the prepared data
    sp.path_to_pickle_file = os.path.join(
        'decomposition_grs', 'gpy_prepared', 'grs-test_field.pickle')
    # Filepath to the pickled dictionary of the decomposition results
    sp.path_to_decomp_file = os.path.join(
        'decomposition_grs', 'gpy_decomposed', 'grs-test_field_g+_fit_fin.pickle')
    # Try to refit blended fit components
    sp.refit_blended = True
    # Try to refit spectra with negative residual features
    sp.refit_neg_res_peak = True
    # Try to refit broad fit components
    sp.refit_broad = True
    # Flag spectra with non-Gaussian distributed residuals
    sp.flag_residual = True
    # Do not try to refit spectra with non-Gaussian distributed residuals
    sp.refit_residual = False
    # Try to refit spectra for which the number of fit components is incompatible with that of their direct neighbors
    sp.refit_ncomps = True
    # We set the maximum allowed difference in the number of fitted components compared to the weighted median of all immediate neighbors to 1
    sp.max_diff_comps = 1
    # We set the maximum allowed difference in the number of fitted components between individual neighboring spectra to 2
    sp.max_jump_comps = 2
    # We will flag and try to refit all spectra which show jumps in the number of components of more than 2 to at least two direct neighbors
    sp.n_max_jump_comps = 1
    # Maximum difference in offset positions of fit components for grouping.
    sp.mean_separation = 2.
    # Maximum difference in FWHM values of fit components for grouping.
    sp.fwhm_separation = 4.

    # Start phase 1 of the spatially coherent refitting
    sp.spatial_fitting()

    # (Optional) Plot maps of the reduced chi-square values and the number of fitted components

    # Initialize the 'GaussPyDecompose' class and read in the parameter settings from 'gausspy+.ini'.
    decompose = GaussPyDecompose(config_file='gausspy+.ini')
    # Filepath to pickled dictionary of the prepared data.
    decompose.path_to_pickle_file = sp.path_to_pickle_file
    # Filepath to the pickled dictionary with the decomposition results
    path_to_decomp_pickle = os.path.join(
        'decomposition_grs', 'gpy_decomposed', 'grs-test_field_g+_fit_fin_sf-p1.pickle')
    # Load the decomposition results
    decompose.load_final_results(path_to_decomp_pickle)
    # Produce a FITS image showing the number of fitted components
    decompose.produce_component_map()
    # Produce a FITS image showing the reduced chi-square values
    decompose.produce_rchi2_map()

    # (Optional) Plot some of the spectra and the decomposition results

    # Filepath to pickled dictionary of the prepared data.
    path_to_pickled_file = sp.path_to_pickle_file
    # Directory in which the plots are saved.
    path_to_plots = os.path.join('decomposition_grs', 'gpy_plots')
    # Here we select a subregion of the data cube, whose spectra we want to plot.
    pixel_range = {'x': [30, 34], 'y': [25, 29]}
    plot_spectra(path_to_pickled_file,
                 path_to_plots=path_to_plots,
                 path_to_decomp_pickle=path_to_decomp_pickle,
                 signal_ranges=True,
                 pixel_range=pixel_range)
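# A sketch of how phase 2 of the spatially coherent refitting could follow,
# assuming the same 'SpatialFitting' settings as above: the phase 1 results
# ('..._sf-p1.pickle') serve as input and, following the GaussPy+ example
# scripts, phase 2 is started by passing 'continuity=True' to
# 'spatial_fitting'. Check the GaussPy+ documentation for the exact settings.
sp = SpatialFitting(config_file='gausspy+.ini')
sp.path_to_pickle_file = os.path.join(
    'decomposition_grs', 'gpy_prepared', 'grs-test_field.pickle')
sp.path_to_decomp_file = os.path.join(
    'decomposition_grs', 'gpy_decomposed', 'grs-test_field_g+_fit_fin_sf-p1.pickle')
# Start phase 2 of the spatially coherent refitting
sp.spatial_fitting(continuity=True)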
def gpp_parameters(self):
    # Create a temporary FITS file of the data, called 'gpp-temp.fits', that GaussPy+ will use for the decomposition.
    self.y_data = np.reshape(self.y_data, (self.y_data.shape[0], 1, 1))
    hdu = fits.PrimaryHDU(self.y_data)
    # Reference value and channel width of the spectral axis, used below to
    # convert the fitted channel values back to physical units.
    CRVAL3 = self.x_data[0]
    CDELT3 = self.x_data[1] - self.x_data[0]
    hdu1 = fits.HDUList([hdu])
    hdu1.writeto("gpp-temp.fits", overwrite=True)

    # The preparation and decomposition steps are taken directly from the GaussPy+ documentation,
    # with our signal-to-noise ratio, minimum FWHM, and the 'gpp-temp.fits' file substituted.
    prepare = GaussPyPrepare()
    prepare.path_to_file = os.path.abspath("gpp-temp.fits")
    prepare.p_limit = 0.02
    prepare.pad_channels = 2
    prepare.signal_mask = True
    prepare.min_channels = 100
    prepare.mask_out_ranges = []
    prepare.snr = self.sig
    prepare.significance = 5.0
    prepare.snr_noise_spike = self.sig
    data_location = (0, 0)
    prepared_spectrum = prepare.return_single_prepared_spectrum(data_location)

    decompose = GaussPyDecompose()
    decompose.two_phase_decomposition = True
    decompose.alpha1 = 2.58
    decompose.alpha2 = 5.14
    decompose.improve_fitting = True
    decompose.exclude_mean_outside_channel_range = True
    decompose.min_fwhm = self.min
    decompose.max_fwhm = 64.
    decompose.snr = self.sig
    decompose.snr_fit = None
    decompose.significance = 3.0
    decompose.snr_negative = None
    decompose.min_pvalue = 0.01
    decompose.max_amp_factor = 1.1
    decompose.refit_neg_res_peak = True
    decompose.refit_broad = True
    decompose.refit_blended = True
    decompose.separation_factor = 0.8493218
    decompose.fwhm_factor = 2.
    decompose.single_prepared_spectrum = prepared_spectrum
    decomposed_test = decompose.decompose()

    # Store the parameters of all Gaussian components in a single flat array
    # (amplitudes, then FWHMs, then means; FWHMs and means are converted from
    # channels to the units of the spectral axis). In hindsight, I should have
    # made an array of arrays, each containing the parameters of one component.
    self.params = np.concatenate(
        (np.array(decomposed_test["amplitudes_fit"][0]),
         np.array(decomposed_test["fwhms_fit"][0]) * CDELT3,
         np.array(decomposed_test["means_fit"][0]) * CDELT3 + CRVAL3))
    # The number of components that the spectrum has been decomposed into.
    self.components = int(len(self.params) / 3.0)
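# The method below is a sketch, not part of the original class: it shows the
# 'array of arrays' layout suggested in the comment above, with one row of
# (amplitude, FWHM, mean) per fitted component. The name
# 'gpp_params_per_component' is introduced here for illustration only.
def gpp_params_per_component(self):
    # Assumes gpp_parameters() has already been run, so that self.params holds
    # the flat (amplitudes, FWHMs, means) array and self.components the number
    # of fitted components.
    n = self.components
    return np.column_stack(
        (self.params[:n],            # amplitudes
         self.params[n:2 * n],       # FWHMs (in spectral-axis units)
         self.params[2 * n:3 * n]))  # means (in spectral-axis units)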