def reduce_and_save(filename, add_noise=False, rms_noise=0.001,
                    output_path="", cube_output=None,
                    nsig=3, slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.

    Parameters
    ----------
    filename : str
        FITS cube to load. When ``add_noise`` is False the raw filename is
        handed straight to ``Mask_and_Moments``.
    add_noise : bool, optional
        Add Gaussian noise to the cube before computing moments.
    rms_noise : float or 'scaled' or None, optional
        RMS of the added noise. 'scaled' uses 1/10th of the cube's 98th
        percentile. Must not be None when ``add_noise`` is enabled.
    output_path : str, optional
        Prefix/directory for the saved moment arrays.
    cube_output : str or None, optional
        Separate prefix for the noisy cube; defaults to ``output_path``.
    nsig : int, optional
        Sigma threshold used for the signal mask.
    slicewise_noise : bool, optional
        Draw an independent noise realization per spectral channel.
    '''
    if add_noise:
        if rms_noise is None:
            raise TypeError("Must specify value of rms noise.")

        cube, hdr = getdata(filename, header=True)

        # Optionally scale noise by 1/10th of the 98th percentile in the cube
        if rms_noise == 'scaled':
            rms_noise = 0.1 * np.percentile(cube[np.isfinite(cube)], 98)

        from scipy.stats import norm
        if not slicewise_noise:
            cube += norm.rvs(0.0, rms_noise, cube.shape)
        else:
            # Independent noise draw per spectral channel.
            spec_shape = cube.shape[0]
            slice_shape = cube.shape[1:]
            for i in range(spec_shape):
                cube[i, :, :] += norm.rvs(0.0, rms_noise, slice_shape)

        sc = SpectralCube(data=cube, wcs=WCS(hdr))

        # Mask out non-finite values (NaN padding etc.).
        mask = LazyMask(np.isfinite, sc)
        sc = sc.with_mask(mask)

    else:
        sc = filename

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    # Remove .fits from filename.
    # BUG FIX: the original used filename.split("/")[-1][:-4], which strips
    # only 4 characters and leaves a trailing "." for ".fits" files.
    # splitext handles any extension correctly (matches the later version of
    # this function elsewhere in the file).
    save_name = os.path.splitext(os.path.basename(filename))[0]

    reduc.to_fits(output_path + save_name)

    # Save the noisy cube too. Append ".fits" so the cube gets a proper
    # extension and cannot collide with the extensionless moment prefix.
    if add_noise:
        save_name += ".fits"
        if cube_output is None:
            reduc.cube.hdu.writeto(output_path + save_name)
        else:
            reduc.cube.hdu.writeto(cube_output + save_name)
# Build a tophat kernel matching the beam footprint in pixels; kernel_pix is
# the number of non-zero kernel pixels, used as the minimum region size below.
kernel = beam.as_tophat_kernel(pixscale)
kernel_pix = (kernel.array > 0).sum()

# Clean the mask channel-by-channel: morphological opening/closing with the
# beam kernel, then drop regions/holes smaller than the beam footprint.
# NOTE(review): `kernel` (an astropy kernel object) is passed directly where
# scipy.ndimage-style functions normally expect a structure array — confirm
# `nd` accepts this, or whether `kernel.array` was intended.
for i in ProgressBar(mask.shape[0]):
    mask[i] = nd.binary_opening(mask[i], kernel)
    mask[i] = nd.binary_closing(mask[i], kernel)
    mask[i] = mo.remove_small_objects(mask[i], min_size=kernel_pix,
                                      connectivity=2)
    mask[i] = mo.remove_small_holes(mask[i], min_size=kernel_pix,
                                    connectivity=2)

# Each region must contain a point above the peak_snr: label the 3D mask with
# full (26-neighbour) connectivity and blank any region whose maximum SNR
# never reaches peak_snr.
labels, num = nd.label(mask, np.ones((3, 3, 3)))
for n in range(1, num + 1):
    pts = np.where(labels == n)
    if np.nanmax(snr[pts]) < peak_snr:
        mask[pts] = False

masked_cube = cube.with_mask(mask)

# Save the cube
masked_cube.write("{}.masked.fits".format(name))

# Now make the moment maps (and their errors) from the masked cube and save
# them under moments/.
reduc = Mask_and_Moments(masked_cube, scale=noise.scale)
reduc.make_moments()
reduc.make_moment_errors()
reduc.to_fits(os.path.join("moments/", name))
# NOTE(review): this chunk duplicates the mask-cleaning/moment pipeline that
# appears earlier in the file (minus the kernel construction) — consider
# factoring into a shared helper.
# Clean the mask channel-by-channel: morphological opening/closing with the
# beam kernel, then drop regions/holes smaller than the beam footprint.
for i in ProgressBar(mask.shape[0]):
    mask[i] = nd.binary_opening(mask[i], kernel)
    mask[i] = nd.binary_closing(mask[i], kernel)
    mask[i] = mo.remove_small_objects(mask[i], min_size=kernel_pix,
                                      connectivity=2)
    mask[i] = mo.remove_small_holes(mask[i], min_size=kernel_pix,
                                    connectivity=2)

# Each region must contain a point above the peak_snr: label with full
# 26-neighbour connectivity, blank regions whose max SNR is below peak_snr.
labels, num = nd.label(mask, np.ones((3, 3, 3)))
for n in range(1, num + 1):
    pts = np.where(labels == n)
    if np.nanmax(snr[pts]) < peak_snr:
        mask[pts] = False

masked_cube = cube.with_mask(mask)

# Save the cube
masked_cube.write("{}.masked.fits".format(name))

# Now make the moment maps and errors from the masked cube and save them.
reduc = Mask_and_Moments(masked_cube, scale=noise.scale)
reduc.make_moments()
reduc.make_moment_errors()
reduc.to_fits(os.path.join("moments/", name))
faces=faces, timesteps='last', verbose=False) # If the AMR moments path doesn't exist, make the moment arrays and save. if not os.path.exists(amrmoments_path): os.mkdir(amrmoments_path) for face in faces: for fid in fiducials_amr[face]: fid_name = fiducials_amr[face][fid] mask_mom = Mask_and_Moments(fid_name, scale=0.001 * u.K) mask_mom.make_moments() mask_mom.make_moment_errors() save_name = os.path.splitext(os.path.basename(fid_name))[0] mask_mom.to_fits(os.path.join(amrmoments_path, save_name)) # Now run the distances AMR vs. none. statistics = copy(statistics_list) statistics.append("DeltaVariance_Centroid_Curve") statistics.append("DeltaVariance_Centroid_Slope") print "Statistics to run: %s" % (statistics) num_statistics = len(statistics) for face in faces: for fid in ProgressBar([3, 4]): distances = np.zeros( (len(statistics), len(fiducials_amr[face].keys())))
def reduce_and_save(filename, add_noise=False, regrid_linewidth=False,
                    rms_noise=0.001 * u.K, output_path="",
                    cube_output=None, nsig=3, slicewise_noise=True):
    '''
    Load the cube in and derive the property arrays.

    Parameters
    ----------
    filename : str
        FITS cube to load. Passed straight to ``Mask_and_Moments`` when no
        preprocessing (noise / linewidth regridding) is requested.
    add_noise : bool, optional
        Add Gaussian noise to the cube and apply the signal-mask procedure.
    regrid_linewidth : bool, optional
        Run the preprocessor that normalizes cubes to a common line width.
    rms_noise : Quantity or 'scaled' or None, optional
        RMS of the added noise (with units). 'scaled' uses 1/10th of the
        cube's 98th percentile. Must not be None when ``add_noise`` is set.
    output_path : str, optional
        Directory for the saved moment arrays (and cube, by default).
    cube_output : str or None, optional
        Separate directory for the processed cube; defaults to output_path.
    nsig : int, optional
        Sigma threshold for the simple signal mask / minimum intensity.
    slicewise_noise : bool, optional
        Draw an independent noise realization per spectral channel.
    '''
    if add_noise or regrid_linewidth:
        sc = SpectralCube.read(filename)

        if add_noise:
            if rms_noise is None:
                raise TypeError("Must specify value of rms noise.")

            # Work on a plain ndarray copy of the data.
            cube = sc.filled_data[:].value

            # Optionally scale noise by 1/10th of the 98th percentile in the
            # cube
            if rms_noise == 'scaled':
                rms_noise = 0.1 * \
                    np.percentile(cube[np.isfinite(cube)], 98) * sc.unit

            from scipy.stats import norm
            if not slicewise_noise:
                cube += norm.rvs(0.0, rms_noise.value, cube.shape)
            else:
                # Independent noise draw per spectral channel.
                spec_shape = cube.shape[0]
                slice_shape = cube.shape[1:]
                for i in range(spec_shape):
                    cube[i, :, :] += norm.rvs(0.0, rms_noise.value,
                                              slice_shape)

            # Rebuild the cube with the original WCS and units.
            sc = SpectralCube(data=cube * sc.unit, wcs=sc.wcs,
                              meta={"BUNIT": "K"})

            # Mask out non-finite values.
            mask = LazyMask(np.isfinite, sc)
            sc = sc.with_mask(mask)

        if regrid_linewidth:
            # Normalize the cubes to have the same linewidth
            # channels_per_sigma=20 scales to the largest mean line width in
            # SimSuite8 (~800 km/s; Design 22). So effectively everything is
            # "smoothed" to have this line width
            # Intensities are normalized by their 95% value.
            sc = preprocessor(sc, min_intensity=nsig * rms_noise,
                              norm_intensity=True,
                              norm_percentile=95,
                              channels_per_sigma=20)

    else:
        sc = filename

    # Run the same signal masking procedure that was used for the
    # COMPLETE cubes
    if add_noise:
        # The default settings were set based on these cubes
        sc = make_signal_mask(sc)[0]

    reduc = Mask_and_Moments(sc, scale=rms_noise)
    # Noisy cubes are already masked above; otherwise apply a simple
    # nsig-times-scale threshold mask.
    if not add_noise:
        reduc.make_mask(mask=reduc.cube > nsig * reduc.scale)

    reduc.make_moments()
    reduc.make_moment_errors()

    # Remove .fits from filename
    save_name = os.path.splitext(os.path.basename(filename))[0]

    reduc.to_fits(os.path.join(output_path, save_name))

    # Save the noisy cube too
    if add_noise or regrid_linewidth:
        save_name += ".fits"
        if cube_output is None:
            sc.hdu.writeto(os.path.join(output_path, save_name))
        else:
            sc.hdu.writeto(os.path.join(cube_output, save_name))
if run_moments:
    # Save moment arrays of the 256 cubes.
    moments_path = os.path.join(path_to_256, "moments")
    if not os.path.exists(moments_path):
        os.mkdir(moments_path)

    # NOTE(review): the directory created above is `moments_path`, but the
    # outputs below go to `moments_256_path` (defined elsewhere) — confirm
    # these refer to the same directory, otherwise one of them is wrong.
    for face in fiducials_256:
        fid_name = fiducials_256[face][256][0]
        mask_mom = Mask_and_Moments(fid_name, scale=0.001 * u.K)
        mask_mom.make_moments()
        mask_mom.make_moment_errors()
        save_name = os.path.splitext(os.path.basename(fid_name))[0]
        mask_mom.to_fits(os.path.join(moments_256_path, save_name))

if run_distances:
    # Set which stats to run.
    statistics = copy(statistics_list)
    # statistics.remove("Dendrogram_Hist")
    # statistics.remove("Dendrogram_Num")
    statistics.append("DeltaVariance_Centroid_Curve")
    statistics.append("DeltaVariance_Centroid_Slope")
    statistics.remove("Tsallis")

    # One entry per face; filled in by the loop below (body continues
    # outside this chunk).
    all_distances = {0: None, 1: None, 2: None}

    for face in fiducials.keys():
# NOTE(review): this chunk duplicates the run_moments/run_distances block
# that appears earlier in the file — consider de-duplicating.
if run_moments:
    # Save moment arrays of the 256 cubes.
    moments_path = os.path.join(path_to_256, "moments")
    if not os.path.exists(moments_path):
        os.mkdir(moments_path)

    # NOTE(review): directory created is `moments_path` but outputs go to
    # `moments_256_path` (defined elsewhere) — confirm they match.
    for face in fiducials_256:
        fid_name = fiducials_256[face][256][0]
        mask_mom = Mask_and_Moments(fid_name, scale=0.001 * u.K)
        mask_mom.make_moments()
        mask_mom.make_moment_errors()
        save_name = os.path.splitext(os.path.basename(fid_name))[0]
        mask_mom.to_fits(os.path.join(moments_256_path, save_name))

if run_distances:
    # Set which stats to run.
    statistics = copy(statistics_list)
    # statistics.remove("Dendrogram_Hist")
    # statistics.remove("Dendrogram_Num")
    statistics.append("DeltaVariance_Centroid_Curve")
    statistics.append("DeltaVariance_Centroid_Slope")
    statistics.remove("Tsallis")

    # One entry per face; populated by code outside this chunk.
    all_distances = {0: None, 1: None, 2: None}