print(' - Using the griddata method %s.' % method)
final[method] = []

# Which data to use
for source in analysis_data_sources:
    print('Using the %s data source' % source)
    if source == 'finalmaps':
        # Get the final map out from the wave simulation
        mc = euv_wave_data['finalmaps']

        # Accumulate the data in space and time to increase the signal
        # to noise ratio
        print(' - Performing spatial summing of HPC data.')
        mc = mapcube_tools.accumulate(mapcube_tools.superpixel(mc, spatial_summing),
                                      temporal_summing)
        if develop is not None:
            aware_utils.write_movie(mc, img_filepath + '_accumulated_data')

        # Swing the position of the start of the longitudinal
        # unwrapping
        for ils, longitude_start in enumerate(longitude_starts):
            # Which angle to start the longitudinal unwrapping
            transform_hpc2hg_parameters['longitude_start'] = longitude_start

            # Which version of AWARE to use
            if aware_version == 0:
                #
                # AWARE version 0 - first do the image processing
                # to isolate the wave front, then do the transformation into
                # heliographic co-ordinates to measure the wavefront.
                #
                print(' - Performing AWARE v0 image processing.')
# Transform parameters used to convert HPC image data to HG data.
# The HPC data is transformed to HG using the location below as the
# "pole" around which the data is transformed
transform_hpc2hg_parameters['epi_lon'] = euv_wave_data['epi_lon'] * u.deg
transform_hpc2hg_parameters['epi_lat'] = euv_wave_data['epi_lat'] * u.deg

# Storage for the results from all methods and polynomial fits
print(' - Using the griddata method %s.' % griddata_method)

# Accumulate the data in space and time to increase the signal
# to noise ratio
print(' - Performing spatial summing of HPC data.')
mc = mapcube_tools.accumulate(mapcube_tools.superpixel(hpc_maps, spatial_summing),
                              temporal_summing)
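#
# Sketch of the setup the driver fragments above assume to be in place.
# All values here are illustrative assumptions, not settings taken from an
# actual AWARE run.
#
import astropy.units as u

# Spatial summing: sum 4x4 blocks of pixels; temporal summing: sum pairs of
# consecutive maps.  Both trade resolution for signal to noise.
spatial_summing = [4, 4]*u.pix
temporal_summing = 2

# Angles at which to start the longitudinal unwrapping (swung over several
# values to test sensitivity to the unwrap origin).
longitude_starts = [0.0, 45.0]*u.degree

# Container for the HPC -> HG transform parameters filled in by the code
# above.
transform_hpc2hg_parameters = {}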
# Imports assumed by this fragment; mapcube_tools and aware_utils are
# AWARE's own modules, and _apply_median_filter/_apply_closing are helper
# functions defined elsewhere in this module.
import pickle
from copy import deepcopy

import numpy as np
import numpy.ma as ma
import astropy.units as u
from skimage.morphology import disk
from sunpy.map import Map

import aware_utils
import mapcube_tools


def processing(mc, radii=[[11, 11]*u.degree],
               clip_limit=None,
               histogram_clip=[0.0, 99.],
               func=np.sqrt,
               three_d=False,
               develop=None):
    """
    Image processing steps used to isolate the EUV wave from the data.  Use
    this part of AWARE to perform the image processing steps that segment
    propagating features that brighten new pixels as they propagate.

    Parameters
    ----------
    mc : sunpy.map.MapCube
        The time-ordered EUV image data.
    radii : list of lists
        Each list contains a pair of angular sizes that set the radius of
        the median filter and of the morphological closing operation.
    clip_limit : list or None
        If not None, the lower and upper values used to clip and normalize
        the data; if None, the limits are calculated using histogram_clip.
    histogram_clip : list
        The lower and upper percentiles used to clip and normalize the data
        when clip_limit is None.
    func : function
        Function applied to the running difference data before clipping
        (the default, np.sqrt, compresses the dynamic range).
    three_d : bool
        If True, use three-dimensional (space + time) filtering and closing
        operations; otherwise operate on each image separately.
    develop : dict or None
        If not None, a dict of file path prefixes used to dump intermediate
        data products for debugging.
    """
    # Define the disks that will be used on all the images.
    # The first disk in each pair is the disk that is used by the median
    # filter.  The second disk is used by the morphological closing
    # operation.
    disks = []
    for r in radii:
        e1 = (r[0]/mc[0].scale.x).to('pixel').value  # median circle radius - across wavefront
        e3 = (r[1]/mc[0].scale.x).to('pixel').value  # closing circle width - across wavefront
        disks.append([disk(e1), disk(e3)])

    # For the dump images
    rstring = ''
    for r in radii:
        z = '%i_%i__' % (r[0].value, r[1].value)
        rstring += z

    # Calculate the persistence
    new = mapcube_tools.persistence(mc)
    if develop is not None:
        develop_filepaths = {}
        filename = develop['img'] + '_persistence_mc.mp4'
        print('\nWriting persistence movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_persistence_mc.pkl'
        develop_filepaths['persistence_mc'] = filename
        print('\nWriting persistence mapcube to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(new, f)
        f.close()

    # Calculate the running difference
    new = mapcube_tools.running_difference(new)
    if develop is not None:
        filename = develop['img'] + '_rdpi_mc.mp4'
        print('\nWriting RDPI movie to {:s}'.format(filename))
        aware_utils.write_movie(new, filename)

        filename = develop['dat'] + '_rdpi_mc.pkl'
        develop_filepaths['rdpi_mc'] = filename
        print('\nWriting RDPI mapcube to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(new, f)
        f.close()

    # Storage for the processed mapcube.
    new_mc = []

    # Only want positive differences, so everything lower than zero
    # should be set to zero
    mc_data = func(new.as_array())
    mc_data[mc_data < 0.0] = 0.0

    # Clip the data to be within a range, and then normalize it.
    if clip_limit is None:
        cl = np.nanpercentile(mc_data, histogram_clip)
    else:
        # Use the explicitly supplied limits.  (Previously clip_limit was
        # accepted but never used, which left cl undefined on this branch.)
        cl = clip_limit
    mc_data[mc_data > cl[1]] = cl[1]
    mc_data = (mc_data - cl[0]) / (cl[1] - cl[0])

    # Get rid of NaNs
    nans_here = np.logical_not(np.isfinite(mc_data))
    nans_replaced = deepcopy(mc_data)
    nans_replaced[nans_here] = 0.0

    # Clean the data to isolate the wave front.  Use three dimensional
    # operations from scipy.ndimage.  This approach should get rid of
    # more noise and have better continuity in the time-direction.
    final = np.zeros_like(mc_data, dtype=np.float32)

    # Do the cleaning and isolation operations on multiple length-scales,
    # and add up the final results.
    nr = deepcopy(nans_replaced)
    # Use three-dimensional filters
    for j, d in enumerate(disks):
        pancake = np.swapaxes(np.tile(d[0], (3, 1, 1)), 0, -1)

        print('\n', nr.shape, pancake.shape, '\n', 'started median filter.')
        nr = _apply_median_filter(nr, d[0], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_median_dc_{:n}.npy'.format(j)
            develop_filepaths['np_median_dc'] = filename
            print('\nWriting results of median filter to {:s}'.format(filename))
            f = open(filename, 'wb')
            np.save(f, nr)
            f.close()

        print(' started grey closing.')
        nr = _apply_closing(nr, d[0], three_d)
        if develop is not None:
            filename = develop['dat'] + '_np_closing_dc_{:n}.npy'.format(j)
            develop_filepaths['np_closing_dc'] = filename
            print('\nWriting results of closing to {:s}'.format(filename))
            f = open(filename, 'wb')
            np.save(f, nr)
            f.close()

        # Sum the cleaned data at this length-scale into the final result
        final += nr*1.0

    # If in development mode, now dump out the meta's and the nans
    if develop:
        filename = develop['dat'] + '_np_meta.pkl'
        develop_filepaths['np_meta'] = filename
        print('\nWriting all meta data information to {:s}'.format(filename))
        f = open(filename, 'wb')
        pickle.dump(mc.all_meta(), f)
        f.close()

        filename = develop['dat'] + '_np_nans.npy'
        develop_filepaths['np_nans'] = filename
        print('\nWriting all nans to {:s}'.format(filename))
        f = open(filename, 'wb')
        np.save(f, nans_here)
        f.close()

    # Create the list that will be turned in to a mapcube
    for i, m in enumerate(new):
        new_map = Map(ma.masked_array(final[:, :, i],
                                      mask=nans_here[:, :, i]),
                      m.meta)
        new_map.plot_settings = deepcopy(m.plot_settings)
        new_mc.append(new_map)

    # Return the cleaned mapcube
    if develop:
        return Map(new_mc, cube=True), develop_filepaths
    else:
        return Map(new_mc, cube=True)
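#
# A minimal usage sketch of processing(), not taken from the AWARE source:
# the FITS file pattern and the develop path prefixes are hypothetical.
#
import pickle

import astropy.units as u
import numpy as np
import sunpy.map

# Load a time-ordered set of EUV images into a mapcube.
mc = sunpy.map.Map('/tmp/aware/aia_171_*.fits', cube=True)

# Clean on two length-scales; larger disks suppress more noise but blur the
# wavefront more.  In develop mode the intermediate products are dumped to
# disk and their paths returned.
cleaned, develop_filepaths = processing(
    mc,
    radii=[[11, 11]*u.degree, [22, 22]*u.degree],
    develop={'img': '/tmp/aware/img', 'dat': '/tmp/aware/dat'})

# Recover the dumped arrays; these are the np_median_dc, np_closing_dc and
# np_meta objects that the plotting code below works with.
with open(develop_filepaths['np_median_dc'], 'rb') as f:
    np_median_dc = np.load(f)
with open(develop_filepaths['np_closing_dc'], 'rb') as f:
    np_closing_dc = np.load(f)
with open(develop_filepaths['np_meta'], 'rb') as f:
    np_meta = pickle.load(f)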
ta.set_xlabel('x (arcsec)', fontsize=fontsize)
xtl = ta.axes.xaxis.get_majorticklabels()
for label in xtl:
    label.set_fontsize(0.67*fontsize)
ta.set_ylabel('y (arcsec)', fontsize=fontsize)
ytl = ta.axes.yaxis.get_majorticklabels()
for label in ytl:
    label.set_fontsize(0.67*fontsize)
plt.tight_layout()
# Save before showing: with some matplotlib backends the figure is emptied
# once the interactive window opened by show() is closed.
plt.savefig(os.path.expanduser(image_filepath))
plt.show()
plt.close('all')

# Rebuild a mapcube from the dumped median-filter arrays and write it out
# as a movie.
n = np_median_dc.shape[2]
mc = []
for i in range(0, n):
    mc.append(sunpy.map.Map(np_median_dc[:, :, i], np_meta[i]))
mc = sunpy.map.Map(mc, cube=True)
aware_utils.write_movie(mc, image_filepath + 'median_0')

# Do the same for the closing arrays.  (The output path previously was the
# bare string 'closing0'; written here next to the median movie for
# consistency.)
n = np_closing_dc.shape[2]
mc = []
for i in range(0, n):
    mc.append(sunpy.map.Map(np_closing_dc[:, :, i], np_meta[i]))
mc = sunpy.map.Map(mc, cube=True)
aware_utils.write_movie(mc, image_filepath + 'closing_0')
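#
# The two blocks above repeat the same array -> mapcube -> movie pattern;
# a small helper (a sketch, not part of AWARE) captures the intent once.
#
def movie_from_array(data, meta, filepath):
    """Write a (ny, nx, nt) array with per-frame meta out as a mapcube movie."""
    maps = [sunpy.map.Map(data[:, :, i], meta[i]) for i in range(data.shape[2])]
    aware_utils.write_movie(sunpy.map.Map(maps, cube=True), filepath)

# Equivalent to the two blocks above:
# movie_from_array(np_median_dc, np_meta, image_filepath + 'median_0')
# movie_from_array(np_closing_dc, np_meta, image_filepath + 'closing_0')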