def test_cpsd():
    plt.close('all')
    print()
    print('>>>>>>>>>>>>>>>>>>> Test cpsd (Cross Spectral Power Density) <<<<<<<<<<<<<<<<<<<<<<<<')
    flap.delete_data_object('*')
    print("**** Generating 8 random signals, 1 million points each.")
    d = flap.get_data('TESTDATA',
                      name='TEST-1-[1-8]',
                      options={'Signal': 'Random', 'Length': 1},
                      object_name='TESTDATA')
    print("**** Calculating all cpsd")
    flap.cpsd('TESTDATA',
              options={'Norm': True, 'Interval': 50, 'Log': True, 'Res': 10, 'Range': [100, 1e5]},
              output_name='TESTDATA_cpsd')
    flap.abs_value('TESTDATA_cpsd', output_name='TESTDATA_cpsd_abs')
    print("**** Plotting coherency between channels 1-2 and its significance level.")
    plt.figure()
    flap.plot('TESTDATA_cpsd_abs',
              axes='Frequency',
              slicing={'Row (Ref)': 1, 'Row': 2},
              options={'Log y': True, 'Log x': True, 'Error': False})
    flap.error_value('TESTDATA_cpsd_abs').plot(slicing={'Row (Ref)': 1, 'Row': 2})
    plt.figure()
    print("**** Plotting mean coherence in the 1e4-1e5 Hz frequency range as a function of row index.")
    flap.slice_data('TESTDATA_cpsd_abs',
                    slicing={'Frequency': flap.Intervals(1e4, 1e5)},
                    summing={'Frequency': 'Mean'}).plot(axes='Row (Ref)', options={'Y sep': 1.5})
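# A minimal sketch (not part of the original test) of what the normalized cross-power
# ('Norm': True) corresponds to for two 1-D signals, using scipy.signal directly.
# The sampling frequency, signal length and segment length below are illustrative assumptions.
def _sketch_coherence_with_scipy():
    import numpy as np
    from scipy import signal

    fs = 1e6                                        # assumed 1 MHz sampling, as in the TESTDATA examples
    rng = np.random.default_rng(0)
    x = rng.standard_normal(100_000)
    y = rng.standard_normal(100_000)

    # Cross spectral density and magnitude-squared coherence on Welch segments
    f, pxy = signal.csd(x, y, fs=fs, nperseg=4096)
    f, cxy = signal.coherence(x, y, fs=fs, nperseg=4096)
    return f, np.abs(pxy), cxy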
def test_get_data():
    try:
        flap.get_data('CAMERA_APSD')
        apsd_available = True
    except:
        apsd_available = False
    if (not apsd_available):
        try:
#            flap.get_data('W7X_CAMERA',exp_id="20181018.032",
#                          name="AEQ20_EDICAM_ROIP1",
#                          coordinates={'Time':[3,4]},
#                          object_name="EDI_ROIP1")
            flap.get_data('W7X_CAMERA',
                          exp_id="20181018.012",
                          name="AEQ21_PHOTRON_ROIP1",
                          coordinates={'Time':[6.05,6.25]},
                          no_data=False,
                          object_name="CAMERA")
        except Exception as e:
            raise e
        flap.list_data_objects()
#        flap.plot("CAMERA",plot_type='anim-image',axes=['Image y','Image x','Time'],options={'Wait':0.01,'Clear':True})
        print("Slicing start")
        flap.slice_data('CAMERA',
                        slicing={'Image x':flap.Intervals(0,4,step=5),'Image y':flap.Intervals(0,4,step=5)},
                        summing={'Interval(Image x) sample index':'Mean','Interval(Image y) sample index':'Mean'},
                        output_name='CAMERA_sliced')
        print("Slicing stop")
#        flap.plot("CAMERA_sliced",
#                  plot_type='anim-image',
#                  axes=['Start Image y in int(Image y)','Start Image x in int(Image x)','Time'],
#                  options={'Wait':0.01,'Clear':True, 'Z range':[0,3000]})
        print("*** APSD start")
        start = time.time()
        flap.apsd("CAMERA_sliced",coordinate='Time',options={'Res':200,'Range':[0,1e4]},output_name='CAMERA_APSD')
        stop = time.time()
        print('**** APSD STOP')
        print("**** Calculation time: {:5.2f} second".format(stop-start))
    plt.close('all')
#    flap.plot('CAMERA_APSD',
#              slicing={'Start Image y in int(Image y)':50},
#              plot_type='image',
#              axes=['Frequency','Start Image x in int(Image x)'],
#              options={'Z range':[0,5],'Aspect':'auto'})
#    plt.figure()
    return flap.plot('CAMERA_APSD',
                     plot_type='anim-image',
                     axes=['Frequency','Start Image x in int(Image x)','Start Image y in int(Image y)'],
                     options={'Z range':[0,5],'Aspect':'auto','Wait':0.1})
    flap.list_data_objects()  # Note: not reached, the function returns with the plot call above.
def test_binning():
    print()
    print()
    print('>>>>>>> Test image binning through multi-slice <<<<<<<<<<<')
    print("**** Generating a sequence of test images")
    flap.get_data('TESTDATA',
                  name='VIDEO',
                  object_name='TEST_VIDEO',
                  options={'Length': 0.05,
                           'Samplerate': 1e3,
                           'Width': 500,
                           'Height': 800,
                           'Image': 'Gauss',
                           'Spotsize': 10})
    print("***** Showing one image")
    plt.figure()
    flap.plot('TEST_VIDEO',
              slicing={'Time': 30e-3 / 3},
              plot_type='image',
              axes=['Image x', 'Image y'],
              options={'Clear': True, 'Interpolation': None, 'Aspect': 'equal'})
    flap.slice_data('TEST_VIDEO',
                    slicing={'Image x': flap.Intervals(0, 4, step=5),
                             'Image y': flap.Intervals(0, 9, step=10)},
                    summing={'Interval(Image x) sample index': 'Mean',
                             'Interval(Image y) sample index': 'Mean'},
                    output_name='TEST_VIDEO_binned')
    print("***** Showing one image of the (5,10) binned video")
    plt.figure()
    flap.plot('TEST_VIDEO_binned',
              slicing={'Time': 30e-3 / 3},
              plot_type='image',
              axes=['Image x', 'Image y'],
              options={'Clear': True, 'Interpolation': None, 'Aspect': 'equal'})
    flap.list_data_objects()
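# A minimal numpy sketch (not part of the original test) of the same 5x10 pixel binning
# that the multi-slice above performs, assuming the image size is divisible by the bin size.
def _sketch_image_binning_numpy():
    import numpy as np

    frames = np.random.rand(50, 500, 800)           # (time, x, y) test video, arbitrary values
    bx, by = 5, 10                                  # bin sizes along Image x and Image y
    nt, nx, ny = frames.shape
    binned = frames.reshape(nt, nx // bx, bx, ny // by, by).mean(axis=(2, 4))
    return binned                                   # shape (50, 100, 80)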
def test_select_multislice():
    plt.close('all')
    print()
    print('>>>>>>>>>>>>>>>>>>> Test select on maxima and multi slice <<<<<<<<<<<<<<<<<<<<<<<<')
    flap.delete_data_object('*')
    d = flap.get_data('TESTDATA',
                      name='TEST-1-1',
                      object_name='TEST-1-1',
                      options={'Length': 0.050})
    print("**** Selecting 100 microsec long intervals around the maxima of the signal.")
    d_int = flap.select_intervals('TEST-1-1',
                                  coordinate='Time',
                                  options={'Select': None,
                                           'Length': 0.0001,
                                           'Event': {'Type': 'Max-weight',
                                                     'Threshold': 1,
                                                     'Thr-type': 'Sigma'}},
                                  plot_options={'All points': True},
                                  output_name='SELECT')
    flap.list_data_objects()
    d_int.plot(axes=['__Data__', 0], plot_type='scatter', options={'Force': True})
    if (d_int is not None):
        print("**** Overplotting the signals in the selected intervals.")
        flap.slice_data('TEST-1-1', slicing={'Time': d_int}, output_name='TEST-1-1_sliced')
        flap.list_data_objects()
        plt.figure()
        n_int = d_int.shape[0]
        for i in range(n_int):
            flap.plot('TEST-1-1_sliced',
                      slicing={'Interval(Time)': i},
                      axes='Rel. Time in int(Time)')
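# A minimal sketch (not part of the original test) of selecting windows around signal
# maxima with scipy.signal.find_peaks; the one-sigma threshold and 100 microsec window
# only loosely mirror the flap options above and are illustrative assumptions.
def _sketch_select_maxima_scipy():
    import numpy as np
    from scipy.signal import find_peaks

    fs = 1e6                                        # assumed 1 MHz sampling
    t = np.arange(int(0.05 * fs)) / fs
    x = np.random.randn(t.size)

    peaks, _ = find_peaks(x, height=x.std())        # maxima above one sigma
    half = int(0.0001 * fs / 2)                     # +/- 50 microsec around each maximum
    windows = [(t[p] - half / fs, t[p] + half / fs) for p in peaks]
    return windows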
def test_simple_slice():
    print()
    print("\n>>>>>>>>>>>>>>>>>>> Test simple slice <<<<<<<<<<<<<<<<<<<<<<<<")
    flap.delete_data_object('*')
    print("**** Reading all test signals for time range [0,0.001]")
    d = flap.get_data('TESTDATA',
                      name='TEST-*',
                      options={'Scaling': 'Volt'},
                      object_name='TESTDATA',
                      coordinates={'Time': [0, 0.001]})
    print("**** Adding Device coordinates")
    flap.add_coordinate('TESTDATA', coordinates=['Device x', 'Device z', 'Device y'])
    print("**** Storage contents before slice")
    flap.list_data_objects()
    print("**** Slicing with {'Signal name': 'TEST-1-*'}")
    flap.slice_data('TESTDATA',
                    slicing={'Signal name': 'TEST-1-*'},
                    output_name='TESTDATA_slice')
    print("**** Sliced object")
    flap.list_data_objects(name='TESTDATA_slice')
def test_resample():
    plt.close('all')
    print()
    print(">>>>>>>>>>>>> Test signal resampling (interpolation) <<<<<<<<<<<")
    flap.delete_data_object('*')
    print("**** Generating two test signals with different sampling frequency.")
    flap.get_data('TESTDATA',
                  name='TEST-1-1',
                  options={'Scaling': 'Volt', 'Frequency': 1e3, 'Samplerate': 1e6},
                  object_name='TEST-1MHz',
                  coordinates={'Time': [0, 0.001]})
    flap.get_data('TESTDATA',
                  name='TEST-1-1',
                  options={'Scaling': 'Volt', 'Frequency': 1.5e3, 'Samplerate': 3e6},
                  object_name='TEST-3MHz',
                  coordinates={'Time': [0, 0.001]})
    print("\n***** Resampling from lower to higher frequency.")
    plt.figure()
    flap.plot('TEST-1MHz', axes='Time', plot_options={'marker': 'o'})
    flap.plot('TEST-3MHz', plot_options={'marker': 'o'})
    flap.slice_data('TEST-1MHz',
                    slicing={'Time': flap.get_data_object('TEST-3MHz')},
                    options={'Interpol': 'Linear'},
                    output_name='TEST-1MHz_resample')
    flap.plot('TEST-1MHz_resample', plot_options={'marker': 'x'})
    print("\n***** Resampling from higher to lower frequency.")
    plt.figure()
    flap.plot('TEST-1MHz', axes='Time', plot_options={'marker': 'o'})
    flap.plot('TEST-3MHz', plot_options={'marker': 'o'})
    flap.slice_data('TEST-3MHz',
                    slicing={'Time': flap.get_data_object('TEST-1MHz')},
                    options={'Interpol': 'Linear'},
                    output_name='TEST-3MHz_resample')
    flap.plot('TEST-3MHz_resample', plot_options={'marker': 'x'})
    print("\n***** Cutting parts.")
    plt.figure()
    flap.slice_data('TEST-1MHz',
                    slicing={'Time': flap.Intervals([1e-4, 5e-4], [2e-4, 7e-4])},
                    options={'Slice': 'Simple'},
                    output_name='TEST-1MHz_parts')
    flap.plot('TEST-1MHz_parts', axes='Time', plot_options={'marker': 'o'})
    flap.list_data_objects()
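# A minimal numpy sketch (not in the original test) of the linear resampling that slicing
# one object with another object's time axis performs; signal parameters are illustrative
# assumptions matching the 1 MHz / 3 MHz test signals above.
def _sketch_linear_resample_numpy():
    import numpy as np

    t_lo = np.arange(0, 1e-3, 1e-6)                 # 1 MHz time base
    t_hi = np.arange(0, 1e-3, 1.0 / 3e6)            # 3 MHz time base
    x_lo = np.sin(2 * np.pi * 1e3 * t_lo)

    # Resample the 1 MHz signal onto the 3 MHz time base by linear interpolation
    x_on_hi = np.interp(t_hi, t_lo, x_lo)
    return t_hi, x_on_hi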
def test_apsd():
    plt.close('all')
    print()
    print('>>>>>>>>>>>>>>>>>>> Test apsd (Auto Power Spectral Density) <<<<<<<<<<<<<<<<<<<<<<<<')
    flap.delete_data_object('*')
#    plt.close('all')
    print('**** Generating test signals with frequency changing from channel to channel.')
    d = flap.get_data('TESTDATA',
                      name='TEST*',
                      object_name='TEST-1_S',
                      options={'Signal': 'Sin', 'F': [1e3, 1e4], 'Length': 1.})
    print('**** Calculating 150 APSDs, 1 million samples each.')
    print('**** APSD START')
    start = time.time()
    flap.apsd('TEST-1_S', output_name='TEST-1_APSD_Sin1', options={'Res': 12, 'Int': 10})
    stop = time.time()
    print('**** APSD STOP')
    print("**** Calculation time: {:5.2f} second/signal".format((stop - start) / 150.))
    plt.figure()
    flap.plot('TEST-1_APSD_Sin1',
              slicing={'Row': 1},
              axes='Frequency',
              options={'All': True, 'X range': [0, 5e3]})
    plt.title('TEST-1-1_APSD_Sin1')
    print("**** Testing with a complex signal.")
    flap.delete_data_object('*')
    d = flap.get_data('TESTDATA',
                      name='TEST-1-1',
                      object_name='TEST-1-1_CS',
                      options={'Signal': 'Complex-Sin'})
    flap.apsd('TEST-1-1_CS',
              coordinate='Time',
              output_name='TEST-1-1_APSD_Complex-Sin',
              options={'Res': 10, 'Range': [-1e5, 1e5]})
    flap.slice_data('TEST-1-1_APSD_Complex-Sin',
                    slicing={'Frequency': flap.Intervals(-5e3, 5e3)},
                    output_name='TEST-1-1_APSD_Complex-Sin_sliced')
    flap.list_data_objects()
    plt.figure()
    flap.plot('TEST-1-1_APSD_Complex-Sin_sliced', axes='Frequency', options={'All': True})
    plt.title('TEST-1-1_APSD_Complex-Sin_sliced')
    print("**** Testing interval selection in apsd. APSD from 8 intervals, each 80 ms long.")
    d = flap.get_data('TESTDATA',
                      name='TEST-1-1',
                      object_name='TEST-1-1',
                      options={'Signal': 'Sin', 'Length': 1})
    intervals = flap.Intervals(0, 0.08, step=0.1, number=8)
    flap.apsd('TEST-1-1',
              output_name='TEST-1-1_APSD',
              intervals=intervals,
              options={'Res': 12, 'Int': 10})
    plt.figure()
    flap.plot('TEST-1-1_APSD', options={'X range': [0, 5e3]})
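# A minimal sketch (not in the original test) of an auto power spectral density estimate
# with scipy.signal.welch, roughly what flap.apsd computes for a single channel; the
# sampling rate, signal content and segment length are illustrative assumptions.
def _sketch_apsd_with_welch():
    import numpy as np
    from scipy.signal import welch

    fs = 1e6                                        # assumed 1 MHz sampling
    t = np.arange(1_000_000) / fs
    x = np.sin(2 * np.pi * 1e3 * t) + 0.1 * np.random.randn(t.size)

    f, pxx = welch(x, fs=fs, nperseg=8192)          # averaged over overlapping segments
    return f, pxx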
def calculate_nstx_gpi_crosscorrelation(exp_id=None,
                                        time_range=None,
                                        add_flux=None,
                                        reference_pixel=None,
                                        reference_flux=None,
                                        reference_position=None,
                                        reference_area=None,
                                        filter_low=None,
                                        filter_high=None,
                                        filter_design='Chebyshev II',
                                        trend=['Poly',2],
                                        frange=None,
                                        taurange=[-500e-6,500e-6],
                                        taures=2.5e-6,
                                        interval_n=11,
                                        filename=None,
                                        options=None,
                                        cache_data=False,
                                        normalize_signal=False,
                                        normalize=True,        #Calculate correlation if True (instead of covariance)
                                        plot=False,
                                        plot_acf=False,
                                        axes=['Image x', 'Image y', 'Time lag']
                                        ):
    if time_range is None:
        print('The time range needs to be set for the calculation.')
        print('There is no point in calculating the entire time range.')
        return
    else:
        if (type(time_range) is not list or len(time_range) != 2):
            raise TypeError('time_range needs to be a list with two elements.')

    if exp_id is not None:
        print("\n------- Reading NSTX GPI data --------")
        if cache_data:
            try:
                d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI')
            except:
                print('Data is not cached, it needs to be read.')
                d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
        else:
            d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
    else:
        raise ValueError('The experiment ID needs to be set.')

    if reference_flux is not None or add_flux:
        d.add_coordinate(coordinates='Flux r',exp_id=exp_id)

    #Normalize the data for the maximum cloud distribution
    if normalize_signal:
        normalizer=flap_nstx.analysis.calculate_nstx_gpi_norm_coeff(exp_id=exp_id,            # Experiment ID
                                                                    f_high=1e2,               # Low pass filter frequency in Hz
                                                                    design=filter_design,     # IIR filter design (from scipy)
                                                                    test=False,               # Testing input
                                                                    filter_data=True,         # IIR LPF the data
                                                                    time_range=None,          # Time range for the averaging in ms [t1,t2]
                                                                    calc_around_max=False,    # Calculate the average around the maximum of the GPI signal
                                                                    time_window=50.,          # The time window for the calc_around_max calculation
                                                                    cache_data=True,
                                                                    verbose=False,
                                                                    )
        d.data = d.data/normalizer.data  #This should be checked to some extent, it works with smaller matrices

    #Slicing data to the input time range
    flap.slice_data('GPI',exp_id=exp_id,
                    slicing={'Time':flap.Intervals(time_range[0],time_range[1])},
                    output_name='GPI_SLICED')

    #Filtering the signal since we are in time-space, not frequency space
    if frange is not None:
        filter_low=frange[0]
        filter_high=frange[1]
    if filter_low is not None or filter_high is not None:
        if filter_low is not None and filter_high is None:
            filter_type='Highpass'
        if filter_low is None and filter_high is not None:
            filter_type='Lowpass'
        if filter_low is not None and filter_high is not None:
            filter_type='Bandpass'
        flap.filter_data('GPI_SLICED',exp_id=exp_id,
                         coordinate='Time',
                         options={'Type':filter_type,
                                  'f_low':filter_low,
                                  'f_high':filter_high,
                                  'Design':filter_design},
                         output_name='GPI_SLICED_FILTERED')

    if reference_pixel is None and reference_position is None and reference_flux is None:
        calculate_acf=True
    else:
        calculate_acf=False

    if not calculate_acf:
        flap_nstx.analysis.calculate_nstx_gpi_reference('GPI_SLICED_FILTERED',
                                                        exp_id=exp_id,
                                                        reference_pixel=reference_pixel,
                                                        reference_area=reference_area,
                                                        reference_position=reference_position,
                                                        reference_flux=reference_flux,
                                                        output_name='GPI_REF')
        flap.ccf('GPI_SLICED_FILTERED',exp_id=exp_id,
                 ref='GPI_REF',
                 coordinate='Time',
                 options={'Resolution':taures,
                          'Range':taurange,
                          'Trend':trend,
                          'Interval':interval_n,
                          'Normalize':normalize,
                          },
                 output_name='GPI_CCF')

    if plot:
        if not plot_acf:
            object_name='GPI_CCF'
        else:
            object_name='GPI_ACF'
        flap.plot(object_name,
                  exp_id=exp_id,
                  plot_type='animation',
                  axes=axes,
                  options={'Plot units': {'Time lag':'us'},
                           'Z range':[0,1]},)
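# A minimal scipy sketch (not part of the original function) of the kind of Chebyshev II
# band-pass filtering requested above through flap.filter_data; the filter order, stop-band
# attenuation, band edges and the 400 kHz GPI frame rate are illustrative assumptions,
# not flap's internal defaults.
def _sketch_cheby2_bandpass(x, fs=400e3, f_low=1e3, f_high=30e3):
    from scipy import signal

    # Second-order-sections Chebyshev type II design, then zero-phase filtering
    sos = signal.cheby2(N=4, rs=40, Wn=[f_low, f_high], btype='bandpass', fs=fs, output='sos')
    return signal.sosfiltfilt(sos, x)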
# Imports used below but missing from this snippet (os, numpy, flap, and OpenPIV's
# 'process' module, assuming the older OpenPIV API that provides openpiv.process):
import os
import numpy as np
import flap
from openpiv import process

import flap_nstx
from flap_nstx.analysis import *
flap_nstx.register()
import flap_mdsplus
flap_mdsplus.register('NSTX_MDSPlus')

thisdir = os.path.dirname(os.path.realpath(__file__))
fn = os.path.join(thisdir,"flap_nstx.cfg")
flap.config.read(file_name=fn)

import scipy

exp_id=139901
time_range=[0.307,0.308]
flap.get_data('NSTX_GPI', exp_id=exp_id,name='',object_name='GPI')
d=flap.slice_data('GPI', slicing={'Time':flap.Intervals(time_range[0],time_range[1])})
sample0=d.coordinate('Sample')[0][0,0,0]
frame_a=np.asarray(flap.slice_data('GPI', slicing={'Sample':sample0}, output_name='GPI_FRAME1').data, dtype='float32')
frame_b=np.asarray(flap.slice_data('GPI', slicing={'Sample':sample0+1}, output_name='GPI_FRAME2').data, dtype='float32')

winsize = 10        # pixels
searchsize = 10     # pixels, search in image B
overlap = 5         # pixels
dt = 2.5e-6         # sec

u0, v0, sig2noise = process.extended_search_area_piv(frame_a.astype(np.int32),
                                                     frame_b.astype(np.int32),
                                                     window_size=winsize,
                                                     overlap=overlap,
                                                     dt=dt,
def show_nstx_gpi_video(exp_id=None,                       #Shot number
                        time_range=None,                   #Time range to show the video in, if not set, the entire shot is shown
                        z_range=None,                      #Range for the contour/color levels, if not set, min-max is divided
                        logz=False,                        #Plot the image in a logarithmic coloring
                        plot_filtered=False,               #Plot a high pass (100Hz) filtered video
                        normalize=None,                    #Normalize the video by dividing it with a processed GPI signal
                                                           # options: 'Time dependent' (LPF filtered) (recommended)
                                                           #          'Time averaged' (LPF filtered and averaged for the time range)
                                                           #          'Simple' (Averaged)
                        normalizer_time_range=None,        #Time range for the time dependent normalization
                        subtract_background=False,         #Subtract the background from the image (mean of the time series)
                        plot_flux=False,                   #Plot the flux surfaces onto the video
                        plot_separatrix=False,             #Plot the separatrix onto the video
                        plot_limiter=False,                #Plot the limiter of NSTX from EFIT
                        flux_coordinates=False,            #Plot the signal as a function of magnetic coordinates
                        device_coordinates=False,          #Plot the signal as a function of the device coordinates
                        new_plot=True,                     #Plot the video into a new figure window
                        save_video=False,                  #Save the video into an mp4 format
                        video_saving_only=False,           #Saving only the video, not plotting it
                        prevent_saturation=False,          #Prevent saturation of the image by restarting the colormap
                        colormap='gist_ncar',              #Colormap for the plotting
                        cache_data=True,                   #Try to load the data from the FLAP storage
                        ):

    if exp_id is not None:
        print("\n------- Reading NSTX GPI data --------")
        if cache_data:
            try:
                d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI')
            except:
                print('Data is not cached, it needs to be read.')
                d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
        else:
            d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
        object_name='GPI'
    else:
        raise ValueError('The experiment ID needs to be set.')

    if time_range is None:
        print('time_range is None, the entire shot is plotted.')
        slicing=None
    else:
        if (type(time_range) is not list or len(time_range) != 2):
            raise TypeError('time_range needs to be a list with two elements.')
        #time_range=[time_range[0]/1000., time_range[1]/1000.]
        slicing={'Time':flap.Intervals(time_range[0],time_range[1])}
        d=flap.slice_data(object_name,
                          exp_id=exp_id,
                          slicing=slicing,
                          output_name='GPI_SLICED')
        object_name='GPI_SLICED'

    if plot_filtered:
        print("**** Filtering GPI ****")
        d=flap.filter_data(object_name,
                           exp_id=exp_id,
                           output_name='GPI_FILTERED',
                           coordinate='Time',
                           options={'Type':'Highpass',
                                    'f_low':1e2,
                                    'Design':'Chebyshev II'})
        object_name='GPI_FILTERED'

    if normalize is not None:
        print("**** Normalizing GPI ****")
        d=flap.get_data_object_ref(object_name)
        if normalize in ['Time averaged','Time dependent', 'Simple']:
            if normalize == 'Time averaged':
                coefficient=flap_nstx.analysis.calculate_nstx_gpi_norm_coeff(exp_id=exp_id,
                                                                             time_range=normalizer_time_range,
                                                                             f_high=1e2,
                                                                             design='Chebyshev II',
                                                                             filter_data=True,
                                                                             cache_data=True,
                                                                             )
            if normalize == 'Time dependent':
                coefficient=flap.filter_data('GPI',
                                             exp_id=exp_id,
                                             output_name='GPI_LPF',
                                             coordinate='Time',
                                             options={'Type':'Lowpass',
                                                      'f_high':1e2,
                                                      'Design':'Chebyshev II'})
                if slicing is not None:
                    coefficient=coefficient.slice_data(slicing=slicing)
            if normalize == 'Simple':
                coefficient=flap.slice_data(object_name,summing={'Time':'Mean'})

            data_obj=copy.deepcopy(d)
            data_obj.data = data_obj.data/coefficient.data
            flap.add_data_object(data_obj, 'GPI_DENORM')
            object_name='GPI_DENORM'
        else:
            raise ValueError('Normalize can either be "Time averaged", "Time dependent" or "Simple".')

    if subtract_background: #DEPRECATED, DOESN'T HELP MUCH
        print('**** Subtracting background ****')
        d=flap.get_data_object_ref(object_name, exp_id=exp_id)
        background=flap.slice_data(object_name,
                                   exp_id=exp_id,
                                   summing={'Time':'Mean'})
        data_obj=copy.deepcopy(d)
        data_obj.data=data_obj.data/background.data
        flap.add_data_object(data_obj, 'GPI_BGSUB')
        object_name='GPI_BGSUB'

    if ((plot_flux or plot_separatrix) and not flux_coordinates):
        print('Gathering MDSPlus EFIT data.')
        oplot_options={}
        if plot_separatrix:
            flap.get_data('NSTX_MDSPlus',
                          name='\EFIT01::\RBDRY',
                          exp_id=exp_id,
                          object_name='SEP X OBJ'
                          )
            flap.get_data('NSTX_MDSPlus',
                          name='\EFIT01::\ZBDRY',
                          exp_id=exp_id,
                          object_name='SEP Y OBJ'
                          )
            oplot_options['path']={'separatrix':{'Data object X':'SEP X OBJ',
                                                 'Data object Y':'SEP Y OBJ',
                                                 'Plot':True,
                                                 'Color':'red'}}
        if plot_flux:
            d=flap.get_data('NSTX_MDSPlus',
                            name='\EFIT02::\PSIRZ',
                            exp_id=exp_id,
                            object_name='PSI RZ OBJ'
                            )
            oplot_options['contour']={'flux':{'Data object':'PSI RZ OBJ',
                                              'Plot':True,
                                              'Colormap':None,
                                              'nlevel':51}}
        #oplot_options['line']={'trial':{'Horizontal':[[0.200,'red'],[0.250,'blue']],
        #                                'Vertical':[[1.450,'red'],[1.500,'blue']],
        #                                'Plot':True
        #                                }}
    else:
        oplot_options=None

    if flux_coordinates:
        print("**** Adding Flux r coordinates")
        d.add_coordinate(coordinates='Flux r',exp_id=exp_id)
        x_axis='Flux r'
        y_axis='Device z'
        if plot_separatrix:
            oplot_options={}
            oplot_options['line']={'separatrix':{'Vertical':[[1.0,'red']],
                                                 'Plot':True}}
    elif device_coordinates:
        x_axis='Device R'
        y_axis='Device z'
    else:
        x_axis='Image x'
        y_axis='Image y'

    if new_plot:
        plt.figure()

    if save_video:
        if time_range is not None:
            video_filename='NSTX_GPI_'+str(exp_id)+'_'+str(time_range[0])+'_'+str(time_range[1])+'.mp4'
        else:
            video_filename='NSTX_GPI_'+str(exp_id)+'_FULL.mp4'
    else:
        video_filename=None

    if video_saving_only:
        save_video=True

    if z_range is None:
        d=flap.get_data_object_ref(object_name, exp_id=exp_id)
        z_range=[d.data.min(),d.data.max()]

    if z_range[1] < 0:
        raise ValueError('All the values are negative, logarithmic plotting is not allowed.')

    if logz and z_range[0] <= 0:
        print('Z range should not start with 0 when a logarithmic Z axis is set. Forcing it to be 1 for now.')
        z_range[0]=1.

    if not save_video:
        flap.plot(object_name,plot_type='animation',
                  exp_id=exp_id,
                  axes=[x_axis,y_axis,'Time'],
                  options={'Z range':z_range,'Wait':0.0,'Clear':False,
                           'Overplot options':oplot_options,
                           'Colormap':colormap,
                           'Log z':logz,
                           'Equal axes':True,
                           'Prevent saturation':prevent_saturation,
                           'Plot units':{'Time':'s',
                                         'Device R':'m',
                                         'Device z':'m'}
                           })
    else:
        if video_saving_only:
            import matplotlib
            current_backend=matplotlib.get_backend()
            matplotlib.use('agg')
            waittime=0.
        else:
            waittime=1./24.
            waittime=0.
        flap.plot(object_name,plot_type='anim-image',
                  exp_id=exp_id,
                  axes=[x_axis,y_axis,'Time'],
                  options={'Z range':z_range,'Wait':0.0,'Clear':False,
                           'Overplot options':oplot_options,
                           'Colormap':colormap,
                           'Equal axes':True,
                           'Waittime':waittime,
                           'Video file':video_filename,
                           'Video format':'mp4',
                           'Prevent saturation':prevent_saturation,
                           })
        if video_saving_only:
            import matplotlib
            matplotlib.use(current_backend)
def show_nstx_gpi_video_frames(exp_id=None, time_range=None, start_time=None, n_frame=20, logz=False, z_range=[0,512], plot_filtered=False, normalize=False, cache_data=False, plot_flux=False, plot_separatrix=False, flux_coordinates=False, device_coordinates=False, new_plot=True, save_pdf=False, colormap='gist_ncar', save_for_paraview=False, colorbar_visibility=True ): if time_range is None and start_time is None: print('time_range is None, the entire shot is plotted.') if time_range is not None: if (type(time_range) is not list and len(time_range) != 2): raise TypeError('time_range needs to be a list with two elements.') if start_time is not None: if type(start_time) is not int and type(start_time) is not float: raise TypeError('start_time needs to be a number.') if not cache_data: #This needs to be enhanced to actually cache the data no matter what flap.delete_data_object('*') if exp_id is not None: print("\n------- Reading NSTX GPI data --------") if cache_data: try: d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI') except: print('Data is not cached, it needs to be read.') d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') else: d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') object_name='GPI' else: raise ValueError('The experiment ID needs to be set.') if time_range is None: time_range=[start_time,start_time+n_frame*2.5e-6] if normalize: flap.slice_data(object_name, slicing={'Time':flap.Intervals(time_range[0]-1/1e3*10, time_range[1]+1/1e3*10)}, output_name='GPI_SLICED_FOR_FILTERING') norm_obj=flap.filter_data('GPI_SLICED_FOR_FILTERING', exp_id=exp_id, coordinate='Time', options={'Type':'Lowpass', 'f_high':1e3, 'Design':'Elliptic'}, output_name='GAS_CLOUD') norm_obj.data=np.flip(norm_obj.data,axis=0) norm_obj=flap.filter_data('GAS_CLOUD', exp_id=exp_id, coordinate='Time', options={'Type':'Lowpass', 'f_high':1e3, 'Design':'Elliptic'}, output_name='GAS_CLOUD') norm_obj.data=np.flip(norm_obj.data,axis=0) coefficient=flap.slice_data('GAS_CLOUD', exp_id=exp_id, slicing={'Time':flap.Intervals(time_range[0],time_range[1])}, output_name='GPI_GAS_CLOUD').data data_obj=flap.slice_data('GPI', exp_id=exp_id, slicing={'Time':flap.Intervals(time_range[0],time_range[1])}) data_obj.data = data_obj.data/coefficient flap.add_data_object(data_obj, 'GPI_SLICED_DENORM') object_name='GPI_SLICED_DENORM' if plot_filtered: print("**** Filtering GPI") object_name='GPI_FILTERED' try: flap.get_data_object_ref(object_name, exp_id=exp_id) except: flap.filter_data(object_name, exp_id=exp_id, coordinate='Time', options={'Type':'Highpass', 'f_low':1e2, 'Design':'Chebyshev II'}, output_name='GPI_FILTERED') #Data is in milliseconds if plot_flux or plot_separatrix: print('Gathering MDSPlus EFIT data.') oplot_options={} if plot_separatrix: flap.get_data('NSTX_MDSPlus', name='\EFIT01::\RBDRY', exp_id=exp_id, object_name='SEP X OBJ' ) flap.get_data('NSTX_MDSPlus', name='\EFIT01::\ZBDRY', exp_id=exp_id, object_name='SEP Y OBJ' ) if plot_flux: d=flap.get_data('NSTX_MDSPlus', name='\EFIT01::\PSIRZ', exp_id=exp_id, object_name='PSI RZ OBJ' ) x_axis='Device R' y_axis='Device z' else: oplot_options=None if flux_coordinates: print("**** Adding Flux r coordinates") d.add_coordinate(coordinates='Flux r',exp_id=exp_id) x_axis='Flux r' y_axis='Device z' elif device_coordinates: x_axis='Device R' y_axis='Device z' if (not device_coordinates and not plot_separatrix and not flux_coordinates): x_axis='Image x' y_axis='Image y' if start_time is not None: 
start_sample_num=flap.slice_data(object_name, slicing={'Time':start_time}).coordinate('Sample')[0][0,0] if n_frame == 30: ny=6 nx=5 if n_frame == 20: ny=5 nx=4 gs=GridSpec(nx,ny) for index_grid_x in range(nx): for index_grid_y in range(ny): plt.subplot(gs[index_grid_x,index_grid_y]) if start_time is not None: slicing={'Sample':start_sample_num+index_grid_x*ny+index_grid_y} else: time=time_range[0]+(time_range[1]-time_range[0])/(n_frame-1)*(index_grid_x*ny+index_grid_y) slicing={'Time':time} d=flap.slice_data(object_name, slicing=slicing, output_name='GPI_SLICED') slicing={'Time':d.coordinate('Time')[0][0,0]} if plot_flux: flap.slice_data('PSI RZ OBJ',slicing=slicing,output_name='PSI RZ SLICE',options={'Interpolation':'Linear'}) oplot_options['contour']={'flux':{'Data object':'PSI RZ SLICE', 'Plot':True, 'Colormap':None, 'nlevel':51}} if plot_separatrix: flap.slice_data('SEP X OBJ',slicing=slicing,output_name='SEP X SLICE',options={'Interpolation':'Linear'}) flap.slice_data('SEP Y OBJ',slicing=slicing,output_name='SEP Y SLICE',options={'Interpolation':'Linear'}) oplot_options['path']={'separatrix':{'Data object X':'SEP X SLICE', 'Data object Y':'SEP Y SLICE', 'Plot':True, 'Color':'red'}} visibility=[True,True] if index_grid_x != nx-1: visibility[0]=False if index_grid_y != 0: visibility[1]=False flap.plot('GPI_SLICED', plot_type='contour', exp_id=exp_id, axes=[x_axis,y_axis,'Time'], options={'Z range':z_range, 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Plot units':{'Device R':'m', 'Device z':'m'}, 'Axes visibility':visibility, 'Colormap':colormap, 'Colorbar':colorbar_visibility, 'Overplot options':oplot_options, }, plot_options={'levels':255}, ) actual_time=d.coordinate('Time')[0][0,0] #plt.title(str(exp_id)+' @ '+f"{actual_time*1000:.4f}"+'ms') plt.title(f"{actual_time*1000:.3f}"+'ms') if save_pdf: if time_range is not None: plt.savefig('NSTX_GPI_video_frames_'+str(exp_id)+'_'+str(time_range[0])+'_'+str(time_range[1])+'_nf_'+str(n_frame)+'.pdf') else: plt.savefig('NSTX_GPI_video_frames_'+str(exp_id)+'_'+str(start_time)+'_nf_'+str(n_frame)+'.pdf')
import numpy as np
import scipy

nstx_gpi_generate_synthetic_data(exp_id=1,
                                 time=0.0001,
                                 output_name='test',
                                 poloidal_velocity=10e3,
                                 radial_velocity=5e3,
                                 poloidal_size=0.02,
                                 radial_size=0.03,
                                 start_position=[1.5, 0.3],
                                 amplitude=1.,
                                 gaussian=True,
                                 add_background=False)

flap.slice_data('test', slicing={'Sample': 2}, output_name='test_0')
flap.slice_data('test', slicing={'Sample': 3}, output_name='test_1')

#flap.ccf('test_0', 'test_1',
#         coordinate=['Image x'],
#         options={'Resolution':1, 'Range':[-63,63], 'Trend removal':None, 'Normalize':True, 'Interval_n': 1},
#         output_name='test_01_correlation')

flap.ccf('test_0', 'test_1',
         coordinate=['Image x', 'Image y'],
         options={'Resolution': 1,
                  'Range': [[-63, 63], [-79, 79]],
                  'Trend removal': None,
                  'Normalize': True,
                  'Interval_n': 1
correlation_evolution = np.zeros([n_frames + 1, n_lag + 1])
time_ranges = np.asarray([np.arange(0, n_lag + 1) / (n_lag) * (time_range[1] - time_range[0]) + time_range[0],
                          np.arange(0, n_lag + 1) / (n_lag) * (time_range[1] - time_range[0]) + time_range[0] + time_window]).T

flap.get_data('NSTX_GPI', exp_id=exp_id, name='', object_name='GPI')

for j in range(n_lag + 1):
    sample0 = flap.get_data_object_ref('GPI').slice_data(slicing={'Time': time_ranges[j, 0]}).coordinate('Sample')[0][0, 0]
    d = flap.slice_data('GPI',
                        slicing={'Sample': flap.Intervals(sample0, sample0 + n_frames)},
                        output_name='GPI_SLICED')
    d.data = d.data / np.mean(d.data, axis=0)
    d = flap_nstx.analysis.detrend_multidim('GPI_SLICED',
                                            order=1,
                                            coordinates=['Image x', 'Image y'],
                                            output_name='GPI_SLICED_DETREND')
    time = flap.get_data_object_ref('GPI_SLICED_DETREND').coordinate('Time')[0][:, 0, 0]
    time = time - time[0]
    frame1 = np.asarray(flap.slice_data('GPI_SLICED_DETREND',
                                        slicing={'Sample': sample0},
flap_mdsplus.register('NSTX_MDSPlus')

thisdir = os.path.dirname(os.path.realpath(__file__))
fn = os.path.join(thisdir, "flap_nstx.cfg")
flap.config.read(file_name=fn)

import matplotlib.pyplot as plt
from matplotlib import path as pltPath
import numpy as np
from scipy import interpolate
from scipy.signal import correlate

order = 0

flap.get_data('NSTX_GPI', exp_id=139901, name='', object_name='GPI_RAW')
d11 = flap.slice_data('GPI_RAW', slicing={'Time': 0.324}, output_name='GPI_RAW_SLICED')
d12 = flap.slice_data('GPI_RAW', slicing={'Time': 0.324 + 2.5e-6}, output_name='GPI_RAW_SLICED_2')

trend1 = flap_nstx.analysis.detrend_multidim(data_object='GPI_RAW_SLICED',
                                             coordinates=['Image x', 'Image y'],
                                             order=order,
                                             output_name='GPI_RAW_SLICED_DETREND',
                                             return_trend=True)
trend2 = flap_nstx.analysis.detrend_multidim(data_object='GPI_RAW_SLICED_2',
                                             coordinates=['Image x', 'Image y'],
                                             order=order,
                                             output_name='GPI_RAW_SLICED_2_DETREND',
def calculate_nstx_gpi_crosspower(exp_id=None,
                                  time_range=None,
                                  normalize_signal=False,          #Normalize the amplitude for the average frame for the entire time range
                                  reference_pixel=None,
                                  reference_position=None,
                                  reference_flux=None,
                                  reference_area=None,             #In the unit of the reference, [psi,z] if reference_flux is not None
                                  fres=1.,                         #in kHz due to the data being in ms
                                  flog=False,
                                  frange=None,
                                  interval_n=8.,
                                  filename=None,
                                  options=None,
                                  cache_data=False,
                                  normalize=False,                 #Calculate coherency if True
                                  plot=False,
                                  plot_phase=False,
                                  axes=['Image x', 'Image y', 'Frequency'],
                                  hanning=True,
                                  colormap=None,
                                  video_saving_only=False,
                                  video_filename=None,
                                  save_video=False,
                                  comment=None,
                                  zlog=False,
                                  save_for_paraview=False
                                  ):
    #139901 [300,307]
    #This function returns the crosspower between a single reference signal and all the other signals in the GPI.
    #A separate function is dedicated to multi-channel references,
    #e.g. a 3x3 reference area and the 64x80 image resulting in 3x3x64x80 cross power spectra.

    #Read data from the cine file
    if time_range is None:
        print('The time range needs to be set for the calculation.')
        print('There is no point in calculating the entire time range.')
        return
    else:
        if (type(time_range) is not list or len(time_range) != 2):
            raise TypeError('time_range needs to be a list with two elements.')

    if exp_id is not None:
        print("\n------- Reading NSTX GPI data --------")
        if cache_data:
            try:
                d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI')
            except:
                print('Data is not cached, it needs to be read.')
                d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
        else:
            d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
    else:
        raise ValueError('The experiment ID needs to be set.')

    if reference_flux is not None:
        d.add_coordinate(coordinates='Flux r',exp_id=exp_id)

    #Normalize the data for the maximum cloud distribution
    if normalize_signal:
        normalizer=flap_nstx.analysis.calculate_nstx_gpi_norm_coeff(exp_id=exp_id,            # Experiment ID
                                                                    f_high=1e2,               # Low pass filter frequency in Hz
                                                                    design='Chebyshev II',    # IIR filter design (from scipy)
                                                                    test=False,               # Testing input
                                                                    filter_data=True,         # IIR LPF the data
                                                                    time_range=None,          # Time range for the averaging in ms [t1,t2]
                                                                    calc_around_max=False,    # Calculate the average around the maximum of the GPI signal
                                                                    time_window=50.,          # The time window for the calc_around_max calculation
                                                                    cache_data=True,
                                                                    verbose=False,
                                                                    )
        d.data = d.data/normalizer.data  #This should be checked to some extent, it works with smaller matrices

    #Calculate the crosspower spectra for the time range between the reference pixel and all the other pixels
    if reference_pixel is None and reference_position is None and reference_flux is None:
        calculate_apsd=True
        print('No reference is defined, returning autopower spectra.')
    else:
        calculate_apsd=False
        reference_signal=flap_nstx.analysis.calculate_nstx_gpi_reference('GPI',
                                                                         exp_id=exp_id,
                                                                         time_range=time_range,
                                                                         reference_pixel=reference_pixel,
                                                                         reference_area=reference_area,
                                                                         reference_position=reference_position,
                                                                         reference_flux=reference_flux,
                                                                         output_name='GPI_REF')

    flap.slice_data('GPI',exp_id=exp_id,
                    slicing={'Time':flap.Intervals(time_range[0],time_range[1])},
                    output_name='GPI_SLICED')

    if calculate_apsd:
        object_name='GPI_APSD'
        d=flap.apsd('GPI_SLICED',exp_id=exp_id,
                    coordinate='Time',
                    options={'Resolution':fres,
                             'Range':frange,
                             'Logarithmic':flog,
                             'Interval_n':interval_n,
                             'Hanning':hanning,
                             'Trend removal':None,
                             },
                    output_name=object_name)
    else:
        object_name='GPI_CPSD'
        flap.cpsd('GPI_SLICED',exp_id=exp_id,
                  ref=reference_signal,
                  coordinate='Time',
                  options={'Resolution':fres,
                           'Range':frange,
                           'Logarithmic':flog,
                           'Interval_n':interval_n,
                           'Hanning':hanning,
                           'Normalize':normalize,
                           'Trend removal':None,
                           },
                  output_name=object_name)
        flap.abs_value(object_name,exp_id=exp_id,
                       output_name='GPI_CPSD_ABS')
        flap.phase(object_name,exp_id=exp_id,
                   output_name='GPI_CPSD_PHASE')

    if not save_video:
        if plot:
            if calculate_apsd:
                object_name='GPI_APSD'
            else:
                if plot_phase:
                    object_name='GPI_CPSD_PHASE'
                else:
                    object_name='GPI_CPSD_ABS'
            flap.plot(object_name,
                      exp_id=exp_id,
                      plot_type='animation',
                      axes=axes,
                      options={'Force axes':True,
                               'Colormap':colormap,
                               'Plot units':{'Device R':'mm',
                                             'Device z':'mm',
                                             'Frequency':'kHz'},
                               'Log z':zlog})
    else:
        if video_filename is None:
            if time_range is not None:
                video_filename='NSTX_GPI_'+str(exp_id)
                if calculate_apsd:
                    video_filename+='_APSD'
                else:
                    video_filename+='_CPSD'
                    if plot_phase:
                        video_filename+='_PHASE'
                    else:
                        video_filename+='_ABS'
                video_filename+='_'+str(time_range[0])+'_'+str(time_range[1])
                if reference_pixel is not None:
                    video_filename+='_PIX_'+str(reference_pixel[0])+'_'+str(reference_pixel[1])
                if reference_position is not None:
                    video_filename+='_POS_'+str(reference_position[0])+'_'+str(reference_position[1])
                if reference_flux is not None:
                    video_filename+='_FLX_'+str(reference_flux[0])+'_'+str(reference_flux[1])
                video_filename+='_FRES_'+str(fres)
                if comment is not None:
                    video_filename+=comment
                video_filename+='.mp4'
            else:
                video_filename='NSTX_GPI_CPSD_'+str(exp_id)+'_FULL.mp4'

        if video_saving_only:
            import matplotlib
            current_backend=matplotlib.get_backend()
            matplotlib.use('agg')
            waittime=0.
        else:
            waittime=1.

        if calculate_apsd:
            object_name='GPI_APSD'
        else:
            if plot_phase:
                object_name='GPI_CPSD_PHASE'
            else:
                object_name='GPI_CPSD_ABS'

        flap.plot(object_name,
                  exp_id=exp_id,
                  plot_type='anim-contour',
                  axes=axes,
                  options={'Force axes':True,
                           'Colormap':colormap,
                           'Plot units':{'Device R':'mm',
                                         'Device z':'mm',
                                         },
                           'Waittime':waittime,
                           'Video file':video_filename,
                           'Video format':'mp4',
                           'Log z':zlog})
        if video_saving_only:
            import matplotlib
            matplotlib.use(current_backend)
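# A minimal numpy sketch (not part of the original function) of splitting a complex
# cross-spectrum into magnitude and phase, which is what the flap.abs_value / flap.phase
# calls above produce for the GPI cross-power object.
def _sketch_crosspower_abs_phase(cross_spectrum):
    import numpy as np

    magnitude = np.abs(cross_spectrum)      # |S_xy(f)|
    phase = np.angle(cross_spectrum)        # arg S_xy(f) in radians
    return magnitude, phase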
def test_ccf():
    plt.close('all')
    print()
    print('>>>>>>>>>>>>>>>>>>> Test ccf (Cross Correlation Function) <<<<<<<<<<<<<<<<<<<<<<<<')
    flap.delete_data_object('*')
    print("**** Generating 10x15 random test signals, 5000 points each, 1 MHz sampling.")
    flap.get_data('TESTDATA',
                  name='TEST-*-*',
                  options={'Length': 0.005, 'Signal': 'Random'},
                  object_name='TESTDATA')
    print("**** Filtering with 10 microsec integrating filter.")
    flap.filter_data('TESTDATA',
                     coordinate='Time',
                     options={'Type': 'Int', 'Tau': 1e-5},
                     output_name='TESTDATA_filt')
    flap.list_data_objects()
    plt.figure()
    print("**** Plotting an original and a filtered signal.")
    flap.plot('TESTDATA', slicing={'Row': 1, 'Column': 1}, axes='Time')
    flap.plot('TESTDATA_filt', slicing={'Row': 1, 'Column': 1})
    print('**** Calculating the 10x15x10x15 CCFs, each from 5000 samples.')
    print('**** CCF START')
    start = time.time()
    flap.ccf('TESTDATA_filt',
             coordinate='Time',
             options={'Trend': 'Mean',
                      'Range': [-1e-4, 1e-4],
                      'Res': 1e-5,
                      'Norm': True},
             output_name='CCF')
    stop = time.time()
    print('**** CCF STOP')
    print("**** Calculation time: {:6.3f} ms/signal".format(1000 * (stop - start) / (10 * 15 * 10 * 15)))
    flap.list_data_objects()
    print("**** Plotting the spatiotemporal correlation function at reference row/column (3,3), column 3")
    plt.figure()
    flap.plot('CCF',
              slicing={'Row (Ref)': 3, 'Column (Ref)': 3, 'Column': 3},
              axes=['Time lag'],
              plot_type='multi xy')
    print("**** Slicing TESTDATA_filt for row: 1-3, column: 1-4")
    flap.slice_data('TESTDATA_filt',
                    slicing={'Row': [1, 2, 3], 'Column': [1, 2, 3, 4]},
                    output_name='TESTDATA_filt_3x4')
    print('**** Calculating CCFs between the original and the sliced TESTDATA_filt')
    print('**** CCF START')
    flap.ccf('TESTDATA_filt',
             ref='TESTDATA_filt_3x4',
             coordinate='Time',
             options={'Trend': 'Mean',
                      'Range': [-1e-4, 1e-4],
                      'Res': 1e-5,
                      'Norm': True},
             output_name='CCF_ref')
    print('**** CCF STOP')
    flap.list_data_objects()
    print("**** Plotting the spatiotemporal correlation function at reference row/column (3,3), column 3")
    plt.figure()
    flap.plot('CCF_ref',
              slicing={'Row (Ref)': 3, 'Column (Ref)': 3, 'Column': 3},
              axes=['Time lag'],
              plot_type='multi xy')
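# A minimal sketch (not part of the original test) of a normalized cross-correlation
# function versus time lag for two 1-D signals, the quantity flap.ccf estimates with
# 'Norm': True; the sampling rate and signal content are illustrative assumptions.
def _sketch_normalized_ccf():
    import numpy as np
    from scipy.signal import correlate, correlation_lags

    fs = 1e6
    rng = np.random.default_rng(1)
    x = rng.standard_normal(5000)
    y = np.roll(x, 7) + 0.5 * rng.standard_normal(5000)    # y lags x by 7 samples

    xd = x - x.mean()
    yd = y - y.mean()
    ccf = correlate(yd, xd, mode='full') / (np.std(xd) * np.std(yd) * xd.size)
    lags = correlation_lags(yd.size, xd.size, mode='full') / fs
    return lags, ccf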
def plot_results_for_paper(): pearson=False wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory'] #Figure 1 '''NO CODE IS NEEDED''' #Figure 2 '''NO CODE IS NEEDED''' #Figure 3 from flap_nstx.analysis import show_nstx_gpi_video_frames #fig, ax = plt.subplots(figsize=(6.5,5)) if plot[3]: gs=GridSpec(5,2) ax,fig=plt.subplots(figsize=(8.5/2.54,6)) pdf=PdfPages(wd+'/plots/figure_3_139901_basic_plots.pdf') plt.subplot(gs[0,0]) flap.get_data('NSTX_MDSPlus', name='\WF::\DALPHA', exp_id=139901, object_name='DALPHA').plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.subplot(gs[1,0]) flap.get_data('NSTX_GPI', name='', exp_id=139901, object_name='GPI').slice_data(summing={'Image x':'Mean', 'Image y':'Mean'}).plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.xlim([0,1.2]) plt.subplot(gs[2,0]) flap.get_data('NSTX_MDSPlus', name='IP', exp_id=139901, object_name='IP').plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.subplot(gs[3,0]) d=flap_nstx_thomson_data(exp_id=139901, density=True, output_name='DENSITY') dR = d.coordinate('Device R')[0][:,:]-np.insert(d.coordinate('Device R')[0][0:-1,:],0,0,axis=0) LID=np.sum(d.data*dR,axis=0) plt.plot(d.coordinate('Time')[0][0,:],LID) plt.title('Line integrated density') plt.xlabel('Time [s]') plt.ylabel('n_e [m^-2]') plt.xlim([0,1.2]) ax=plt.gca() ax.get_xaxis().set_visible(False) plt.subplot(gs[4,0]) magnetics=flap.get_data('NSTX_MDSPlus', name='\OPS_PC::\\BDOT_L1DMIVVHF5_RAW', exp_id=139901, object_name='MIRNOV') magnetics.coordinates.append(copy.deepcopy(flap.Coordinate(name='Time equi', unit='s', mode=flap.CoordinateMode(equidistant=True), shape = [], start=magnetics.coordinate('Time')[0][0], step=magnetics.coordinate('Time')[0][1]-magnetics.coordinate('Time')[0][0], dimension_list=[0]))) magnetics.filter_data(coordinate='Time equi', options={'Type':'Bandpass', 'f_low':100e3, 'f_high':500e3, 'Design':'Elliptic'}).plot() plt.xlim([0,1.2]) plt.subplot(gs[0,1]) flap.get_data('NSTX_MDSPlus', name='\WF::\DALPHA', exp_id=139901, object_name='DALPHA').plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[1,1]) flap.get_data('NSTX_GPI', name='', exp_id=139901, object_name='GPI').slice_data(summing={'Image x':'Mean', 'Image y':'Mean'}).plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[2,1]) flap.get_data('NSTX_MDSPlus', name='IP', exp_id=139901, object_name='IP').plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[3,1]) d=flap_nstx_thomson_data(exp_id=139901, density=True, output_name='DENSITY') dR = d.coordinate('Device R')[0][:,:]-np.insert(d.coordinate('Device R')[0][0:-1,:],0,0,axis=0) LID=np.sum(d.data*dR,axis=0) plt.plot(d.coordinate('Time')[0][0,:],LID) plt.title('Line integrated density') plt.xlabel('Time [s]') plt.ylabel('n_e [m^-2]') plt.xlim([0.25,0.4]) ax=plt.gca() ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) plt.subplot(gs[4,1]) magnetics=flap.get_data('NSTX_MDSPlus', name='\OPS_PC::\\BDOT_L1DMIVVHF5_RAW', exp_id=139901, object_name='MIRNOV') magnetics.coordinates.append(copy.deepcopy(flap.Coordinate(name='Time equi', unit='s', mode=flap.CoordinateMode(equidistant=True), shape = [], start=magnetics.coordinate('Time')[0][0], step=magnetics.coordinate('Time')[0][1]-magnetics.coordinate('Time')[0][0], dimension_list=[0]))) magnetics.filter_data(coordinate='Time equi', options={'Type':'Bandpass', 'f_low':100e3, 'f_high':500e3, 
'Design':'Elliptic'}).plot(slicing={'Time':flap.Intervals(0.25,0.4)}) plt.xlim([0.25,0.4]) ax=plt.gca() ax.get_yaxis().set_visible(False) pdf.savefig() pdf.close() if plot[4]: plt.figure() ax,fig=plt.subplots(figsize=(3.35*2,5.5)) pdf=PdfPages(wd+'/plots/figure_5_139901_0.3249158_30_frame.pdf') show_nstx_gpi_video_frames(exp_id=139901, start_time=0.3249158, n_frame=30, logz=False, z_range=[0,3900], plot_filtered=False, normalize=False, cache_data=False, plot_flux=False, plot_separatrix=True, flux_coordinates=False, device_coordinates=True, new_plot=False, save_pdf=True, colormap='gist_ncar', save_for_paraview=False, colorbar_visibility=True ) pdf.savefig() pdf.close() #Figure 5 if plot[5] or plot[6] or plot[7]: try: d1,d2,d3,d4=pickle.load(open(wd+'/processed_data/fig_6_8_flap_object.pickle','rb')) flap.add_data_object(d1, 'GPI_SLICED_FULL') flap.add_data_object(d2, 'GPI_GAS_CLOUD') flap.add_data_object(d3, 'GPI_SLICED_DENORM_CCF_VEL') flap.add_data_object(d4, 'GPI_CCF_F_BY_F') except: calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.325-1e-3,0.325+1e-3], plot=False, subtraction_order_for_velocity=1, skip_structure_calculation=False, correlation_threshold=0., pdf=False, nlevel=51, nocalc=False, filter_level=3, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True) pickle.dump((flap.get_data_object('GPI_SLICED_FULL'), flap.get_data_object('GPI_GAS_CLOUD'), flap.get_data_object('GPI_SLICED_DENORM_CCF_VEL'), flap.get_data_object('GPI_CCF_F_BY_F')), open(wd+'/processed_data/fig_6_8_flap_object.pickle','wb')) if plot[5]: pdf=PdfPages(wd+'/plots/figure_6_normalization.pdf') times=[0.3245,0.3249560,0.3255] signals=['GPI_SLICED_FULL', 'GPI_GAS_CLOUD', 'GPI_SLICED_DENORM_CCF_VEL'] gs=GridSpec(3,3) plt.figure() ax,fig=plt.subplots(figsize=(3.35,4)) titles=['Raw frame', 'Gas cloud', 'Normalized'] for index_grid_x in range(3): for index_grid_y in range(3): plt.subplot(gs[index_grid_x,index_grid_y]) visibility=[True,True] if index_grid_x != 3-1: visibility[0]=False if index_grid_y != 0: visibility[1]=False # if index_grid_x == 0: # z_range=[0,4096] # elif index_grid_x == 1: # z_range=[0,400] # elif index_grid_x == 2: # z_range=[0,40] z_range=None flap.plot(signals[index_grid_x], plot_type='contour', slicing={'Time':times[index_grid_y]}, axes=['Image x', 'Image y'], options={'Z range':z_range, 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':visibility, #'Colormap':'gist_ncar', 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) if index_grid_x == 0: #ax=plt.gca() plt.title(f"{times[index_grid_y]*1e3:.3f}"+' '+titles[index_grid_x]) else: plt.title(titles[index_grid_x]) pdf.savefig() pdf.close() #Figure 6 if plot[6]: flap.get_data('NSTX_GPI',exp_id=139901, name='', object_name='GPI') flap.slice_data('GPI', slicing={'Time':flap.Intervals(0.3245,0.3255)}, output_name='GPI_SLICED_FULL') data_object_name='GPI_SLICED_DENORM_CCF_VEL' detrended=flap_nstx.analysis.detrend_multidim(data_object_name, exp_id=139901, order=4, coordinates=['Image x', 'Image y'], output_name='GPI_DETREND_VEL') d=copy.deepcopy(flap.get_data_object(data_object_name)) d.data=d.data-detrended.data flap.add_data_object(d,'GPI_TREND') signals=[data_object_name, 'GPI_TREND', 'GPI_DETREND_VEL'] pdf=PdfPages(wd+'/plots/figure_7_trend_subtraction.pdf') gs=GridSpec(1,3) plt.figure() ax,fig=plt.subplots(figsize=(8.5/2.54,2)) for 
index_grid_x in range(3): plt.subplot(gs[index_grid_x]) visibility=[True,True] if index_grid_x != 0: visibility[1]=False z_range=[0,10] colorbar=False flap.plot(signals[index_grid_x], plot_type='contour', slicing={'Time':0.3249560}, #slicing={'Sample':29808}, axes=['Image x', 'Image y'], options={'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':visibility, #'Colormap':colormap, 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) #fig.tight_layout() pdf.savefig() pdf.close() #Figure 7 if plot[7]: pdf=PdfPages(wd+'/plots/figure_8_CCF_frame_by_frame.pdf') gs=GridSpec(1,3) plt.figure() ax,fig=plt.subplots(figsize=(8.5/2.54,2)) plt.subplot(gs[0]) flap.plot('GPI_SLICED_FULL', plot_type='contour', slicing={'Sample':29806}, axes=['Image x', 'Image y'], options={ 'Z range':[0,4096], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,True], 'Colormap':'gist_ncar', 'Colorbar':False, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) plt.title("324.959ms") plt.subplot(gs[1]) flap.plot('GPI_SLICED_FULL', plot_type='contour', slicing={'Sample':29807}, axes=['Image x', 'Image y'], options={'Z range':[0,4096], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,False], 'Colorbar':False, 'Colormap':'gist_ncar', }, plot_options={'levels':51}, ) plt.title("324.961ms") plt.subplot(gs[2]) flap.plot('GPI_CCF_F_BY_F', plot_type='contour', slicing={'Sample':29807, 'Image x':flap.Intervals(-10,10),'Image y':flap.Intervals(-10,10)}, axes=['Image x', 'Image y'], options={ #'Z range':[0,2048], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,True], #'Colormap':colormap, 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) plt.title("CCF") pdf.savefig() pdf.close() #Figure 8 if plot[8]: #2x2 frames with the found structures during an ELM burst calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.32495,0.325], plot=False, subtraction_order_for_velocity=4, skip_structure_calculation=False, correlation_threshold=0.5, pdf=True, nlevel=51, nocalc=False, filter_level=5, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True, structure_pixel_calc=True, structure_pdf_save=True, test_structures=True ) #Post processing done with illustrator #Figure 9 if plot[9]: #2x3 #Synthetic GPI signal #Postprocessing done with illustrator nstx_gpi_generate_synthetic_data(exp_id=1, time=0.0001, amplitude=1.0, output_name='test', poloidal_velocity=3e3, radial_velocity=0., poloidal_size=0.10, radial_size=0.05, waveform_divider=1, sinusoidal=True) d=flap.get_data_object('test', exp_id=1) d.data=d.data-np.mean(d.data,axis=0) calculate_nstx_gpi_avg_frame_velocity(data_object='test', exp_id=1, time_range=[0.000000,0.00005], plot=False, subtraction_order_for_velocity=1, skip_structure_calculation=False, correlation_threshold=0.5, pdf=True, nlevel=51, nocalc=False, filter_level=5, normalize_for_size=False, normalize_for_velocity=False, threshold_coeff=1., normalize_f_high=1e3, normalize=None, velocity_base='cog', return_results=False, plot_gas=False, structure_pixel_calc=True, structure_pdf_save=True, test_structures=True ) #Figure 10 if plot[10]: #Single shot results calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.325-2e-3,0.325+2e-3], 
plot_time_range=[0.325-0.5e-3,0.325+0.5e-3], plot=True, subtraction_order_for_velocity=4, skip_structure_calculation=False, correlation_threshold=0.6, pdf=True, nlevel=51, nocalc=True, gpi_plane_calculation=True, filter_level=5, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True, plot_for_publication=True, plot_scatter=False, overplot_average=False, overplot_str_vel=False) #2x3 #Done with Illustrator #Figure 12 if plot[11]: #Conditional averaged results calculate_avg_velocity_results(pdf=True, plot=True, plot_max_only=True, plot_for_publication=True, normalized_velocity=True, subtraction_order=4, normalized_structure=True, opacity=0.5, correlation_threshold=0.6, gpi_plane_calculation=True, plot_scatter=False) #Post processing done with Illustrator #Figure 11 if plot[12]: if pearson: pdf=PdfPages(wd+'/plots/figure_13_pearson_matrix.pdf') pearson=calculate_nstx_gpi_correlation_matrix(calculate_average=False, gpi_plane_calculation=True, window_average=0.050e-3, elm_burst_window=True) data=pearson[:,:,0] variance=pearson[:,:,1] data[10,10]=-1 plt.figure() plt.subplots(figsize=(8.5/2.54,8.5/2.54/1.618)) plt.matshow(data, cmap='seismic') plt.xticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max'], rotation='vertical') plt.yticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max']) plt.colorbar() plt.show() pdf.savefig() plt.figure() plt.subplots(figsize=(8.5/2.54,8.5/2.54/1.618)) variance[10,10]=-1 variance[9,9]=1 plt.matshow(variance, cmap='seismic') #plt.matshow(data, cmap='gist_ncar') plt.xticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max'], rotation='vertical') plt.yticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max']) plt.colorbar() plt.show() pdf.savefig() pdf.close() else: pdf=PdfPages(wd+'/plots/figure_13_dependence.pdf') plt.figure() plt.subplots(figsize=(17/2.54,17/2.54/1.618)) plot_all_parameters_vs_all_other_average(window_average=0.2e-3, symbol_size=0.3, plot_error=True) pdf.savefig() pdf.close()
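# A minimal matplotlib sketch (not part of the original function) of the multi-page PDF
# pattern used throughout plot_results_for_paper: open a PdfPages object, save each
# finished figure into it as a page, then close it. The filename is an illustrative assumption.
def _sketch_multipage_pdf():
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.backends.backend_pdf import PdfPages

    with PdfPages('example_figures.pdf') as pdf:
        for k in range(3):
            plt.figure()
            plt.plot(np.arange(10) ** (k + 1))
            plt.title('Page {:d}'.format(k + 1))
            pdf.savefig()                    # append the current figure as a new page
            plt.close()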
def show_nstx_gpi_timetrace(exp_id=None,
                            plot_filtered=False,
                            time_range=None,
                            new_plot=False,
                            overplot=False,
                            scale=1.0,
                            save_pdf=False,
                            cache_data=True,
                            ):
    plot_options={}
    if time_range is None:
        print('time_range is None, the entire shot is plotted.')
        slicing_range=None
    else:
        if (type(time_range) is not list or len(time_range) != 2):
            raise TypeError('time_range needs to be a list with two elements.')
        plot_options['X range']=time_range
        slicing_range={'Time':flap.Intervals(time_range[0],time_range[1])}

    if exp_id is not None:
        print("\n------- Reading NSTX GPI data --------")
        if cache_data:
            try:
                d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI')
            except:
                print('Data is not cached, it needs to be read.')
                d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
        else:
            flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI')
    else:
        raise ValueError('The experiment ID needs to be set.')

    flap.slice_data('GPI',
                    #slicing=slicing_range,
                    slicing=slicing_range,
                    summing={'Image x':'Mean','Image y':'Mean'},
                    output_name='GPI_MEAN')
    object_name='GPI_MEAN'

    if plot_filtered:
        print("**** Filtering GPI")
        object_name='GPI_MEAN_FILTERED'
        flap.filter_data('GPI_MEAN',output_name='GPI_MEAN_FILTERED',coordinate='Time',
                         options={'Type':'Highpass',
                                  'f_low':1e2,
                                  'Design':'Chebyshev II'})  #Data is in milliseconds

    if scale != 1.0:
        d=flap.get_data_object_ref(object_name, exp_id)
        d.data=d.data*scale

    if new_plot and not overplot:
        plt.figure()
    elif overplot:
        plot_options['Force axes']=True
    else:
        plt.cla()

    plot_options['All points']=True

    flap.plot(object_name,
              axes=['Time', '__Data__'],
              exp_id=exp_id,
              options=plot_options)

    if save_pdf:
        if time_range is not None:
            filename='NSTX_'+str(exp_id)+'_GPI_'+str(time_range[0])+'_'+str(time_range[1])+'_mean.pdf'
        else:
            filename='NSTX_'+str(exp_id)+'_GPI_mean.pdf'
        plt.savefig(filename)
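# A minimal numpy sketch (not part of the original function) of the mean GPI timetrace
# that the 'Image x'/'Image y' summing above computes from a (time, x, y) data cube.
def _sketch_mean_timetrace(frames):
    import numpy as np

    # Average over both image dimensions, leaving a 1-D signal versus time
    return np.mean(frames, axis=(1, 2))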
def export_gpi_data_to_paraview(exp_id=None,
                                time_range=None,
                                filename=None,
                                filter_data=True,
                                flux_coordinates=False):
    if filename is None:
        filename = 'GPI_FOR_PARAVIEW_' + str(exp_id) + '_' + str(time_range[0]) + '_' + str(time_range[1])

#    d=flap.get_data('NSTX_GPI', exp_id=exp_id, name='', object_name='GPI')
#    d=flap.slice_data('GPI', slicing={'Time':flap.Intervals(time_range[0],time_range[1])})
#    if filter_data:
#        d=flap.filter_data('GPI',exp_id=exp_id,
#                           coordinate='Time',
#                           options={'Type':'Highpass',
#                                    'f_low':1e2,
#                                    'Design':'Chebyshev II'})
#        filename=filename+'_FILTERED'
#    time=d.coordinate('Time')[0]
#    x=d.coordinate('Device R')[0].flatten()
#    y=d.coordinate('Device z')[0].flatten()
#    t=time.flatten()
#    data=d.data.flatten()
#    np.savetxt(filename, np.asarray([[x],[y],[1000*t],[data]])[:,0,:].T, delimiter=",", header='x [m], y [m], t [ms], data [a.u.]')

    d = flap.get_data('NSTX_GPI', exp_id=exp_id, name='', object_name='GPI')
    if filter_data:
        d = flap.filter_data('GPI',
                             exp_id=exp_id,
                             coordinate='Time',
                             options={'Type': 'Highpass',
                                      'f_low': 1e2,
                                      'Design': 'Chebyshev II'})
        filename += '_FILTERED'
    if flux_coordinates:
        flap.add_coordinate('GPI', 'Flux r')
        filename += '_FLUX'
    d = flap.slice_data('GPI',
                        slicing={'Time': flap.Intervals(time_range[0], time_range[1])})

#    time=d.coordinate('Time')[0]
#    ind=np.where(np.logical_and(time[:,0,0]>=time_range[0], time[:,0,0]<=time_range[1]))
#    x1=d.coordinate('Device R')[0][ind,:,:].flatten()
#    y=d.coordinate('Device z')[0][ind,:,:].flatten()
#    t=time[ind,:,:].flatten()
#    data=d.data[ind,:,:].flatten()

    t = d.coordinate('Time')[0].flatten()
    x1 = d.coordinate('Device R')[0].flatten()
    y = d.coordinate('Device z')[0].flatten()
    data = d.data.flatten()

    filename = filename + '.csv'
    if flux_coordinates:
        #x2=d.coordinate('Flux r')[0][ind,:,:].flatten()
        x2 = d.coordinate('Flux r')[0].flatten()
        x2 = (x2 - np.min(x2)) / (np.max(x2) - np.min(x2)) * (np.max(x1) - np.min(x1)) + np.min(x1)
        np.savetxt(filename,
                   np.asarray([[x1], [x2], [y], [10000 * t], [data]])[:, 0, :].T,
                   delimiter=",",
                   header='R [m], PSI_rescaled [m], z [m], t [0.1ms], data [a.u.]')
    else:
        np.savetxt(filename,
                   np.asarray([[x1], [y], [10000 * t], [data]])[:, 0, :].T,
                   delimiter=",",
                   header='R [m], z [m], t [0.1ms], data [a.u.]')
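# A minimal numpy sketch (not part of the original function) of writing the same kind of
# ParaView-readable CSV with np.column_stack, which is equivalent to the
# np.asarray([[x1],[y],...])[:,0,:].T idiom used above.
def _sketch_paraview_csv(x, y, t, data, filename='gpi_points.csv'):
    import numpy as np

    table = np.column_stack([x, y, 1e4 * t, data])   # one row per (R, z, t, value) sample
    np.savetxt(filename, table, delimiter=",", header='R [m], z [m], t [0.1ms], data [a.u.]')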
def nstx_gpi_velocity_analysis_spatio_temporal_displacement(exp_id=None, #Shot number time_range=None, #The time range for the calculation data_object=None, #Input data object if available from outside (e.g. generated sythetic signal) x_range=[0,63], #X range for the calculation y_range=[0,79], #Y range for the calculation x_search=10, y_search=10, fbin=10, plot=True, #Plot the results pdf=False, #Print the results into a PDF plot_error=False, #Plot the errorbars of the velocity calculation based on the line fitting and its RMS error #File input/output options filename=None, #Filename for restoring data nocalc=True, #Restore the results from the .pickle file from filename+.pickle return_results=False, ): #Constants for the calculation #Using the spatial calibration to find the actual velocities. coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000. #The coordinates are in meters, the coefficients are in mm #Input error handling if exp_id is None and data_object is None: raise ValueError('Either exp_id or data_object needs to be set for the calculation.') if data_object is None: if time_range is None and filename is None: raise ValueError('It takes too much time to calculate the entire shot, please set a time_range.') else: if type(time_range) is not list and filename is None: raise TypeError('time_range is not a list.') if filename is None and len(time_range) != 2: raise ValueError('time_range should be a list of two elements.') if data_object is not None and type(data_object) == str: if exp_id is None: exp_id='*' d=flap.get_data_object(data_object,exp_id=exp_id) time_range=[d.coordinate('Time')[0][0,0,0], d.coordinate('Time')[0][-1,0,0]] exp_id=d.exp_id flap.add_data_object(d, 'GPI_SLICED_FULL') if filename is None: wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory'] comment='' filename=flap_nstx.analysis.filename(exp_id=exp_id, working_directory=wd+'/processed_data', time_range=time_range, purpose='sz velocity', comment=comment) pickle_filename=filename+'.pickle' if not os.path.exists(pickle_filename) and nocalc: print('The pickle file cannot be loaded. Recalculating the results.') nocalc=False if nocalc is False: slicing={'Time':flap.Intervals(time_range[0],time_range[1])} #Read data if data_object is None: print("\n------- Reading NSTX GPI data --------") d=flap.get_data('NSTX_GPI',exp_id=exp_id, name='', object_name='GPI') d=flap.slice_data('GPI',exp_id=exp_id, slicing=slicing, output_name='GPI_SLICED_FULL') d.data=np.asarray(d.data, dtype='float32') count=d.data.shape[0] vpol_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # poloidal velocity in km/sec vs. pixel vrad_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # radial velocity vs. pixel vpol_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # poloidal velocity in km/sec vs. pixel vrad_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # radial velocity vs. 
pixel vpol = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) vrad = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) cmax_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) cmax_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) sample_time=d.coordinate('Time')[0][1,0,0]-d.coordinate('Time')[0][0,0,0] ccorr_n=np.zeros([x_range[1]-x_range[0]+1, y_range[1]-y_range[0]+1, x_range[1]-x_range[0]+2*x_search+1, y_range[1]-y_range[0]+2*y_search+1]) ccorr_p=np.zeros([x_range[1]-x_range[0]+1, y_range[1]-y_range[0]+1, x_range[1]-x_range[0]+2*x_search+1, y_range[1]-y_range[0]+2*y_search+1]) for t0 in range(fbin+1,count-fbin-1): #Zero lag Autocorrelation calculation for the reference, +sample_time, -sample_time data n_data=d.data[t0-fbin-1:t0+fbin-1, x_range[0]-x_search:x_range[1]+x_search+1, y_range[0]-y_search:y_range[1]+y_search+1] acorr_pix_n=np.sqrt(np.sum((n_data-np.mean(n_data, axis=0))**2,axis=0)) p_data=d.data[t0-fbin+1:t0+fbin+1, x_range[0]-x_search:x_range[1]+x_search+1, y_range[0]-y_search:y_range[1]+y_search+1] acorr_pix_p=np.sqrt(np.sum((p_data-np.mean(p_data, axis=0))**2,axis=0)) ref_data=d.data[t0-fbin:t0+fbin, x_range[0]:x_range[1]+1, y_range[0]:y_range[1]+1] acorr_pix_ref=np.sqrt(np.sum((ref_data-np.mean(ref_data, axis=0))**2,axis=0)) print((t0-fbin-1)/(count-2*(fbin-1))*100.) #Zero lag Crosscovariance calculation for the positive and negative sample time signal for i0 in range(x_range[1]-x_range[0]+1): for j0 in range(y_range[1]-y_range[0]+1): frame_ref=d.data[t0-fbin:t0+fbin,i0+x_range[0],j0+y_range[0]] frame_ref=frame_ref-np.mean(frame_ref) for i1 in range(2*x_search+1): for j1 in range(2*y_search+1): frame_n=d.data[t0-fbin-1:t0+fbin-1, i1+i0+x_range[0]-x_search, j1+j0+y_range[0]-y_search] frame_n=frame_n-np.mean(frame_n) frame_p=d.data[t0-fbin+1:t0+fbin+1, i1+i0+x_range[0]-x_search, j1+j0+y_range[0]-y_search] frame_p=frame_p-np.mean(frame_p) ccorr_n[i0,j0,i1,j1]=np.sum(frame_ref*frame_n) ccorr_p[i0,j0,i1,j1]=np.sum(frame_ref*frame_p) #Calculating the actual cross-correlation coefficients for i0 in range(x_range[1]-x_range[0]+1): for j0 in range(y_range[1]-y_range[0]+1): vcorr_p=np.zeros([2*x_search+1,2*y_search+1]) vcorr_n=np.zeros([2*x_search+1,2*y_search+1]) for i1 in range(2*x_search+1): for j1 in range(2*y_search+1): vcorr_p[i1,j1]=ccorr_p[i0,j0,i1,j1]/(acorr_pix_ref[i0,j0]*acorr_pix_p[i0+i1,j0+j1]) vcorr_n[i1,j1]=ccorr_n[i0,j0,i1,j1]/(acorr_pix_ref[i0,j0]*acorr_pix_n[i0+i1,j0+j1]) #Calculating the displacement in pixel coordinates index_p=np.unravel_index(np.argmax(vcorr_p),shape=vcorr_p.shape) index_n=np.unravel_index(np.argmax(vcorr_n),shape=vcorr_n.shape) cmax_p[i0,j0,t0]=vcorr_p[index_p] cmax_n[i0,j0,t0]=vcorr_n[index_n] #Transforming the coordinates into spatial coordinates delta_index_p=np.asarray(index_p)-np.asarray([x_search,y_search]) delta_index_n=np.asarray(index_n)-np.asarray([x_search,y_search]) vpol_p[i0,j0,t0]=(coeff_z[0]*delta_index_p[0]+ coeff_z[1]*delta_index_p[1])/sample_time vpol_n[i0,j0,t0]=(coeff_z[0]*delta_index_n[0]+ coeff_z[1]*delta_index_n[1])/sample_time vrad_p[i0,j0,t0]=(coeff_r[0]*delta_index_p[0]+ coeff_r[1]*delta_index_p[1])/sample_time vrad_n[i0,j0,t0]=(coeff_r[0]*delta_index_n[0]+ coeff_r[1]*delta_index_n[1])/sample_time #Calculating the average between the positive and negative shifted pixels vpol_tot = (vpol_p - vpol_n)/2. # Average p and n correlations vrad_tot = (vrad_p - vrad_n)/2. 
# This is non causal #Averaging in an fbin long time window for t0 in range(int(fbin/2),count-int(fbin/2)): vpol[:,:,t0] = np.mean(vpol_tot[:,:,t0-int(fbin/2):t0+int(fbin/2)], axis=2) vrad[:,:,t0] = np.mean(vrad_tot[:,:,t0-int(fbin/2):t0+int(fbin/2)], axis=2) results={'Time':d.coordinate('Time')[0][:,0,0], 'Radial velocity':vrad, 'Poloidal velocity':vpol, 'Maximum correlation p':cmax_p, 'Maximum correlation n':cmax_n} pickle.dump(results, open(pickle_filename, 'wb')) else: results=pickle.load(open(pickle_filename, 'rb')) print('Data loaded from pickle file.') if pdf: pdf=PdfPages(filename.replace('processed_data', 'plots')+'.pdf') if plot: plt.figure() plt.errorbar(results['Time'], np.mean(results['Radial velocity'], axis=(0,1)), np.sqrt(np.var(results['Radial velocity'], axis=(0,1)))) plt.title('Radial velocity vs time') plt.xlabel('Time [s]') plt.ylabel('Radial velocity [m/s]') if pdf: pdf.savefig() plt.figure() plt.errorbar(results['Time'], np.mean(results['Poloidal velocity'], axis=(0,1)), np.sqrt(np.var(results['Poloidal velocity'], axis=(0,1)))) plt.title('Poloidal velocity vs time') plt.xlabel('Time [s]') plt.ylabel('Poloidal velocity [m/s]') plt.pause(0.001) if pdf: pdf.savefig() plt.figure() plt.errorbar(results['Time'], np.mean(results['Maximum correlation p'], axis=(0,1)), np.sqrt(np.var(results['Maximum correlation p'], axis=(0,1)))) plt.title('Maximum correlation p vs time') plt.xlabel('Time [s]') plt.ylabel('Maximum correlation p') plt.pause(0.001) if pdf: pdf.savefig() pdf.close() if return_results: return results
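# A minimal numpy sketch (not part of the original function) of the core step above:
# locate the maximum of a cross-correlation map, convert the pixel displacement to a
# physical displacement with the spatial calibration coefficients, and divide by the
# frame time. The calibration values and frame time mirror the constants used above.
def _sketch_displacement_to_velocity(vcorr, x_search=10, y_search=10, sample_time=2.5e-6):
    import numpy as np

    coeff_r = np.asarray([3.7183594, -0.77821046, 1402.8097]) / 1000.   # mm -> m
    coeff_z = np.asarray([0.18090118, 3.0657776, 70.544312]) / 1000.    # mm -> m

    index = np.unravel_index(np.argmax(vcorr), vcorr.shape)             # peak of the correlation map
    delta = np.asarray(index) - np.asarray([x_search, y_search])        # displacement in pixels

    v_rad = (coeff_r[0] * delta[0] + coeff_r[1] * delta[1]) / sample_time
    v_pol = (coeff_z[0] * delta[0] + coeff_z[1] * delta[1]) / sample_time
    return v_rad, v_pol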