def plot_results_for_paper(): pearson=False wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory'] #Figure 1 '''NO CODE IS NEEDED''' #Figure 2 '''NO CODE IS NEEDED''' #Figure 3 from flap_nstx.analysis import show_nstx_gpi_video_frames #fig, ax = plt.subplots(figsize=(6.5,5)) if plot[3]: gs=GridSpec(5,2) ax,fig=plt.subplots(figsize=(8.5/2.54,6)) pdf=PdfPages(wd+'/plots/figure_3_139901_basic_plots.pdf') plt.subplot(gs[0,0]) flap.get_data('NSTX_MDSPlus', name='\WF::\DALPHA', exp_id=139901, object_name='DALPHA').plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.subplot(gs[1,0]) flap.get_data('NSTX_GPI', name='', exp_id=139901, object_name='GPI').slice_data(summing={'Image x':'Mean', 'Image y':'Mean'}).plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.xlim([0,1.2]) plt.subplot(gs[2,0]) flap.get_data('NSTX_MDSPlus', name='IP', exp_id=139901, object_name='IP').plot(options={'Axes visibility':[False,True]}) plt.xlim([0,1.2]) plt.subplot(gs[3,0]) d=flap_nstx_thomson_data(exp_id=139901, density=True, output_name='DENSITY') dR = d.coordinate('Device R')[0][:,:]-np.insert(d.coordinate('Device R')[0][0:-1,:],0,0,axis=0) LID=np.sum(d.data*dR,axis=0) plt.plot(d.coordinate('Time')[0][0,:],LID) plt.title('Line integrated density') plt.xlabel('Time [s]') plt.ylabel('n_e [m^-2]') plt.xlim([0,1.2]) ax=plt.gca() ax.get_xaxis().set_visible(False) plt.subplot(gs[4,0]) magnetics=flap.get_data('NSTX_MDSPlus', name='\OPS_PC::\\BDOT_L1DMIVVHF5_RAW', exp_id=139901, object_name='MIRNOV') magnetics.coordinates.append(copy.deepcopy(flap.Coordinate(name='Time equi', unit='s', mode=flap.CoordinateMode(equidistant=True), shape = [], start=magnetics.coordinate('Time')[0][0], step=magnetics.coordinate('Time')[0][1]-magnetics.coordinate('Time')[0][0], dimension_list=[0]))) magnetics.filter_data(coordinate='Time equi', options={'Type':'Bandpass', 'f_low':100e3, 'f_high':500e3, 'Design':'Elliptic'}).plot() plt.xlim([0,1.2]) plt.subplot(gs[0,1]) flap.get_data('NSTX_MDSPlus', name='\WF::\DALPHA', exp_id=139901, object_name='DALPHA').plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[1,1]) flap.get_data('NSTX_GPI', name='', exp_id=139901, object_name='GPI').slice_data(summing={'Image x':'Mean', 'Image y':'Mean'}).plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[2,1]) flap.get_data('NSTX_MDSPlus', name='IP', exp_id=139901, object_name='IP').plot(options={'Axes visibility':[False,False]}) plt.xlim([0.25,0.4]) plt.subplot(gs[3,1]) d=flap_nstx_thomson_data(exp_id=139901, density=True, output_name='DENSITY') dR = d.coordinate('Device R')[0][:,:]-np.insert(d.coordinate('Device R')[0][0:-1,:],0,0,axis=0) LID=np.sum(d.data*dR,axis=0) plt.plot(d.coordinate('Time')[0][0,:],LID) plt.title('Line integrated density') plt.xlabel('Time [s]') plt.ylabel('n_e [m^-2]') plt.xlim([0.25,0.4]) ax=plt.gca() ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) plt.subplot(gs[4,1]) magnetics=flap.get_data('NSTX_MDSPlus', name='\OPS_PC::\\BDOT_L1DMIVVHF5_RAW', exp_id=139901, object_name='MIRNOV') magnetics.coordinates.append(copy.deepcopy(flap.Coordinate(name='Time equi', unit='s', mode=flap.CoordinateMode(equidistant=True), shape = [], start=magnetics.coordinate('Time')[0][0], step=magnetics.coordinate('Time')[0][1]-magnetics.coordinate('Time')[0][0], dimension_list=[0]))) magnetics.filter_data(coordinate='Time equi', options={'Type':'Bandpass', 'f_low':100e3, 'f_high':500e3, 
'Design':'Elliptic'}).plot(slicing={'Time':flap.Intervals(0.25,0.4)}) plt.xlim([0.25,0.4]) ax=plt.gca() ax.get_yaxis().set_visible(False) pdf.savefig() pdf.close() if plot[4]: plt.figure() ax,fig=plt.subplots(figsize=(3.35*2,5.5)) pdf=PdfPages(wd+'/plots/figure_5_139901_0.3249158_30_frame.pdf') show_nstx_gpi_video_frames(exp_id=139901, start_time=0.3249158, n_frame=30, logz=False, z_range=[0,3900], plot_filtered=False, normalize=False, cache_data=False, plot_flux=False, plot_separatrix=True, flux_coordinates=False, device_coordinates=True, new_plot=False, save_pdf=True, colormap='gist_ncar', save_for_paraview=False, colorbar_visibility=True ) pdf.savefig() pdf.close() #Figure 5 if plot[5] or plot[6] or plot[7]: try: d1,d2,d3,d4=pickle.load(open(wd+'/processed_data/fig_6_8_flap_object.pickle','rb')) flap.add_data_object(d1, 'GPI_SLICED_FULL') flap.add_data_object(d2, 'GPI_GAS_CLOUD') flap.add_data_object(d3, 'GPI_SLICED_DENORM_CCF_VEL') flap.add_data_object(d4, 'GPI_CCF_F_BY_F') except: calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.325-1e-3,0.325+1e-3], plot=False, subtraction_order_for_velocity=1, skip_structure_calculation=False, correlation_threshold=0., pdf=False, nlevel=51, nocalc=False, filter_level=3, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True) pickle.dump((flap.get_data_object('GPI_SLICED_FULL'), flap.get_data_object('GPI_GAS_CLOUD'), flap.get_data_object('GPI_SLICED_DENORM_CCF_VEL'), flap.get_data_object('GPI_CCF_F_BY_F')), open(wd+'/processed_data/fig_6_8_flap_object.pickle','wb')) if plot[5]: pdf=PdfPages(wd+'/plots/figure_6_normalization.pdf') times=[0.3245,0.3249560,0.3255] signals=['GPI_SLICED_FULL', 'GPI_GAS_CLOUD', 'GPI_SLICED_DENORM_CCF_VEL'] gs=GridSpec(3,3) plt.figure() ax,fig=plt.subplots(figsize=(3.35,4)) titles=['Raw frame', 'Gas cloud', 'Normalized'] for index_grid_x in range(3): for index_grid_y in range(3): plt.subplot(gs[index_grid_x,index_grid_y]) visibility=[True,True] if index_grid_x != 3-1: visibility[0]=False if index_grid_y != 0: visibility[1]=False # if index_grid_x == 0: # z_range=[0,4096] # elif index_grid_x == 1: # z_range=[0,400] # elif index_grid_x == 2: # z_range=[0,40] z_range=None flap.plot(signals[index_grid_x], plot_type='contour', slicing={'Time':times[index_grid_y]}, axes=['Image x', 'Image y'], options={'Z range':z_range, 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':visibility, #'Colormap':'gist_ncar', 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) if index_grid_x == 0: #ax=plt.gca() plt.title(f"{times[index_grid_y]*1e3:.3f}"+' '+titles[index_grid_x]) else: plt.title(titles[index_grid_x]) pdf.savefig() pdf.close() #Figure 6 if plot[6]: flap.get_data('NSTX_GPI',exp_id=139901, name='', object_name='GPI') flap.slice_data('GPI', slicing={'Time':flap.Intervals(0.3245,0.3255)}, output_name='GPI_SLICED_FULL') data_object_name='GPI_SLICED_DENORM_CCF_VEL' detrended=flap_nstx.analysis.detrend_multidim(data_object_name, exp_id=139901, order=4, coordinates=['Image x', 'Image y'], output_name='GPI_DETREND_VEL') d=copy.deepcopy(flap.get_data_object(data_object_name)) d.data=d.data-detrended.data flap.add_data_object(d,'GPI_TREND') signals=[data_object_name, 'GPI_TREND', 'GPI_DETREND_VEL'] pdf=PdfPages(wd+'/plots/figure_7_trend_subtraction.pdf') gs=GridSpec(1,3) plt.figure() ax,fig=plt.subplots(figsize=(8.5/2.54,2)) for 
index_grid_x in range(3): plt.subplot(gs[index_grid_x]) visibility=[True,True] if index_grid_x != 0: visibility[1]=False z_range=[0,10] colorbar=False flap.plot(signals[index_grid_x], plot_type='contour', slicing={'Time':0.3249560}, #slicing={'Sample':29808}, axes=['Image x', 'Image y'], options={'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':visibility, #'Colormap':colormap, 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) #fig.tight_layout() pdf.savefig() pdf.close() #Figure 7 if plot[7]: pdf=PdfPages(wd+'/plots/figure_8_CCF_frame_by_frame.pdf') gs=GridSpec(1,3) plt.figure() ax,fig=plt.subplots(figsize=(8.5/2.54,2)) plt.subplot(gs[0]) flap.plot('GPI_SLICED_FULL', plot_type='contour', slicing={'Sample':29806}, axes=['Image x', 'Image y'], options={ 'Z range':[0,4096], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,True], 'Colormap':'gist_ncar', 'Colorbar':False, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) plt.title("324.959ms") plt.subplot(gs[1]) flap.plot('GPI_SLICED_FULL', plot_type='contour', slicing={'Sample':29807}, axes=['Image x', 'Image y'], options={'Z range':[0,4096], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,False], 'Colorbar':False, 'Colormap':'gist_ncar', }, plot_options={'levels':51}, ) plt.title("324.961ms") plt.subplot(gs[2]) flap.plot('GPI_CCF_F_BY_F', plot_type='contour', slicing={'Sample':29807, 'Image x':flap.Intervals(-10,10),'Image y':flap.Intervals(-10,10)}, axes=['Image x', 'Image y'], options={ #'Z range':[0,2048], 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Axes visibility':[True,True], #'Colormap':colormap, 'Colorbar':True, #'Overplot options':oplot_options, }, plot_options={'levels':51}, ) plt.title("CCF") pdf.savefig() pdf.close() #Figure 8 if plot[8]: #2x2 frames with the found structures during an ELM burst calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.32495,0.325], plot=False, subtraction_order_for_velocity=4, skip_structure_calculation=False, correlation_threshold=0.5, pdf=True, nlevel=51, nocalc=False, filter_level=5, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True, structure_pixel_calc=True, structure_pdf_save=True, test_structures=True ) #Post processing done with illustrator #Figure 9 if plot[9]: #2x3 #Synthetic GPI signal #Postprocessing done with illustrator nstx_gpi_generate_synthetic_data(exp_id=1, time=0.0001, amplitude=1.0, output_name='test', poloidal_velocity=3e3, radial_velocity=0., poloidal_size=0.10, radial_size=0.05, waveform_divider=1, sinusoidal=True) d=flap.get_data_object('test', exp_id=1) d.data=d.data-np.mean(d.data,axis=0) calculate_nstx_gpi_avg_frame_velocity(data_object='test', exp_id=1, time_range=[0.000000,0.00005], plot=False, subtraction_order_for_velocity=1, skip_structure_calculation=False, correlation_threshold=0.5, pdf=True, nlevel=51, nocalc=False, filter_level=5, normalize_for_size=False, normalize_for_velocity=False, threshold_coeff=1., normalize_f_high=1e3, normalize=None, velocity_base='cog', return_results=False, plot_gas=False, structure_pixel_calc=True, structure_pdf_save=True, test_structures=True ) #Figure 10 if plot[10]: #Single shot results calculate_nstx_gpi_avg_frame_velocity(exp_id=139901, time_range=[0.325-2e-3,0.325+2e-3], 
plot_time_range=[0.325-0.5e-3,0.325+0.5e-3], plot=True, subtraction_order_for_velocity=4, skip_structure_calculation=False, correlation_threshold=0.6, pdf=True, nlevel=51, nocalc=True, gpi_plane_calculation=True, filter_level=5, normalize_for_size=True, normalize_for_velocity=True, threshold_coeff=1., normalize_f_high=1e3, normalize='roundtrip', velocity_base='cog', return_results=False, plot_gas=True, plot_for_publication=True, plot_scatter=False, overplot_average=False, overplot_str_vel=False) #2x3 #Done with Illustrator #Figure 12 if plot[11]: #Conditional averaged results calculate_avg_velocity_results(pdf=True, plot=True, plot_max_only=True, plot_for_publication=True, normalized_velocity=True, subtraction_order=4, normalized_structure=True, opacity=0.5, correlation_threshold=0.6, gpi_plane_calculation=True, plot_scatter=False) #Post processing done with Illustrator #Figure 11 if plot[12]: if pearson: pdf=PdfPages(wd+'/plots/figure_13_pearson_matrix.pdf') pearson=calculate_nstx_gpi_correlation_matrix(calculate_average=False, gpi_plane_calculation=True, window_average=0.050e-3, elm_burst_window=True) data=pearson[:,:,0] variance=pearson[:,:,1] data[10,10]=-1 plt.figure() plt.subplots(figsize=(8.5/2.54,8.5/2.54/1.618)) plt.matshow(data, cmap='seismic') plt.xticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max'], rotation='vertical') plt.yticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max']) plt.colorbar() plt.show() pdf.savefig() plt.figure() plt.subplots(figsize=(8.5/2.54,8.5/2.54/1.618)) variance[10,10]=-1 variance[9,9]=1 plt.matshow(variance, cmap='seismic') #plt.matshow(data, cmap='gist_ncar') plt.xticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max'], rotation='vertical') plt.yticks(ticks=np.arange(11), labels=['Velocity ccf R', #0,1 'Velocity ccf z', #0,1 'Velocity str max R', #2,3 'Velocity str max z', #2,3 'Size max R', #4,5 'Size max z', #4,5 'Position max R', #6,7 'Position max z', #6,7 'Area max', #8 'Elongation max', #9 'Angle max']) plt.colorbar() plt.show() pdf.savefig() pdf.close() else: pdf=PdfPages(wd+'/plots/figure_13_dependence.pdf') plt.figure() plt.subplots(figsize=(17/2.54,17/2.54/1.618)) plot_all_parameters_vs_all_other_average(window_average=0.2e-3, symbol_size=0.3, plot_error=True) pdf.savefig() pdf.close()
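# Usage sketch (assumption): plot_results_for_paper() indexes a boolean list
# named `plot` (one entry per paper figure) that is not defined in the body
# above, so it is presumably a module-level switch list or an intended
# parameter. A hypothetical driver could look like:
#
#     plot = [False] * 13
#     plot[3] = True              # regenerate only Figure 3 for shot 139901
#     plot_results_for_paper()
#
# Every branch needs access to the NSTX GPI/MDSplus data sources and a
# configured 'Module NSTX_GPI' section (working directory) in the flap config.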
def test_filter(): plt.close('all') print() print('>>>>>>>>>>>>>>>>>>> Test filter <<<<<<<<<<<<<<<<<<<<<<<<') flap.delete_data_object('*') print( "**** Generating 10 square wave signals and filtering with integrating filter, 10 microsec" ) t = np.arange(1000) * 1e-6 d = np.ndarray((len(t), 10), dtype=float) for i in range(10): d[:, i] = np.sign(np.sin(math.pi * 2 * (1e4 + i * 1e3) * t)) + 1 c = flap.Coordinate(name='Time', unit='Second', mode=flap.CoordinateMode(equidistant=True), start=0.0, step=1e-6, dimension_list=[0]) d = flap.DataObject(data_array=d, coordinates=[c]) flap.add_data_object(d, "Signal") plt.figure() d.plot(options={'Y sep': 3}) di = d.filter_data(coordinate='Time', intervals=flap.Intervals(np.array([1e-4, 6e-4]), np.array([2e-4, 8e-4])), options={ 'Type': 'Int', 'Tau': 10e-6 }).plot(options={'Y sep': 3}) print("**** Filtering with differential filter, 10 microsec") plt.figure() d.plot(options={'Y sep': 3}) flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', intervals=flap.Intervals(np.array([1e-4, 6e-4]), np.array([2e-4, 8e-4])), options={ 'Type': 'Diff', 'Tau': 10e-6 }) flap.plot('Signal_filt', options={'Y sep': 3}) print( "**** Generating random data, 1 million points and overplotting spectra with various filters." ) d = flap.get_data('TESTDATA', name='TEST-1-1', options={ 'Signal': 'Random', 'Scaling': 'Digit', 'Length': 1 }, object_name='Signal') plt.figure() flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Int', 'Tau': 16e-6 }) flap.apsd('Signal',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid = flap.apsd('Signal_filt',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid.plt_axis_list[-1].set_title("{'Type':'Int','Tau':16e-6}") plt.figure() flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Diff', 'Tau': 16e-6 }) flap.apsd('Signal',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid = flap.apsd('Signal_filt',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid.plt_axis_list[-1].set_title("{'Type':'Diff','Tau':16e-6}") plt.figure() flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Lowpass', 'f_high': 5e4 }) flap.apsd('Signal',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid = flap.apsd('Signal_filt',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid.plt_axis_list[-1].set_title("{'Type':'Lowpass','f_high':5e4}") plt.figure() flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Highpass', 'f_low': 1e4, 'f_high': 5e4 }) flap.apsd('Signal',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid = flap.apsd('Signal_filt',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid.plt_axis_list[-1].set_title( "{'Type':'Highpass','f_low':1e4,'f_high':5e4}") plt.figure() flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Bandpass', 'f_low': 5e3, 'f_high': 5e4 }) 
flap.apsd('Signal',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid = flap.apsd('Signal_filt',options={'Log':True,'Res':20,'Range':[100,5e5]},output_name='Signal_APSD')\ .plot(options={'Log x':True, 'Log y': True}) plotid.plt_axis_list[-1].set_title( "{'Type':'Bandpass','f_low':5e3,'f_high':5e4}") plt.figure() print("**** Bandpower signal [5e4-2e5] Hz, inttime 20 microsec") flap.filter_data('Signal', output_name='Signal_filt', coordinate='Time', options={ 'Type': 'Bandpass', 'f_low': 5e4, 'f_high': 2e5, 'Power': True, 'Inttime': 20e-6 }) plotid = flap.plot('Signal_filt') plotid.plt_axis_list[-1].set_title( "'Type':'Bandpass','f_low':5e4,'f_high':2e5, 'Power':True, 'Inttime':20e-6}" )
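# Usage sketch (assumption): test_filter() only relies on flap's built-in
# 'TESTDATA' source and matplotlib, so no NSTX data access is needed. It
# exercises the 'Int', 'Diff', 'Lowpass', 'Highpass' and 'Bandpass' filter
# types (plus band power via 'Power'/'Inttime') through flap.filter_data():
#
#     test_filter()
#     plt.show()     # keep the comparison figures open when run as a script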
def nstx_gpi_generate_synthetic_data(exp_id=None, #Artificial exp_id, should be starting from zero and not the one which is used by e.g. background_shot time=None, #Time to be simulated in seconds sampling_time=2.5e-6, #The sampling time of the diagnostic #General parameters n_structures=3, amplitude=0.5, #Amplitude of the structure relative to the background. add_background=True, background_shot=139901, #The original of the background of the simulated signal. background_time_range=[0.31,0.32], #The time range of the background in the background_shot. poloidal_velocity=1e3, #The poloidal velocity of the structures, can be a list [n_structure] start_position=[1.41,0.195], radial_size=0.3, #The radial size of the structures. poloidal_size=0.1, #The poloidal size of the structures. #Parameters for a gaussian object gaussian=False, radial_velocity=1e2, #The radial velocity of the structures, can be a list [n_structure] poloidal_size_velocity=0., #The velocity of the size change in mm/ms radial_size_velocity=0., rotation=False, #Set rotation for the structure. rotation_frequency=None, #Set the frequency of the rotation of the structure. #Parameters for sinusoidal object sinusoidal=False, waveform_divider=1, y_lambda=0.05, #Wavelength in the y direction output_name=None, #Output name of the generated flap.data_object test=False, #Testing/debugging switch (mainly plotting and printing error messages) ): if rotation_frequency is None: rotation_frequency=0 n_time=int(time/sampling_time) data_arr=np.zeros([n_time,64,80]) background=np.zeros([64,80]) if add_background: background=flap.get_data('NSTX_GPI', exp_id=139901, name='', object_name='GPI_RAW') background=background.slice_data(slicing={'Time':flap.Intervals(background_time_range[0], background_time_range[1])}, summing={'Time':'Mean'}) amplitude=amplitude*background.data.max() #Spatial positions coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000. #The coordinates are in meters r_coordinates=np.zeros([64,80]) z_coordinates=np.zeros([64,80]) for i_x in range(64): for i_y in range(80): r_coordinates[i_x,i_y]=coeff_r[0]*i_x+coeff_r[1]*i_y+coeff_r[2] z_coordinates[i_x,i_y]=coeff_z[0]*i_x+coeff_z[1]*i_y+coeff_z[2] if gaussian: r0=start_position for i_frames in range(n_time): for i_structures in range(n_structures): cur_time=i_frames * sampling_time rot_arg=2*np.pi*rotation_frequency*cur_time a=(np.cos(rot_arg)/(radial_size+radial_size_velocity*cur_time))**2+\ (np.sin(rot_arg)/(poloidal_size+poloidal_size_velocity*cur_time))**2 b=-0.5*np.sin(2*rot_arg)/(radial_size+radial_size_velocity*cur_time)**2+\ 0.5*np.sin(2*rot_arg)/(poloidal_size+poloidal_size_velocity*cur_time)**2 c=(np.sin(rot_arg)/(radial_size+radial_size_velocity*cur_time))**2+\ (np.cos(rot_arg)/(poloidal_size+poloidal_size_velocity*cur_time))**2 x0=r0[i_structures,0]+radial_velocity[i_structures]*cur_time y0=r0[i_structures,1]+poloidal_velocity[i_structures]*cur_time frame=np.zeros([64,80]) for j_vertical in range(80): for k_radial in range(64): x=r_coordinates[k_radial,j_vertical] y=z_coordinates[k_radial,j_vertical] if (x > x0+radial_size*2 or x < x0-radial_size*2 or y > y0+radial_size*2 or y < y0-radial_size*2): frame[k_radial,j_vertical]=0. 
else: frame[k_radial,j_vertical]=(amplitude[i_structures]*np.exp(-0.5*(a*(x-x0)**2 + 2*b*(x-x0)*(y-y0) + c*(y-y0)**2)) +background.data[k_radial,j_vertical]) data_arr[i_frames,:,:]+=frame if sinusoidal: x0=start_position[0] y0=start_position[1] ky=np.pi/poloidal_size omega=poloidal_velocity*ky phi0=0 for i_frames in range(n_time): cur_time=i_frames * sampling_time for j_vertical in range(80): for k_radial in range(64): x=r_coordinates[k_radial,j_vertical] y=z_coordinates[k_radial,j_vertical] A=1/np.sqrt(2*np.pi*radial_size)*np.exp(-0.5*(np.abs(x-(x0+radial_velocity*cur_time))/(radial_size/2.355))**2) arg=ky*y-omega*cur_time+phi0 division=(scipy.signal.square(arg/waveform_divider, duty=0.5/waveform_divider)+1)/2. data_arr[i_frames,k_radial,j_vertical]=amplitude*A*np.sin(arg)*division data_arr[i_frames,k_radial,j_vertical]+=background.data[k_radial,j_vertical] #Adding the coordinates to the data object: coord = [None]*6 coord[0]=(copy.deepcopy(flap.Coordinate(name='Time', unit='s', mode=flap.CoordinateMode(equidistant=True), start=0., step=sampling_time, #shape=time_arr.shape, dimension_list=[0] ))) coord[1]=(copy.deepcopy(flap.Coordinate(name='Sample', unit='n.a.', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, dimension_list=[0] ))) coord[2]=(copy.deepcopy(flap.Coordinate(name='Image x', unit='Pixel', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, shape=[], dimension_list=[1] ))) coord[3]=(copy.deepcopy(flap.Coordinate(name='Image y', unit='Pixel', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, shape=[], dimension_list=[2] ))) coord[4]=(copy.deepcopy(flap.Coordinate(name='Device R', unit='m', mode=flap.CoordinateMode(equidistant=False), values=r_coordinates, shape=r_coordinates.shape, dimension_list=[1,2] ))) coord[5]=(copy.deepcopy(flap.Coordinate(name='Device z', unit='m', mode=flap.CoordinateMode(equidistant=False), values=z_coordinates, shape=z_coordinates.shape, dimension_list=[1,2] ))) _options={} _options["Trigger time [s]"]=0. _options["FPS"]=1/sampling_time _options["Sample time [s]"]=sampling_time _options["Exposure time [s]"]=2.1e-6 _options["X size"]=64 _options["Y size"]=80 _options["Bits"]=32 d = flap.DataObject(data_array=data_arr, data_unit=flap.Unit(name='Signal',unit='Digit'), coordinates=coord, exp_id=exp_id, data_title='Simulated signal', info={'Options':_options}, data_source="NSTX_GPI") if output_name is not None: flap.add_data_object(d,output_name) return d
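# Example (argument values copied from the Figure 9 branch of
# plot_results_for_paper above, not invented): generate 0.1 ms of sinusoidal
# synthetic GPI data drifting poloidally at 3 km/s:
#
#     d = nstx_gpi_generate_synthetic_data(exp_id=1, time=0.0001, amplitude=1.0,
#                                          output_name='test',
#                                          poloidal_velocity=3e3, radial_velocity=0.,
#                                          poloidal_size=0.10, radial_size=0.05,
#                                          waveform_divider=1, sinusoidal=True)
#
# Note (from the gaussian branch above): with gaussian=True, amplitude,
# start_position, radial_velocity and poloidal_velocity are indexed per
# structure, i.e. they are expected as length-n_structures arrays/lists.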
def get_fit_nstx_thomson_profiles( exp_id=None, #Shot number pressure=False, #Return the pressure profile paramenters temperature=False, #Return the temperature profile parameters density=False, #Return the density profile parameters spline_data=False, #Calculate the results from the spline data (no error is going to be taken into account) device_coordinates=False, #Calculate the results as a function of device coordinates radial_range=None, #Radial range of the pedestal (only works when the device coorinates is set) flux_coordinates=False, #Calculate the results in flux coordinates flux_range=None, #The normalaized flux coordinates range for returning the results test=False, output_name=None, return_parameters=False, plot_time=None, pdf_object=None, ): """ Returns a dataobject which has the largest corresponding gradient based on the tanh fit. Fitting is based on publication https://aip.scitation.org/doi/pdf/10.1063/1.4961554 The linear background is not usitlized, instead of the mtanh, only tanh is used. """ if ((device_coordinates and flux_range is not None) or (flux_coordinates and radial_range is not None)): raise ValueError( 'When flux or device coordinates are set, only flux or radial range can be set! Returning...' ) d = flap_nstx_thomson_data(exp_id=exp_id, force_mdsplus=False, pressure=pressure, temperature=temperature, density=density, spline_data=False, add_flux_coordinates=True, output_name='THOMSON_DATA') if flux_coordinates: r_coord_name = 'Flux r' if device_coordinates: r_coord_name = 'Device R' time = d.coordinate('Time')[0][0, :] thomson_profile = { 'Time': time, 'Data': d.data, 'Error': d.error, 'Device R': d.coordinate('Device R')[0], 'Flux r': d.coordinate('Flux r')[0], 'Height': np.zeros(time.shape), 'Width': np.zeros(time.shape), 'Slope': np.zeros(time.shape), 'Position': np.zeros(time.shape), 'SOL offset': np.zeros(time.shape), 'Max gradient': np.zeros(time.shape), 'Error': { 'Height': np.zeros(time.shape), 'SOL offset': np.zeros(time.shape), 'Position': np.zeros(time.shape), 'Width': np.zeros(time.shape), 'Max gradient': np.zeros(time.shape) } } if test: plt.figure() if flux_range is not None: x_range = flux_range if radial_range is not None: x_range = radial_range # def mtanh_fit_function(r, b_height, b_sol, b_pos, b_width, b_slope): #This version of the code is not working due to the b_slope linear dependence # def mtanh(x,b_slope): # return ((1+b_slope*x)*np.exp(x)-np.exp(-x))/(np.exp(x)+np.exp(-x)) # return (b_height-b_sol)/2*(mtanh((b_pos-r)/(2*b_width),b_slope)+1)+b_sol def tanh_fit_function(r, b_height, b_sol, b_pos, b_width): def tanh(x): return (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x)) return (b_height - b_sol) / 2 * (tanh( (b_pos - r) / (2 * b_width)) + 1) + b_sol for i_time in range(len(time)): x_data = d.coordinate(r_coord_name)[0][:, i_time] y_data = d.data[:, i_time] y_data_error = d.error[:, i_time] if r_coord_name == 'Flux r': x_data = x_data[np.argmin(x_data):] if np.sum(np.isinf(x_data)) != 0: continue try: ind_coord = np.where( np.logical_and(x_data > x_range[0], x_data <= x_range[1])) x_data = x_data[ind_coord] y_data = y_data[ind_coord] y_data_error = y_data_error[ind_coord] # print(x_data) # print(ind_coord) p0 = [ y_data[0], #b_height y_data[-1], #b_sol x_data[0], #b_pos (x_data[-1] - x_data[0]) / 2., #b_width #(y_data[0]-y_data[-1])/(x_data[0]-x_data[-1]), #b_slope this is supposed to be some kind of liear modification to the #tanh function called mtanh. It messes up the fitting quite a bit and it's not useful at all. 
] popt, pcov = curve_fit(tanh_fit_function, x_data, y_data, sigma=y_data_error, p0=p0) perr = np.sqrt(np.diag(pcov)) if test or (plot_time is not None and np.abs(plot_time - time[i_time]) < 1e-3): plt.cla() plt.scatter(x_data, y_data, color='tab:blue') plt.errorbar(x_data, y_data, yerr=y_data_error, marker='o', color='tab:blue', ls='') plt.plot(x_data, tanh_fit_function(x_data, *popt)) if flux_coordinates: xlabel = 'PSI_norm' else: xlabel = 'Device R [m]' if temperature: profile_string = 'temperature' ylabel = 'Temperature [keV]' elif density: profile_string = 'density' ylabel = 'Density [1/m3]' elif pressure: profile_string = 'pressure' ylabel = 'Pressure [kPa]' time_string = ' @ ' + str(time[i_time]) plt.title('Fit ' + profile_string + ' profile of ' + str(exp_id) + time_string) plt.xlabel(xlabel) plt.ylabel(ylabel) plt.pause(0.001) if pdf_object is not None: pdf_object.savefig() else: pass # plt.plot(x_data,mtanh_fit_function(x_data,*p0)) thomson_profile['Height'][i_time] = popt[0] thomson_profile['SOL offset'][i_time] = popt[1] thomson_profile['Position'][i_time] = popt[2] thomson_profile['Width'][i_time] = popt[3] thomson_profile['Max gradient'][i_time] = (popt[1] - popt[0]) / ( popt[3]) #from paper and pen calculation thomson_profile['Error']['Height'][i_time] = perr[0] thomson_profile['Error']['SOL offset'][i_time] = perr[1] thomson_profile['Error']['Position'][i_time] = perr[2] thomson_profile['Error']['Width'][i_time] = perr[3] thomson_profile['Error']['Max gradient'][i_time] = 1 / (np.abs( popt[3])) * (np.abs(perr[1]) + np.abs(perr[0])) + np.abs( (popt[1] - popt[0]) / (popt[3]**2)) * np.abs(perr[3]) #thomson_profile_parameters['Slope'][i_time]=popt[4] except: popt = [np.nan, np.nan, np.nan, np.nan] coord = [] coord.append( copy.deepcopy( flap.Coordinate( name='Time', unit='s', mode=flap.CoordinateMode(equidistant=True), start=time[0], step=time[1] - time[0], #shape=time_arr.shape, dimension_list=[0]))) coord.append( copy.deepcopy( flap.Coordinate(name='Sample', unit='n.a.', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, dimension_list=[0]))) if device_coordinates: grad_unit = '/m' if flux_coordinates: grad_unit = '/psi' if pressure: data_unit = flap.Unit(name='Pressure gradient', unit='kPa' + grad_unit) elif temperature: data_unit = flap.Unit(name='Temperature gradient', unit='keV' + grad_unit) elif density: data_unit = flap.Unit(name='Density gradient', unit='m-3' + grad_unit) if spline_data: data_title = 'NSTX Thomson gradient' else: data_title = 'NSTX Thomson gradient spline' d = flap.DataObject(exp_id=exp_id, data_array=thomson_profile['Max gradient'], data_unit=data_unit, coordinates=coord, data_title=data_title) if output_name is not None: flap.add_data_object(d, output_name) if not return_parameters: return d else: return thomson_profile
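# Usage sketch (assumption; the shot number follows the rest of this file and
# the flux range is illustrative): fit the density pedestal with the tanh
# function in normalized flux coordinates and return the fitted parameters
# instead of the gradient DataObject:
#
#     profile = get_fit_nstx_thomson_profiles(exp_id=139901, density=True,
#                                             flux_coordinates=True,
#                                             flux_range=[0.6, 1.1],
#                                             return_parameters=True)
#     # profile['Height'], profile['SOL offset'], profile['Position'],
#     # profile['Width'] and profile['Max gradient'] are per-time arrays;
#     # the corresponding fit errors are under profile['Error'].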
def test_image():
    plt.close('all')
    print()
    print('>>>>>>>>>>>>>>>>>>> Test image <<<<<<<<<<<<<<<<<<<<<<<<')
    flap.delete_data_object('*')

    print("**** Generating a sequence of test images")
    flap.get_data('TESTDATA', name='VIDEO', object_name='TEST_VIDEO',
                  options={'Length': 0.1,
                           'Samplerate': 1e3,
                           'Frequency': 10,
                           'Spotsize': 100})
    flap.list_data_objects()

    print("***** Showing one image")
    plt.figure()
    flap.plot('TEST_VIDEO',
              slicing={'Time': 30e-3 / 4},
              plot_type='image',
              axes=['Image x', 'Image y'],
              options={'Clear': True})

    plt.figure()
    print("**** Showing a sequence of images and saving to test_video.avi")
    flap.plot('TEST_VIDEO',
              plot_type='anim-image',
              axes=['Image x', 'Image y', 'Time'],
              options={'Z range': [0, 4095],
                       'Wait': 0.01,
                       'Clear': True,
                       'Video file': 'test_video.avi',
                       'Colorbar': True,
                       'Aspect ratio': 'equal'})

    plt.figure()
    print("*** Showing the same images as contour plots and saving to test_video_contour.avi")
    flap.plot('TEST_VIDEO',
              plot_type='anim-contour',
              axes=['Image x', 'Image y', 'Time'],
              options={'Z range': [0, 4095],
                       'Wait': 0.01,
                       'Clear': True,
                       'Video file': 'test_video_contour.avi',
                       'Colorbar': False})

    print("*** Converting data object x, y coordinates to non-equidistant.")
    d = flap.get_data_object('TEST_VIDEO')
    coord_x = d.get_coordinate_object('Image x')
    index = [0] * 3
    index[coord_x.dimension_list[0]] = ...
    x = np.squeeze(d.coordinate('Image x', index=index)[0])
    coord_x.mode.equidistant = False
    coord_x.values = x
    coord_x.shape = x.shape
    coord_y = d.get_coordinate_object('Image y')
    index = [0] * 3
    index[coord_y.dimension_list[0]] = ...
    y = np.squeeze(d.coordinate('Image y', index=index)[0])
    coord_y.mode.equidistant = False
    coord_y.values = y
    coord_y.shape = y.shape
    flap.add_data_object(d, "TEST_VIDEO_noneq")
    flap.list_data_objects()

    plt.figure()
    print("**** Showing this video and saving to test_video_noneq.avi")
    flap.plot('TEST_VIDEO_noneq',
              plot_type='anim-image',
              axes=['Image x', 'Image y', 'Time'],
              options={'Z range': [0, 4095],
                       'Wait': 0.01,
                       'Clear': True,
                       'Video file': 'test_video_noneq.avi',
                       'Colorbar': True,
                       'Aspect ratio': 'equal'})
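# Usage sketch (assumption): test_image() only needs the flap 'TESTDATA'
# source; it writes test_video*.avi files into the current working directory,
# so it should be run from a writable location:
#
#     test_image()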
def flap_nstx_thomson_data(exp_id=None, force_mdsplus=False, pressure=False, temperature=False, density=False, spline_data=False, add_flux_coordinates=True, output_name=None, test=False): """ Returns the Thomson scattering processed data from the MDSplus tree as a dictionary containing all the necessary parameters. The description of the dictionary can be seen below. """ if pressure + temperature + density != 1: raise ValueError( 'Either pressure or temperature or density can be set, neither none, nor more than one.' ) if exp_id is None: raise TypeError('exp_id must be set.') wd = flap.config.get_all_section('Module NSTX_GPI')['Local datapath'] filename = wd + '/' + str(exp_id) + '/nstx_mdsplus_thomson_' + str( exp_id) + '.pickle' if not os.path.exists(filename) or force_mdsplus: conn = mds.Connection('skylark.pppl.gov:8501') conn.openTree('activespec', exp_id) mdsnames = [ 'ts_times', #The time vector of the measurement (60Hz measurement with the Thomson) 'FIT_RADII', #Radius of the measurement 'FIT_R_WIDTH', #N/A (proably error of the radious) 'FIT_TE', #Electron temperature profile numpy array([radius,time]) 'FIT_TE_ERR', #The error for Te (symmetric) 'FIT_NE', #Electron density profile numpy array([radius,time]) 'FIT_NE_ERR', #The error for ne (symmetric) 'FIT_PE', #Electron pressure profile numpy array([radius,time]) 'FIT_PE_ERR', #The error for pe (symmetric) 'SPLINE_RADII', #Spline fit of the previous results (4times interpolation compared to the previous ones) 'SPLINE_NE', #Spline fit ne without error 'SPLINE_PE', #Spline fit pe without error 'SPLINE_TE', #Spline fit Te without error 'TS_LD', #N/A 'LASER_ID', #ID of the Thomson laser 'VALID', #Validity of the measurement 'DATEANALYZED', #The date when the analysis was done for the data 'COMMENT' ] #Comment for the analysis thomson = {} for name in mdsnames: thomson[name] = conn.get('\TS_BEST:' + name).data() if name == 'ts_times' and type(thomson[name]) is str: raise ValueError('No Thomson data available.') thomson['FIT_R_WIDTH'] /= 100. thomson['FIT_RADII'] /= 100. thomson['SPLINE_RADII'] /= 100. thomson['FIT_NE'] *= 10e6 thomson['FIT_NE_ERR'] *= 10e6 thomson['SPLINE_NE'] *= 10e6 conn.closeAllTrees() conn.disconnect() try: pickle.dump(thomson, open(filename, 'wb')) except: raise IOError( 'The path ' + filename + ' cannot be accessed. 
Pickle file cannot be created.') else: thomson = pickle.load(open(filename, 'rb')) thomson_time = thomson['ts_times'] coord = [] coord.append( copy.deepcopy( flap.Coordinate( name='Time', unit='s', mode=flap.CoordinateMode(equidistant=True), start=thomson_time[0], step=thomson_time[1] - thomson_time[0], #shape=time_arr.shape, dimension_list=[1]))) coord.append( copy.deepcopy( flap.Coordinate(name='Sample', unit='n.a.', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, dimension_list=[1]))) if spline_data: thomson_r_coord = thomson['SPLINE_RADII'] if pressure: data_arr = thomson['SPLINE_PE'] data_arr_err = None data_unit = flap.Unit(name='Pressure', unit='kPa') elif temperature: data_arr = thomson['SPLINE_TE'] data_arr_err = None data_unit = flap.Unit(name='Temperature', unit='keV') elif density: data_arr = thomson['SPLINE_NE'] data_arr_err = None data_unit = flap.Unit(name='Density', unit='m-3') else: thomson_r_coord = thomson['FIT_RADII'] if pressure: data_arr = thomson['FIT_PE'] data_arr_err = thomson['FIT_PE_ERR'] data_unit = flap.Unit(name='Pressure', unit='kPa') elif temperature: data_arr = thomson['FIT_TE'] data_arr_err = thomson['FIT_TE_ERR'] data_unit = flap.Unit(name='Temperature', unit='keV') elif density: data_arr = thomson['FIT_NE'] data_arr_err = thomson['FIT_NE_ERR'] data_unit = flap.Unit(name='Density', unit='m-3') coord.append( copy.deepcopy( flap.Coordinate(name='Device R', unit='m', mode=flap.CoordinateMode(equidistant=False), values=thomson_r_coord, shape=thomson_r_coord.shape, dimension_list=[0]))) if test: plt.figure() if add_flux_coordinates: try: psi_rz_obj = flap.get_data('NSTX_MDSPlus', name='\EFIT02::\PSIRZ', exp_id=exp_id, object_name='PSIRZ_FOR_COORD') psi_mag = flap.get_data('NSTX_MDSPlus', name='\EFIT02::\SSIMAG', exp_id=exp_id, object_name='SSIMAG_FOR_COORD') psi_bdry = flap.get_data('NSTX_MDSPlus', name='\EFIT02::\SSIBRY', exp_id=exp_id, object_name='SSIBRY_FOR_COORD') except: raise ValueError("The PSIRZ MDSPlus node cannot be reached.") psi_values = psi_rz_obj.data[:, :, 32] psi_t_coord = psi_rz_obj.coordinate('Time')[0][:, 0, 0] psi_r_coord = psi_rz_obj.coordinate( 'Device R')[0][:, :, 32] #midplane is the middle coordinate in the array #Do the interpolation psi_values_spat_interpol = np.zeros( [thomson_r_coord.shape[0], psi_t_coord.shape[0]]) for index_t in range(psi_t_coord.shape[0]): norm_psi_values = (psi_values[index_t, :] - psi_mag.data[index_t] ) / (psi_bdry.data[index_t] - psi_mag.data[index_t]) norm_psi_values[np.isnan(norm_psi_values)] = 0. psi_values_spat_interpol[:, index_t] = np.interp( thomson_r_coord, psi_r_coord[index_t, :], norm_psi_values) psi_values_total_interpol = np.zeros(data_arr.shape) for index_r in range(data_arr.shape[0]): psi_values_total_interpol[index_r, :] = np.interp( thomson_time, psi_t_coord, psi_values_spat_interpol[index_r, :]) if test: for index_t in range(len(thomson_time)): plt.cla() plt.plot(thomson_r_coord, psi_values_total_interpol[:, index_t]) plt.pause(0.5) psi_values_total_interpol[np.isnan(psi_values_total_interpol)] = 0. coord.append( copy.deepcopy( flap.Coordinate(name='Flux r', unit='', mode=flap.CoordinateMode(equidistant=False), values=psi_values_total_interpol, shape=psi_values_total_interpol.shape, dimension_list=[0, 1]))) if test: plt.plot(psi_values_total_interpol, data_arr) d = flap.DataObject(data_array=data_arr, error=data_arr_err, data_unit=data_unit, coordinates=coord, exp_id=exp_id, data_title='NSTX Thomson data') if output_name is not None: flap.add_data_object(d, output_name) return d
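# Example (mirrors the calls made elsewhere in this file): load the fitted
# electron density profile of shot 139901, caching the raw MDSplus data under
# the local datapath, and register it in flap storage as 'DENSITY':
#
#     d = flap_nstx_thomson_data(exp_id=139901, density=True, output_name='DENSITY')
#     ne   = d.data                          # [radius, time]
#     time = d.coordinate('Time')[0][0, :]   # Thomson time vector (60 Hz system)
#     R    = d.coordinate('Device R')[0]     # major radius of each channel
#
# Exactly one of pressure/temperature/density must be True, otherwise a
# ValueError is raised; force_mdsplus=True re-reads the tree even if a pickle
# cache exists.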
def get_nstx_thomson_gradient(exp_id=None, pressure=False, temperature=False, density=False, r_pos=None, spline_data=False, output_name=None, device_coordinates=True, flux_coordinates=False): #Data is RADIUS x TIME if pressure + density + temperature != 1: raise ValueError( 'Only one of the inputs should be set (pressure, temperature, density)!' ) if device_coordinates + flux_coordinates != 1: raise ValueError( 'Either device_coordinates or flux_coordinates can be set, not both.' ) thomson = flap_nstx_thomson_data(exp_id, pressure=pressure, temperature=temperature, density=density, output_name='THOMSON_FOR_GRADIENT') thomson_spline = flap_nstx_thomson_data(exp_id, pressure=pressure, temperature=temperature, density=density, spline_data=True, output_name=None) if device_coordinates: radial_coordinate = thomson.coordinate('Device R')[0][:, 0] spline_radial_coordinate = thomson_spline.coordinate('Device R')[0][:, 0] if flux_coordinates: radial_coordinate = thomson.coordinate('Flux r')[0][:, 0] spline_radial_coordinate = thomson_spline.coordinate('Flux r')[0][:, 0] time_vector = thomson.coordinate('Time')[0][0, :] data = thomson.data error = thomson.error interp_data = thomson_spline.data #Calculation of the numerical gradient and interpolating the values for the given r_pos data_gradient = np.asarray([ (data[2:, i] - data[:-2, i]) / (2 * (radial_coordinate[2:] - radial_coordinate[:-2])) for i in range(len(time_vector)) ]).T data_gradient_error = np.asarray([ (np.abs(error[2:, i]) + np.abs(error[:-2, i])) / (2 * (radial_coordinate[2:] - radial_coordinate[:-2])) for i in range(len(time_vector)) ]).T interp_data_gradient = np.asarray([ (interp_data[2:, i] - interp_data[:-2, i]) / (2 * (spline_radial_coordinate[2:] - spline_radial_coordinate[:-2])) for i in range(len(time_vector)) ]).T #Interpolation for the r_pos if r_pos is not None: r_pos_gradient = np.asarray([ np.interp(r_pos, radial_coordinate[1:-1], data_gradient[:, i]) for i in range(len(time_vector)) ]) r_pos_gradient_spline = np.asarray([ np.interp(r_pos, spline_radial_coordinate[1:-1], interp_data_gradient[:, i]) for i in range(len(time_vector)) ]) ind_r = np.argmin(np.abs(radial_coordinate[1:-1] - r_pos)) if radial_coordinate[ind_r] < r_pos: R1 = radial_coordinate[1:-1][ind_r] R2 = radial_coordinate[1:-1][ind_r + 1] ind_R1 = ind_r ind_R2 = ind_r + 1 else: R1 = radial_coordinate[1:-1][ind_r - 1] R2 = radial_coordinate[1:-1][ind_r] ind_R1 = ind_r - 1 ind_R2 = ind_r #Result of error propagation (basically average biased error between the two neighboring radii) r_pos_gradient_error=np.abs((r_pos-R1)/(R2-R1))*data_gradient_error[ind_R2,:]+\ np.abs((r_pos-R2)/(R2-R1))*data_gradient_error[ind_R1,:] coord = [] coord.append( copy.deepcopy( flap.Coordinate( name='Time', unit='s', mode=flap.CoordinateMode(equidistant=True), start=time_vector[0], step=time_vector[1] - time_vector[0], #shape=time_arr.shape, dimension_list=[0]))) coord.append( copy.deepcopy( flap.Coordinate(name='Sample', unit='n.a.', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, dimension_list=[0]))) if device_coordinates: grad_unit = '/m' if flux_coordinates: grad_unit = '/psi' if pressure: data_unit = flap.Unit(name='Pressure gradient', unit='kPa' + grad_unit) elif temperature: data_unit = flap.Unit(name='Temperature gradient', unit='keV' + grad_unit) elif density: data_unit = flap.Unit(name='Density gradient', unit='m-3' + grad_unit) if not spline_data: d = flap.DataObject(exp_id=exp_id, data_array=r_pos_gradient, error=r_pos_gradient_error, data_unit=data_unit, 
coordinates=coord, data_title='NSTX Thomson gradient') else: d = flap.DataObject(exp_id=exp_id, data_array=r_pos_gradient_spline, data_unit=data_unit, coordinates=coord, data_title='NSTX Thomson gradient spline') else: coord = [] coord.append( copy.deepcopy( flap.Coordinate( name='Time', unit='s', mode=flap.CoordinateMode(equidistant=True), start=time_vector[0], step=time_vector[1] - time_vector[0], #shape=time_arr.shape, dimension_list=[1]))) coord.append( copy.deepcopy( flap.Coordinate(name='Sample', unit='n.a.', mode=flap.CoordinateMode(equidistant=True), start=0, step=1, dimension_list=[1]))) if pressure: data_unit = flap.Unit(name='Pressure gradient', unit='kPa/m') elif temperature: data_unit = flap.Unit(name='Temperature gradient', unit='keV/m') elif density: data_unit = flap.Unit(name='Density gradient', unit='m-3/m') if device_coordinates: radial_coordinate_name = 'Device R' radial_unit = 'm' if flux_coordinates: radial_coordinate_name = 'Flux r' radial_unit = '' if not spline_data: coord.append( copy.deepcopy( flap.Coordinate( name=radial_coordinate_name, unit=radial_unit, mode=flap.CoordinateMode(equidistant=False), values=radial_coordinate[1:-1], shape=radial_coordinate[1:-1].shape, dimension_list=[0]))) d = flap.DataObject(exp_id=exp_id, data_array=data_gradient, error=data_gradient_error, data_unit=data_unit, coordinates=coord, data_title='NSTX Thomson gradient') else: coord.append( copy.deepcopy( flap.Coordinate( name=radial_coordinate_name, unit=radial_unit, mode=flap.CoordinateMode(equidistant=False), values=spline_radial_coordinate[1:-1], shape=spline_radial_coordinate[1:-1].shape, dimension_list=[0]))) d = flap.DataObject(exp_id=exp_id, data_array=interp_data_gradient, data_unit=data_unit, coordinates=coord, data_title='NSTX Thomson gradient spline') if output_name is not None: flap.add_data_object(d, output_name) return d
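# Usage sketch (assumption; the r_pos value is illustrative): with r_pos set the
# returned DataObject is a time trace of the gradient interpolated to that
# radius, otherwise the full [radius, time] gradient is returned with a
# 'Device R' (or 'Flux r') coordinate attached:
#
#     grad = get_nstx_thomson_gradient(exp_id=139901, density=True, r_pos=1.4,
#                                      device_coordinates=True,
#                                      output_name='NE_GRAD_R140')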
def show_nstx_gpi_video(exp_id=None, #Shot number time_range=None, #Time range to show the video in, if not set, the enire shot is shown z_range=None, #Range for the contour/color levels, if not set, min-max is divided logz=False, #Plot the image in a logarithmic coloring plot_filtered=False, #Plot a high pass (100Hz) filtered video normalize=None, #Normalize the video by dividing it with a processed GPI signal # options: 'Time dependent' (LPF filtered) (recommended) # 'Time averaged' (LPF filtered and averaged for the time range) # 'Simple' (Averaged) normalizer_time_range=None, #Time range for the time dependent normalization subtract_background=False, #Subtract the background from the image (mean of the time series) plot_flux=False, #Plot the flux surfaces onto the video plot_separatrix=False, #Plot the separatrix onto the video plot_limiter=False, #Plot the limiter of NSTX from EFIT flux_coordinates=False, #Plot the signal as a function of magnetic coordinates device_coordinates=False, #Plot the signal as a function of the device coordinates new_plot=True, #Plot the video into a new figure window save_video=False, #Save the video into an mp4 format video_saving_only=False, #Saving only the video, not plotting it prevent_saturation=False, #Prevent saturation of the image by restarting the colormap colormap='gist_ncar', #Colormap for the plotting cache_data=True, #Try to load the data from the FLAP storage ): if exp_id is not None: print("\n------- Reading NSTX GPI data --------") if cache_data: try: d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI') except: print('Data is not cached, it needs to be read.') d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') else: d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') object_name='GPI' else: raise ValueError('The experiment ID needs to be set.') if time_range is None: print('time_range is None, the entire shot is plotted.') slicing=None else: if (type(time_range) is not list and len(time_range) != 2): raise TypeError('time_range needs to be a list with two elements.') #time_range=[time_range[0]/1000., time_range[1]/1000.] 
slicing={'Time':flap.Intervals(time_range[0],time_range[1])} d=flap.slice_data(object_name, exp_id=exp_id, slicing=slicing, output_name='GPI_SLICED') object_name='GPI_SLICED' if plot_filtered: print("**** Filtering GPI ****") d=flap.filter_data(object_name, exp_id=exp_id, output_name='GPI_FILTERED',coordinate='Time', options={'Type':'Highpass', 'f_low':1e2, 'Design':'Chebyshev II'}) object_name='GPI_FILTERED' if normalize is not None: print("**** Normalizing GPI ****") d=flap.get_data_object_ref(object_name) if normalize in ['Time averaged','Time dependent', 'Simple']: if normalize == 'Time averaged': coefficient=flap_nstx.analysis.calculate_nstx_gpi_norm_coeff(exp_id=exp_id, time_range=normalizer_time_range, f_high=1e2, design='Chebyshev II', filter_data=True, cache_data=True, ) if normalize == 'Time dependent': coefficient=flap.filter_data('GPI', exp_id=exp_id, output_name='GPI_LPF', coordinate='Time', options={'Type':'Lowpass', 'f_high':1e2, 'Design':'Chebyshev II'}) if slicing is not None: coefficient=coefficient.slice_data(slicing=slicing) if normalize == 'Simple': coefficient=flap.slice_data(object_name,summing={'Time':'Mean'}) data_obj=copy.deepcopy(d) data_obj.data = data_obj.data/coefficient.data flap.add_data_object(data_obj, 'GPI_DENORM') object_name='GPI_DENORM' else: raise ValueError('Normalize can either be "Time averaged","Time dependent" or "Simple".') if subtract_background: #DEPRECATED, DOESN'T DO MUCH HELP print('**** Subtracting background ****') d=flap.get_data_object_ref(object_name, exp_id=exp_id) background=flap.slice_data(object_name, exp_id=exp_id, summing={'Time':'Mean'}) data_obj=copy.deepcopy(d) data_obj.data=data_obj.data/background.data flap.add_data_object(data_obj, 'GPI_BGSUB') object_name='GPI_BGSUB' if ((plot_flux or plot_separatrix) and not flux_coordinates): print('Gathering MDSPlus EFIT data.') oplot_options={} if plot_separatrix: flap.get_data('NSTX_MDSPlus', name='\EFIT01::\RBDRY', exp_id=exp_id, object_name='SEP X OBJ' ) flap.get_data('NSTX_MDSPlus', name='\EFIT01::\ZBDRY', exp_id=exp_id, object_name='SEP Y OBJ' ) oplot_options['path']={'separatrix':{'Data object X':'SEP X OBJ', 'Data object Y':'SEP Y OBJ', 'Plot':True, 'Color':'red'}} if plot_flux: d=flap.get_data('NSTX_MDSPlus', name='\EFIT02::\PSIRZ', exp_id=exp_id, object_name='PSI RZ OBJ' ) oplot_options['contour']={'flux':{'Data object':'PSI RZ OBJ', 'Plot':True, 'Colormap':None, 'nlevel':51}} #oplot_options['line']={'trial':{'Horizontal':[[0.200,'red'],[0.250,'blue']], # 'Vertical':[[1.450,'red'],[1.500,'blue']], # 'Plot':True # }} else: oplot_options=None if flux_coordinates: print("**** Adding Flux r coordinates") d.add_coordinate(coordinates='Flux r',exp_id=exp_id) x_axis='Flux r' y_axis='Device z' if plot_separatrix: oplot_options={} oplot_options['line']={'separatrix':{'Vertical':[[1.0,'red']], 'Plot':True}} elif device_coordinates: x_axis='Device R' y_axis='Device z' else: x_axis='Image x' y_axis='Image y' if new_plot: plt.figure() if save_video: if time_range is not None: video_filename='NSTX_GPI_'+str(exp_id)+'_'+str(time_range[0])+'_'+str(time_range[1])+'.mp4' else: video_filename='NSTX_GPI_'+str(exp_id)+'_FULL.mp4' else: video_filename=None if video_saving_only: save_video=True if z_range is None: d=flap.get_data_object_ref(object_name, exp_id=exp_id) z_range=[d.data.min(),d.data.max()] if z_range[1] < 0: raise ValueError('All the values are negative, Logarithmic plotting is not allowed.') if logz and z_range[0] <= 0: print('Z range should not start with 0 when logarithmic Z axis is 
set. Forcing it to be 1 for now.') z_range[0]=1. if not save_video: flap.plot(object_name,plot_type='animation', exp_id=exp_id, axes=[x_axis,y_axis,'Time'], options={'Z range':z_range,'Wait':0.0,'Clear':False, 'Overplot options':oplot_options, 'Colormap':colormap, 'Log z':logz, 'Equal axes':True, 'Prevent saturation':prevent_saturation, 'Plot units':{'Time':'s', 'Device R':'m', 'Device z':'m'} }) else: if video_saving_only: import matplotlib current_backend=matplotlib.get_backend() matplotlib.use('agg') waittime=0. else: waittime=1./24. waittime=0. flap.plot(object_name,plot_type='anim-image', exp_id=exp_id, axes=[x_axis,y_axis,'Time'], options={'Z range':z_range,'Wait':0.0,'Clear':False, 'Overplot options':oplot_options, 'Colormap':colormap, 'Equal axes':True, 'Waittime':waittime, 'Video file':video_filename, 'Video format':'mp4', 'Prevent saturation':prevent_saturation, }) if video_saving_only: import matplotlib matplotlib.use(current_backend)
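# Usage sketch (assumption; shot and time range are illustrative): play a
# high-pass filtered, time-dependently normalized GPI video around an ELM with
# the separatrix overplotted, and save it to mp4 as well:
#
#     show_nstx_gpi_video(exp_id=139901, time_range=[0.324, 0.326],
#                         plot_filtered=True, normalize='Time dependent',
#                         plot_separatrix=True, device_coordinates=True,
#                         save_video=True)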
def show_nstx_gpi_video_frames(exp_id=None, time_range=None, start_time=None, n_frame=20, logz=False, z_range=[0,512], plot_filtered=False, normalize=False, cache_data=False, plot_flux=False, plot_separatrix=False, flux_coordinates=False, device_coordinates=False, new_plot=True, save_pdf=False, colormap='gist_ncar', save_for_paraview=False, colorbar_visibility=True ): if time_range is None and start_time is None: print('time_range is None, the entire shot is plotted.') if time_range is not None: if (type(time_range) is not list and len(time_range) != 2): raise TypeError('time_range needs to be a list with two elements.') if start_time is not None: if type(start_time) is not int and type(start_time) is not float: raise TypeError('start_time needs to be a number.') if not cache_data: #This needs to be enhanced to actually cache the data no matter what flap.delete_data_object('*') if exp_id is not None: print("\n------- Reading NSTX GPI data --------") if cache_data: try: d=flap.get_data_object_ref(exp_id=exp_id,object_name='GPI') except: print('Data is not cached, it needs to be read.') d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') else: d=flap.get_data('NSTX_GPI',exp_id=exp_id,name='',object_name='GPI') object_name='GPI' else: raise ValueError('The experiment ID needs to be set.') if time_range is None: time_range=[start_time,start_time+n_frame*2.5e-6] if normalize: flap.slice_data(object_name, slicing={'Time':flap.Intervals(time_range[0]-1/1e3*10, time_range[1]+1/1e3*10)}, output_name='GPI_SLICED_FOR_FILTERING') norm_obj=flap.filter_data('GPI_SLICED_FOR_FILTERING', exp_id=exp_id, coordinate='Time', options={'Type':'Lowpass', 'f_high':1e3, 'Design':'Elliptic'}, output_name='GAS_CLOUD') norm_obj.data=np.flip(norm_obj.data,axis=0) norm_obj=flap.filter_data('GAS_CLOUD', exp_id=exp_id, coordinate='Time', options={'Type':'Lowpass', 'f_high':1e3, 'Design':'Elliptic'}, output_name='GAS_CLOUD') norm_obj.data=np.flip(norm_obj.data,axis=0) coefficient=flap.slice_data('GAS_CLOUD', exp_id=exp_id, slicing={'Time':flap.Intervals(time_range[0],time_range[1])}, output_name='GPI_GAS_CLOUD').data data_obj=flap.slice_data('GPI', exp_id=exp_id, slicing={'Time':flap.Intervals(time_range[0],time_range[1])}) data_obj.data = data_obj.data/coefficient flap.add_data_object(data_obj, 'GPI_SLICED_DENORM') object_name='GPI_SLICED_DENORM' if plot_filtered: print("**** Filtering GPI") object_name='GPI_FILTERED' try: flap.get_data_object_ref(object_name, exp_id=exp_id) except: flap.filter_data(object_name, exp_id=exp_id, coordinate='Time', options={'Type':'Highpass', 'f_low':1e2, 'Design':'Chebyshev II'}, output_name='GPI_FILTERED') #Data is in milliseconds if plot_flux or plot_separatrix: print('Gathering MDSPlus EFIT data.') oplot_options={} if plot_separatrix: flap.get_data('NSTX_MDSPlus', name='\EFIT01::\RBDRY', exp_id=exp_id, object_name='SEP X OBJ' ) flap.get_data('NSTX_MDSPlus', name='\EFIT01::\ZBDRY', exp_id=exp_id, object_name='SEP Y OBJ' ) if plot_flux: d=flap.get_data('NSTX_MDSPlus', name='\EFIT01::\PSIRZ', exp_id=exp_id, object_name='PSI RZ OBJ' ) x_axis='Device R' y_axis='Device z' else: oplot_options=None if flux_coordinates: print("**** Adding Flux r coordinates") d.add_coordinate(coordinates='Flux r',exp_id=exp_id) x_axis='Flux r' y_axis='Device z' elif device_coordinates: x_axis='Device R' y_axis='Device z' if (not device_coordinates and not plot_separatrix and not flux_coordinates): x_axis='Image x' y_axis='Image y' if start_time is not None: 
start_sample_num=flap.slice_data(object_name, slicing={'Time':start_time}).coordinate('Sample')[0][0,0] if n_frame == 30: ny=6 nx=5 if n_frame == 20: ny=5 nx=4 gs=GridSpec(nx,ny) for index_grid_x in range(nx): for index_grid_y in range(ny): plt.subplot(gs[index_grid_x,index_grid_y]) if start_time is not None: slicing={'Sample':start_sample_num+index_grid_x*ny+index_grid_y} else: time=time_range[0]+(time_range[1]-time_range[0])/(n_frame-1)*(index_grid_x*ny+index_grid_y) slicing={'Time':time} d=flap.slice_data(object_name, slicing=slicing, output_name='GPI_SLICED') slicing={'Time':d.coordinate('Time')[0][0,0]} if plot_flux: flap.slice_data('PSI RZ OBJ',slicing=slicing,output_name='PSI RZ SLICE',options={'Interpolation':'Linear'}) oplot_options['contour']={'flux':{'Data object':'PSI RZ SLICE', 'Plot':True, 'Colormap':None, 'nlevel':51}} if plot_separatrix: flap.slice_data('SEP X OBJ',slicing=slicing,output_name='SEP X SLICE',options={'Interpolation':'Linear'}) flap.slice_data('SEP Y OBJ',slicing=slicing,output_name='SEP Y SLICE',options={'Interpolation':'Linear'}) oplot_options['path']={'separatrix':{'Data object X':'SEP X SLICE', 'Data object Y':'SEP Y SLICE', 'Plot':True, 'Color':'red'}} visibility=[True,True] if index_grid_x != nx-1: visibility[0]=False if index_grid_y != 0: visibility[1]=False flap.plot('GPI_SLICED', plot_type='contour', exp_id=exp_id, axes=[x_axis,y_axis,'Time'], options={'Z range':z_range, 'Interpolation': 'Closest value', 'Clear':False, 'Equal axes':True, 'Plot units':{'Device R':'m', 'Device z':'m'}, 'Axes visibility':visibility, 'Colormap':colormap, 'Colorbar':colorbar_visibility, 'Overplot options':oplot_options, }, plot_options={'levels':255}, ) actual_time=d.coordinate('Time')[0][0,0] #plt.title(str(exp_id)+' @ '+f"{actual_time*1000:.4f}"+'ms') plt.title(f"{actual_time*1000:.3f}"+'ms') if save_pdf: if time_range is not None: plt.savefig('NSTX_GPI_video_frames_'+str(exp_id)+'_'+str(time_range[0])+'_'+str(time_range[1])+'_nf_'+str(n_frame)+'.pdf') else: plt.savefig('NSTX_GPI_video_frames_'+str(exp_id)+'_'+str(start_time)+'_nf_'+str(n_frame)+'.pdf')
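# Example (argument values taken from the Figure 5 branch of
# plot_results_for_paper above): plot 30 consecutive frames of shot 139901
# starting at t = 0.3249158 s with the separatrix overplotted:
#
#     show_nstx_gpi_video_frames(exp_id=139901, start_time=0.3249158, n_frame=30,
#                                z_range=[0, 3900], plot_separatrix=True,
#                                device_coordinates=True, save_pdf=True,
#                                colormap='gist_ncar')
#
# Note: only n_frame=20 (4x5) and n_frame=30 (5x6) have a subplot grid defined.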
def nstx_gpi_velocity_analysis_spatio_temporal_displacement(exp_id=None, #Shot number time_range=None, #The time range for the calculation data_object=None, #Input data object if available from outside (e.g. generated sythetic signal) x_range=[0,63], #X range for the calculation y_range=[0,79], #Y range for the calculation x_search=10, y_search=10, fbin=10, plot=True, #Plot the results pdf=False, #Print the results into a PDF plot_error=False, #Plot the errorbars of the velocity calculation based on the line fitting and its RMS error #File input/output options filename=None, #Filename for restoring data nocalc=True, #Restore the results from the .pickle file from filename+.pickle return_results=False, ): #Constants for the calculation #Using the spatial calibration to find the actual velocities. coeff_r=np.asarray([3.7183594,-0.77821046,1402.8097])/1000. #The coordinates are in meters, the coefficients are in mm coeff_z=np.asarray([0.18090118,3.0657776,70.544312])/1000. #The coordinates are in meters, the coefficients are in mm #Input error handling if exp_id is None and data_object is None: raise ValueError('Either exp_id or data_object needs to be set for the calculation.') if data_object is None: if time_range is None and filename is None: raise ValueError('It takes too much time to calculate the entire shot, please set a time_range.') else: if type(time_range) is not list and filename is None: raise TypeError('time_range is not a list.') if filename is None and len(time_range) != 2: raise ValueError('time_range should be a list of two elements.') if data_object is not None and type(data_object) == str: if exp_id is None: exp_id='*' d=flap.get_data_object(data_object,exp_id=exp_id) time_range=[d.coordinate('Time')[0][0,0,0], d.coordinate('Time')[0][-1,0,0]] exp_id=d.exp_id flap.add_data_object(d, 'GPI_SLICED_FULL') if filename is None: wd=flap.config.get_all_section('Module NSTX_GPI')['Working directory'] comment='' filename=flap_nstx.analysis.filename(exp_id=exp_id, working_directory=wd+'/processed_data', time_range=time_range, purpose='sz velocity', comment=comment) pickle_filename=filename+'.pickle' if not os.path.exists(pickle_filename) and nocalc: print('The pickle file cannot be loaded. Recalculating the results.') nocalc=False if nocalc is False: slicing={'Time':flap.Intervals(time_range[0],time_range[1])} #Read data if data_object is None: print("\n------- Reading NSTX GPI data --------") d=flap.get_data('NSTX_GPI',exp_id=exp_id, name='', object_name='GPI') d=flap.slice_data('GPI',exp_id=exp_id, slicing=slicing, output_name='GPI_SLICED_FULL') d.data=np.asarray(d.data, dtype='float32') count=d.data.shape[0] vpol_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # poloidal velocity in km/sec vs. pixel vrad_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # radial velocity vs. pixel vpol_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # poloidal velocity in km/sec vs. pixel vrad_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) # radial velocity vs. 
pixel vpol = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) vrad = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) cmax_n = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) cmax_p = np.zeros([x_range[1]-x_range[0]+1,y_range[1]-y_range[0]+1,count]) sample_time=d.coordinate('Time')[0][1,0,0]-d.coordinate('Time')[0][0,0,0] ccorr_n=np.zeros([x_range[1]-x_range[0]+1, y_range[1]-y_range[0]+1, x_range[1]-x_range[0]+2*x_search+1, y_range[1]-y_range[0]+2*y_search+1]) ccorr_p=np.zeros([x_range[1]-x_range[0]+1, y_range[1]-y_range[0]+1, x_range[1]-x_range[0]+2*x_search+1, y_range[1]-y_range[0]+2*y_search+1]) for t0 in range(fbin+1,count-fbin-1): #Zero lag Autocorrelation calculation for the reference, +sample_time, -sample_time data n_data=d.data[t0-fbin-1:t0+fbin-1, x_range[0]-x_search:x_range[1]+x_search+1, y_range[0]-y_search:y_range[1]+y_search+1] acorr_pix_n=np.sqrt(np.sum((n_data-np.mean(n_data, axis=0))**2,axis=0)) p_data=d.data[t0-fbin+1:t0+fbin+1, x_range[0]-x_search:x_range[1]+x_search+1, y_range[0]-y_search:y_range[1]+y_search+1] acorr_pix_p=np.sqrt(np.sum((p_data-np.mean(p_data, axis=0))**2,axis=0)) ref_data=d.data[t0-fbin:t0+fbin, x_range[0]:x_range[1]+1, y_range[0]:y_range[1]+1] acorr_pix_ref=np.sqrt(np.sum((ref_data-np.mean(ref_data, axis=0))**2,axis=0)) print((t0-fbin-1)/(count-2*(fbin-1))*100.) #Zero lag Crosscovariance calculation for the positive and negative sample time signal for i0 in range(x_range[1]-x_range[0]+1): for j0 in range(y_range[1]-y_range[0]+1): frame_ref=d.data[t0-fbin:t0+fbin,i0+x_range[0],j0+y_range[0]] frame_ref=frame_ref-np.mean(frame_ref) for i1 in range(2*x_search+1): for j1 in range(2*y_search+1): frame_n=d.data[t0-fbin-1:t0+fbin-1, i1+i0+x_range[0]-x_search, j1+j0+y_range[0]-y_search] frame_n=frame_n-np.mean(frame_n) frame_p=d.data[t0-fbin+1:t0+fbin+1, i1+i0+x_range[0]-x_search, j1+j0+y_range[0]-y_search] frame_p=frame_p-np.mean(frame_p) ccorr_n[i0,j0,i1,j1]=np.sum(frame_ref*frame_n) ccorr_p[i0,j0,i1,j1]=np.sum(frame_ref*frame_p) #Calculating the actual cross-correlation coefficients for i0 in range(x_range[1]-x_range[0]+1): for j0 in range(y_range[1]-y_range[0]+1): vcorr_p=np.zeros([2*x_search+1,2*y_search+1]) vcorr_n=np.zeros([2*x_search+1,2*y_search+1]) for i1 in range(2*x_search+1): for j1 in range(2*y_search+1): vcorr_p[i1,j1]=ccorr_p[i0,j0,i1,j1]/(acorr_pix_ref[i0,j0]*acorr_pix_p[i0+i1,j0+j1]) vcorr_n[i1,j1]=ccorr_n[i0,j0,i1,j1]/(acorr_pix_ref[i0,j0]*acorr_pix_n[i0+i1,j0+j1]) #Calculating the displacement in pixel coordinates index_p=np.unravel_index(np.argmax(vcorr_p),shape=vcorr_p.shape) index_n=np.unravel_index(np.argmax(vcorr_n),shape=vcorr_n.shape) cmax_p[i0,j0,t0]=vcorr_p[index_p] cmax_n[i0,j0,t0]=vcorr_n[index_n] #Transforming the coordinates into spatial coordinates delta_index_p=np.asarray(index_p)-np.asarray([x_search,y_search]) delta_index_n=np.asarray(index_n)-np.asarray([x_search,y_search]) vpol_p[i0,j0,t0]=(coeff_z[0]*delta_index_p[0]+ coeff_z[1]*delta_index_p[1])/sample_time vpol_n[i0,j0,t0]=(coeff_z[0]*delta_index_n[0]+ coeff_z[1]*delta_index_n[1])/sample_time vrad_p[i0,j0,t0]=(coeff_r[0]*delta_index_p[0]+ coeff_r[1]*delta_index_p[1])/sample_time vrad_n[i0,j0,t0]=(coeff_r[0]*delta_index_n[0]+ coeff_r[1]*delta_index_n[1])/sample_time #Calculating the average between the positive and negative shifted pixels vpol_tot = (vpol_p - vpol_n)/2. # Average p and n correlations vrad_tot = (vrad_p - vrad_n)/2. 
# This is non causal #Averaging in an fbin long time window for t0 in range(int(fbin/2),count-int(fbin/2)): vpol[:,:,t0] = np.mean(vpol_tot[:,:,t0-int(fbin/2):t0+int(fbin/2)], axis=2) vrad[:,:,t0] = np.mean(vrad_tot[:,:,t0-int(fbin/2):t0+int(fbin/2)], axis=2) results={'Time':d.coordinate('Time')[0][:,0,0], 'Radial velocity':vrad, 'Poloidal velocity':vpol, 'Maximum correlation p':cmax_p, 'Maximum correlation n':cmax_n} pickle.dump(results, open(pickle_filename, 'wb')) else: results=pickle.load(open(pickle_filename, 'rb')) print('Data loaded from pickle file.') if pdf: pdf=PdfPages(filename.replace('processed_data', 'plots')+'.pdf') if plot: plt.figure() plt.errorbar(results['Time'], np.mean(results['Radial velocity'], axis=(0,1)), np.sqrt(np.var(results['Radial velocity'], axis=(0,1)))) plt.title('Radial velocity vs time') plt.xlabel('Time [s]') plt.ylabel('Radial velocity [m/s]') if pdf: pdf.savefig() plt.figure() plt.errorbar(results['Time'], np.mean(results['Poloidal velocity'], axis=(0,1)), np.sqrt(np.var(results['Poloidal velocity'], axis=(0,1)))) plt.title('Poloidal velocity vs time') plt.xlabel('Time [s]') plt.ylabel('Poloidal velocity [m/s]') plt.pause(0.001) if pdf: pdf.savefig() plt.figure() plt.errorbar(results['Time'], np.mean(results['Maximum correlation p'], axis=(0,1)), np.sqrt(np.var(results['Maximum correlation p'], axis=(0,1)))) plt.title('Maximum correlation p vs time') plt.xlabel('Time [s]') plt.ylabel('Maximum correlation p') plt.pause(0.001) if pdf: pdf.savefig() pdf.close() if return_results: return results
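# Usage sketch (assumption; time range and pixel ranges are illustrative): run
# the spatio-temporal displacement estimate on a 1 ms window and return the
# result dictionary. Keeping x_range/y_range at least x_search/y_search pixels
# away from the frame edges avoids indexing outside the 64x80 image:
#
#     results = nstx_gpi_velocity_analysis_spatio_temporal_displacement(
#         exp_id=139901, time_range=[0.324, 0.325],
#         x_range=[10, 53], y_range=[10, 69],
#         plot=True, pdf=True, nocalc=False, return_results=True)
#
#     # results keys: 'Time', 'Radial velocity', 'Poloidal velocity',
#     # 'Maximum correlation p', 'Maximum correlation n'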