def euvData(shotno=101393, tStart=_TSTART, tStop=_TSTOP, plot=False):
    """
    Get EUV fan array data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        default is False
        True - plots fan array of all 11 (of 16) channels

    Returns
    -------
    df : pandas.core.frame.DataFrame
        EUV data, one column per channel, time as index.

    Example
    -------
    ::

        df=euvData(101393,plot=True)
    """
    # NOTE: the original source had malformed docstring quoting
    # (""""" ... """ "") which left a stray string expression; fixed above.

    # subfunctions
    @_backupDFs
    def dfEUV(shotno, tStart, tStop, dfEUVMeta):
        dfEUV = mdsData(shotno,
                        dfEUVMeta.addresses.to_list(),
                        tStart,
                        tStop,
                        columnNames=dfEUVMeta.index.to_list())
        return dfEUV

    # load meta data (sensor addresses and names) from the spreadsheet
    sensor = 'EUV'
    try:
        directory = _path.dirname(_path.realpath(__file__))
        dfMeta = _readOdsToDF('%s/listOfAllSensorsOnHBTEP.ods' % directory,
                              sensor).set_index('names')
    except Exception:
        # __file__ is undefined in some interactive sessions; fall back to
        # the current working directory
        dfMeta = _readOdsToDF('listOfAllSensorsOnHBTEP.ods',
                              sensor).set_index('names')

    # load raw data and trim to the requested time window
    df = dfEUV(shotno, tStart, tStop, dfMeta)
    df = _filterDFByTime(df, tStart, tStop)

    if plot == True:
        df.plot()

    return df
def filterSignalsWithProbePhasing(dfProbe,
                                  dfSignal,
                                  plot=False,
                                  title='',
                                  timeFWHM=4e-4,
                                  shotno=0,
                                  y2label='G'):
    """
    Offset-subtract a signal using the phasing of an oscillating probe
    current.

    The offset is estimated only from times where the probe current is
    negative; the gaps (probe current positive) are filled by linear
    interpolation, the result is low-pass filtered, and the filtered
    offset is subtracted from the signal.

    Parameters
    ----------
    dfProbe : pandas.core.frame.DataFrame
        Probe current with time as index.  Only the first column is used.
    dfSignal : pandas.core.series.Series
        Signal to be offset-subtracted, time as index.
    plot : bool
        Optional plot of the intermediate steps.
    title : str
        Optional plot title prefix.
    timeFWHM : float
        FWHM (in seconds) of the Gaussian low-pass used to build the
        offset.  (The original default referenced an undefined
        module-level name `timeFWHM`; 4e-4 matches the default used by
        removeMagneticOffsetWithCurrentReference.)
    shotno : int
        Shot number, appended to the plot title.
    y2label : str
        Y-axis label for the signal subplots.  (The original body
        referenced `y2label` without defining it; it is now a keyword
        parameter with the same default as the enclosing caller.)

    Returns
    -------
    dfResult : pandas.core.frame.DataFrame
        Offset-subtracted signal, trimmed to the offset's time range.
    """
    dfSignalOld = dfSignal
    dfSignal = dfSignal.copy()

    def ranges(nums):
        # collapse a sorted collection of integers into [start, stop]
        # pairs of consecutive runs
        nums = sorted(set(nums))
        gaps = [[s, e] for s, e in zip(nums, nums[1:]) if s + 1 < e]
        edges = iter(nums[:1] + sum(gaps, []) + nums[-1:])
        return list(zip(edges, edges))

    # create mask: only keep data where probe current is < 0, NaN elsewhere
    dfMask = _pd.DataFrame(dfSignal.copy().to_numpy(), columns=['Data'])
    dfMask['time'] = dfSignal.copy().index
    # .loc avoids pandas chained-assignment (the original used
    # dfMask.Data[...] = nan, which is not guaranteed to write through)
    dfMask.loc[dfProbe.iloc[:, 0].to_numpy() > 0, 'Data'] = _np.nan

    # fill interior NaN runs with linear interpolation; runs touching
    # either edge are left NaN and dropped below
    nanFlags = _np.isnan(dfMask.Data.to_numpy())
    indexNan = dfMask[nanFlags].index
    rangeNan = ranges(indexNan.to_numpy())
    for r in rangeNan:
        if r[0] == 0:
            continue
        if r[-1] == dfMask.shape[0] - 1:
            continue
        # dfMask has a default RangeIndex here, so .loc labels equal
        # positions; .loc[r[0]:r[1]] covers the same rows as
        # .iloc[r[0]:r[1]+1]
        dfMask.loc[r[0]:r[1], 'Data'] = _np.interp(
            dfMask.iloc[(r[0]):(r[1] + 1)].time.to_numpy(),
            [dfMask.iloc[(r[0] - 1)].time, dfMask.iloc[(r[1] + 1)].time],
            [dfMask.iloc[(r[0] - 1)].Data, dfMask.iloc[(r[1] + 1)].Data])
    dfMask = dfMask.set_index('time')
    dfMask = dfMask.dropna()

    # low-pass filter the masked signal to obtain the slow offset
    dfMask = _pd.DataFrame(_gaussianFilter_df(
        _pd.DataFrame(dfMask.Data.copy(), index=dfMask.index),
        timeFWHM=timeFWHM,
        plot=False,
        filterType='low'),
        index=dfMask.index)

    # subtract the offset over the offset's (possibly trimmed) time range
    dfSignal = _filterDFByTime(dfSignal.copy(),
                               dfMask.index[0],
                               dfMask.index[-1])
    dfResult = _pd.DataFrame(dfSignal - dfMask.Data,
                             index=dfMask.index.to_numpy())

    if plot == True:
        title += ', %d' % shotno
        fig, ax = _plt.subplots(3, sharex=True)
        ax[0].plot(dfProbe.index * 1e3, dfProbe, label='Probe\nCurrent')
        ax[1].plot(dfSignalOld.index * 1e3, dfSignalOld, label='Original')
        ax[1].plot(dfMask.index * 1e3, dfMask, label='Offset')
        ax[2].plot(dfResult.index * 1e3, dfResult, label='Result')
        _plot.finalizeSubplot(ax[0], ylabel='A', title=title)
        _plot.finalizeSubplot(ax[1], ylabel=y2label)
        _plot.finalizeSubplot(ax[2], xlabel='Time (ms)', ylabel=y2label)
        _plot.finalizeFigure(fig, figSize=[6, 3.5])

    return dfResult
def removeMagneticOffsetWithCurrentReference(dfArrayRaw,
                                             dfCurrent,
                                             timeFWHM=4e-4,
                                             spatialFilter=False,
                                             plot=False,
                                             shotno=0,
                                             y2label='G'):
    """
    Same as standard offset subtraction but with an extra feature.  An
    additional dataframe with an oscillating current is also included;
    the filter uses the times where the current is negative to determine
    when to construct the offset.

    Parameters
    ----------
    dfArrayRaw : pandas.core.frame.DataFrame
        Dataframe with raw signals and index of time
    dfCurrent : pandas.core.frame.DataFrame
        Dataframe with probe current and index of time
    timeFWHM : float
        time width of the Gaussian filter
    spatialFilter : bool
        optional m=n=0 filter (subtracts the instantaneous array average)
    plot : bool
        optional plot of results
    shotno : int
        optional title
    y2label : str
        y-axis label used in the optional plots

    Returns
    -------
    dfFiltered : pandas.core.frame.DataFrame
        Dataframe with filtered output and index of time
    """

    def filterSignalsWithProbePhasing(dfProbe,
                                      dfSignal,
                                      plot=False,
                                      title='',
                                      timeFWHM=timeFWHM,
                                      shotno=0):
        # offset-subtract one signal using the probe-current phasing;
        # see the docstring of the enclosing function
        dfSignalOld = dfSignal
        dfSignal = dfSignal.copy()

        def ranges(nums):
            # collapse sorted integers into [start, stop] pairs of
            # consecutive runs
            nums = sorted(set(nums))
            gaps = [[s, e] for s, e in zip(nums, nums[1:]) if s + 1 < e]
            edges = iter(nums[:1] + sum(gaps, []) + nums[-1:])
            return list(zip(edges, edges))

        # create mask: only keep data where probe current is < 0, NaN
        # elsewhere.  .loc avoids pandas chained assignment.
        dfMask = _pd.DataFrame(dfSignal.copy().to_numpy(),
                               columns=['Data'])
        dfMask['time'] = dfSignal.copy().index
        dfMask.loc[dfProbe.iloc[:, 0].to_numpy() > 0, 'Data'] = _np.nan

        # fill interior NaN runs with linear interpolation; edge runs
        # stay NaN and are dropped below
        nanFlags = _np.isnan(dfMask.Data.to_numpy())
        indexNan = dfMask[nanFlags].index
        rangeNan = ranges(indexNan.to_numpy())
        for r in rangeNan:
            if r[0] == 0:
                continue
            if r[-1] == dfMask.shape[0] - 1:
                continue
            # default RangeIndex: .loc labels equal positions, and
            # .loc[r[0]:r[1]] covers the same rows as .iloc[r[0]:r[1]+1]
            dfMask.loc[r[0]:r[1], 'Data'] = _np.interp(
                dfMask.iloc[(r[0]):(r[1] + 1)].time.to_numpy(),
                [dfMask.iloc[(r[0] - 1)].time,
                 dfMask.iloc[(r[1] + 1)].time],
                [dfMask.iloc[(r[0] - 1)].Data,
                 dfMask.iloc[(r[1] + 1)].Data])
        dfMask = dfMask.set_index('time')
        dfMask = dfMask.dropna()

        # low-pass filter the masked data to get the slow offset
        dfMask = _pd.DataFrame(_gaussianFilter_df(
            _pd.DataFrame(dfMask.Data.copy(), index=dfMask.index),
            timeFWHM=timeFWHM,
            plot=False,
            filterType='low'),
            index=dfMask.index)

        # subtract the offset
        dfSignal = _filterDFByTime(dfSignal.copy(),
                                   dfMask.index[0],
                                   dfMask.index[-1])
        dfResult = _pd.DataFrame(dfSignal - dfMask.Data,
                                 index=dfMask.index.to_numpy())

        if plot == True:
            title += ', %d' % shotno
            fig, ax = _plt.subplots(3, sharex=True)
            ax[0].plot(dfProbe.index * 1e3, dfProbe,
                       label='Probe\nCurrent')
            ax[1].plot(dfSignalOld.index * 1e3, dfSignalOld,
                       label='Original')
            ax[1].plot(dfMask.index * 1e3, dfMask, label='Offset')
            ax[2].plot(dfResult.index * 1e3, dfResult, label='Result')
            _plot.finalizeSubplot(ax[0], ylabel='A', title=title)
            _plot.finalizeSubplot(ax[1], ylabel=y2label)
            _plot.finalizeSubplot(ax[2], xlabel='Time (ms)',
                                  ylabel=y2label)
            _plot.finalizeFigure(fig, figSize=[6, 3.5])

        return dfResult

    # clip data between 1.6e-3 and 10e-3 (epsilon added so the end
    # points survive the comparison in _filterDFByTime)
    dfArrayRaw = _filterDFByTime(dfArrayRaw.copy(),
                                 1.6000001e-3, 10.0000001e-3)
    dfCurrent = _filterDFByTime(dfCurrent.copy(),
                                1.6000001e-3, 10.0000001e-3)

    # optional m=n=0 spatial filter: subtract the instantaneous mean of
    # all sensors from each time slice (vectorized form of the original
    # iterrows loop)
    if spatialFilter == True:
        dfArrayRaw = dfArrayRaw.sub(dfArrayRaw.mean(axis=1), axis=0)

    # perform the filter on each signal.
    # .items() replaces .iteritems(), which was removed in pandas 2.0
    dfFiltered = _pd.DataFrame(index=dfArrayRaw.index)
    for key, val in dfArrayRaw.items():
        dfFiltered[key] = filterSignalsWithProbePhasing(dfCurrent,
                                                        val.copy(),
                                                        plot=plot,
                                                        title=key,
                                                        timeFWHM=timeFWHM,
                                                        shotno=shotno)

    return dfFiltered
def plasmaRadiusData(shotno=95782, tStart=_TSTART, tStop=_TSTOP,
                     plot=False, forceDownload=False):
    """
    Calculate the major and minor radius.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    dfData : pandas.core.frame.DataFrame
        'majorRadius' and 'minorRadius' columns (meters), time as index.

    Notes
    -----
    The radius calculations below are pulled from Paul Hughes's
    pauls_MDSplus_toolbox.py code.  In that code, he attributes
    Niko Rath for its implementation.

    Example
    -------
    ::

        df=plasmaRadiusData(95782,plot=True)
    """

    @_backupDFs
    def dfPlasmaRadius(shotno, tStart, tStop):
        # Determined by Daisuke during copper plasma calibration
        a = .00643005
        b = -1.10423
        c = 48.2567

        # Calculated by Jeff, but still has errors
        vf_pickup = 0.0046315133 * -1e-3
        oh_pickup = 7.0723416e-08

        # get vf and oh data
        dfCapBank = capBankData(shotno, tStart=tStart, tStop=tStop)
        vf = dfCapBank.VF_CURRENT.to_numpy()
        oh = dfCapBank.OH_CURRENT.to_numpy()
        time = dfCapBank.index.to_numpy()

        # get plasma current
        dfIp = ipData(shotno, tStart=tStart, tStop=tStop)
        ip = dfIp.IpRog.to_numpy() * 1212.3 * 1e-9  # ip gain

        # get cos-1 raw data
        dfCos1Rog = cos1RogowskiData(shotno, tStart=tStart, tStop=tStop)

        # integrate cos-1 raw.
        # cumtrapz was removed in SciPy 1.14; fall back for older SciPy
        try:
            from scipy.integrate import cumulative_trapezoid
        except ImportError:
            from scipy.integrate import cumtrapz as cumulative_trapezoid
        cos1 = cumulative_trapezoid(
            dfCos1Rog.COS_1_RAW,
            dfCos1Rog.index) + dfCos1Rog.COS_1_RAW.iloc[:-1] * 0.004571
        # cumulative integral has one fewer point; pad to original length
        cos1 = _np.append(cos1, 0)

        # r-major calculations: solve the calibration quadratic
        # a*r^2 + b*r + (c - ratio) = 0 for r
        pickup = vf * vf_pickup + oh * oh_pickup
        ratio = ip / (cos1 - pickup)
        arg = b**2 - 4 * a * (c - ratio)
        arg[arg < 0] = 0  # clamp to keep sqrt real
        r_major = (-b + _np.sqrt(arg)) / (2 * a)
        majorRadius = r_major / 100  # Convert to meters

        dfData = _pd.DataFrame()
        dfData['time'] = time
        dfData['majorRadius'] = majorRadius
        dfData = dfData.set_index('time')

        # minor radius: nominal 0.15 m, reduced when the plasma is
        # limited at either the outboard (>0.92 m) or inboard (<0.9 m)
        # wall.  NOTE(review): the asymmetric 0.92/0.9 thresholds are
        # reproduced from the original — confirm against the machine
        # geometry.
        minorRadius = _np.ones(len(majorRadius)) * 0.15
        minorRadius[majorRadius > 0.92] = 0.15 - (
            majorRadius[majorRadius > 0.92] - 0.92)
        minorRadius[majorRadius < 0.9] = 0.15 - (
            0.9 - majorRadius[majorRadius < 0.9])
        dfData['minorRadius'] = minorRadius

        return dfData

    dfData = dfPlasmaRadius(shotno,
                            tStart=tStart,
                            tStop=tStop,
                            forceDownload=forceDownload)
    dfData = _filterDFByTime(dfData, tStart, tStop)

    if plot == True:
        fig, ax = _plt.subplots(2, sharex=True)
        ax[0].plot(dfData.majorRadius)
        ax[1].plot(dfData.minorRadius)

    return dfData
def magneticSensorData(shotno=98173, tStart=_TSTART, tStop=_TSTOP,
                       plot=False, removeBadSensors=True, sensor='TA',
                       timeFWHMSmoothing=0.4e-3, forceDownload=False):
    """
    Downloads magnetic sensor data.  Presently, only poloidal
    measurements as the radial sensors are not yet implemented.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    removeBadSensors : bool
        removes known bad sensors
    sensor : str
        sensor to evaluate.  Must be in ['TA','PA1','PA2','FB']
    timeFWHMSmoothing : float
        Time constant associated with the offset subtraction filter
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    dfRaw : pandas.core.frame.DataFrame
        Raw magnetic data.  Time is index.
    dfSmoothed : pandas.core.frame.DataFrame
        Offset subtracted magnetic data.  Time is index.
    dfMeta : pandas.core.frame.DataFrame
        Meta data for the sensors

    Notes
    -----
    other possible bad sensors: ['TA02_S1P','TA07_S3P','TA10_S3P']
    The sensors that are bad are not necessarily consistent from shot to
    shot or year to year.

    Examples
    --------
    ::

        shotno=106000
        a,b,c=magneticSensorData(shotno,sensor='TA',plot=True,tStart=1.5e-3,tStop=5.5e-3)
        a,b,c=magneticSensorData(shotno,sensor='PA1',plot=True,tStart=1.5e-3,tStop=5.5e-3)
        a,b,c=magneticSensorData(shotno,sensor='PA2',plot=True,tStart=1.5e-3,tStop=5.5e-3)
        a,b,c=magneticSensorData(shotno,sensor='FB',plot=True,tStart=1.5e-3,tStop=5.5e-3,forceDownload=True)
    """
    if sensor not in ['TA', 'PA1', 'PA2', 'FB']:
        raise Exception('Bad sensor name')

    # subfunctions (one per array so @_backupDFs caches them separately)
    @_backupDFs
    def dfTARaw(shotno, dfTAMeta, tStart=_TSTART, tStop=_TSTOP):
        dfTARaw = mdsData(shotno,
                          dfTAMeta.addresses.to_list(),
                          tStart,
                          tStop,
                          columnNames=dfTAMeta.index.to_list())
        return dfTARaw

    @_backupDFs
    def dfPA1Raw(shotno, dfPA1Meta, tStart=_TSTART, tStop=_TSTOP):
        dfPA1Raw = mdsData(shotno,
                           dfPA1Meta.addresses.to_list(),
                           tStart,
                           tStop,
                           columnNames=dfPA1Meta.index.to_list())
        return dfPA1Raw

    @_backupDFs
    def dfPA2Raw(shotno, dfPA2Meta, tStart=_TSTART, tStop=_TSTOP):
        dfPA2Raw = mdsData(shotno,
                           dfPA2Meta.addresses.to_list(),
                           tStart,
                           tStop,
                           columnNames=dfPA2Meta.index.to_list())
        return dfPA2Raw

    @_backupDFs
    def dfFBRaw(shotno, dfFBMeta, tStart=_TSTART, tStop=_TSTOP):
        dfFBRaw = mdsData(shotno,
                          dfFBMeta.addresses.to_list(),
                          tStart,
                          tStop,
                          columnNames=dfFBMeta.index.to_list())
        return dfFBRaw

    # load meta data
    try:
        directory = _path.dirname(_path.realpath(__file__))
        dfMeta = _readOdsToDF('%s/listOfAllSensorsOnHBTEP.ods' % directory,
                              sensor).set_index('names')
    except Exception:
        # __file__ is undefined in some interactive sessions
        dfMeta = _readOdsToDF('listOfAllSensorsOnHBTEP.ods',
                              sensor).set_index('names')

    # load raw data
    if sensor == 'TA':
        dfRaw = dfTARaw(shotno, dfMeta, forceDownload=forceDownload)
    elif sensor == 'PA1':
        dfRaw = dfPA1Raw(shotno, dfMeta, forceDownload=forceDownload)
    elif sensor == 'PA2':
        dfRaw = dfPA2Raw(shotno, dfMeta, forceDownload=forceDownload)
    elif sensor == 'FB':
        dfRaw = dfFBRaw(shotno, dfMeta, forceDownload=forceDownload)

    if removeBadSensors:
        dfRaw = dfRaw.drop(
            columns=dfMeta[dfMeta.bad == True].index.to_list())
        dfMeta = dfMeta.drop(dfMeta[dfMeta.bad == True].index.to_list())

    dfRaw = _filterDFByTime(dfRaw, tStart, tStop)

    # filter data: high-pass removes the slow equilibrium offset
    dfSmoothed = _gaussianFilter_df(dfRaw,
                                    timeFWHM=timeFWHMSmoothing,
                                    filterType='high',
                                    plot=False)

    # optional stripey plots.
    # BUGFIX: the original renamed columns and rescaled the index of
    # dfSmoothed in place, so the returned dataframe was corrupted when
    # plot=True.  Plot from a copy instead.
    if plot == True:
        dfPlot = dfSmoothed.copy()
        dfPlot.index *= 1e3  # seconds -> ms for the plot only
        if 'PA' in sensor:
            dfPlot.columns = dfMeta.theta.values
            fig, ax, cax = _plotHbt.stripeyPlot(
                dfPlot * 1e4,
                title='%d' % shotno,
                poloidal=True,
                subtitle=sensor,
                xlabel='Time (ms)',
                ylabel=r'Poloidal angle, $\theta$')
        elif sensor == 'TA':
            dfPlot.columns = dfMeta.phi.values
            fig, ax, cax = _plotHbt.stripeyPlot(
                dfPlot * 1e4,
                title='%d' % shotno,
                toroidal=True,
                subtitle=sensor,
                xlabel='Time (ms)',
                ylabel=r'Toroidal angle, $\phi$')
        else:  # FB arrays: one stripey plot per poloidal row
            for s in ['S1P', 'S2P', 'S3P', 'S4P']:
                dfTemp = _filterDFByColOrIndex(dfPlot, s).copy()
                dfTempMeta = _filterDFByColOrIndex(dfMeta, s, col=False)
                dfTemp.columns = dfTempMeta.phi.values
                fig, ax, cax = _plotHbt.stripeyPlot(
                    dfTemp * 1e4,
                    title='%d' % shotno,
                    toroidal=True,
                    subtitle='FB_' + s,
                    xlabel='Time (ms)',
                    ylabel=r'Toroidal angle, $\phi$')

    return dfRaw, dfSmoothed, dfMeta
def ipData(shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False,
           findDisruption=True, verbose=False, paIntegrate=True,
           forceDownload=False):
    """
    Gets plasma current (I_p) data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    findDisruption : bool
        Optional.  Finds the time of disruption.
    verbose : bool
        Plots intermediate steps associated with the time of disruption
        calculation
    paIntegrate : bool
        Integrates the PA1 and PA2 sensors to provide an alternative Ip
        measurement
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    dfIp : pandas.core.frame.DataFrame
        Ip current from the Ip Rogowski
        (Optional) Ip current from the PA1 and PA2 sensors
        (Optional) Time of disruption

    Example
    -------
    ::

        ipData(96530,plot=True)
    """

    # subfunctions
    @_backupDFs
    def dfIpData(shotno, tStart, tStop):
        dataAddress = ['\HBTEP2::TOP.SENSORS.ROGOWSKIS:IP']
        df = mdsData(shotno,
                     dataAddress,
                     tStart,
                     tStop,
                     columnNames=['IpRog'])
        return df

    # download data from IP Rogowski
    dfIp = dfIpData(shotno,
                    tStart=tStart,
                    tStop=tStop,
                    forceDownload=forceDownload)
    dfIp = _filterDFByTime(dfIp, tStart, tStop)

    # integrate PA1/PA2 sensor data to get an alternative Ip estimate
    # (Ampere's law around the poloidal array)
    if paIntegrate == True:
        # constants
        mu0 = 4 * _np.pi * 1e-7
        minorRadius = 0.16

        for key in ['PA1', 'PA2']:
            dfPA, _, _ = magneticSensorData(shotno,
                                            tStart,
                                            tStop,
                                            sensor=key,
                                            forceDownload=forceDownload)
            ipPAIntegration = _np.array(
                dfPA.sum(axis=1) * 1.0 / dfPA.shape[1] * 2 * _np.pi *
                minorRadius / mu0)
            dfIp['ip%s' % key] = ipPAIntegration

    # find the time of disruption (best effort; failures are reported,
    # not raised)
    if findDisruption == True:
        try:
            dfTemp = _pd.DataFrame(dfIp.IpRog[dfIp.index > 1.5e-3])

            # band-pass the signal: high-pass removes the ramp, then a
            # narrow low-pass smooths the residual
            dfTemp['HP'] = _gaussianFilter_df(dfTemp,
                                              timeFWHM=0.5e-3,
                                              filterType='high',
                                              plot=False)
            dfTemp['LP'] = _gaussianFilter_df(_pd.DataFrame(dfTemp['HP']),
                                              timeFWHM=0.01e-3,
                                              filterType='low',
                                              plot=False)

            # time derivative of the smoothed ip
            dfTemp['dip2dt'] = _np.gradient(dfTemp.LP.to_numpy())

            # find the first large rise in d(ip2)/dt
            threshold = 11.0
            index = _np.where(dfTemp.dip2dt > threshold)[0][0]

            # walk forward to the local max of ip immediately after the
            # disruption onset
            while (dfTemp.IpRog.to_numpy()[index] <
                   dfTemp.IpRog.to_numpy()[index + 1]):
                index += 1
            tDisrupt = dfTemp.iloc[index].name

            if verbose:
                dfTemp['timeDisruption'] = _np.zeros(dfTemp.shape[0])
                # df.at avoids chained assignment
                dfTemp.at[tDisrupt, 'timeDisruption'] = dfTemp.IpRog.at[
                    tDisrupt]
                dfTemp.plot()

            dfIp['timeDisruption'] = _np.zeros(dfIp.shape[0])
            dfIp.at[tDisrupt, 'timeDisruption'] = dfIp.IpRog.at[tDisrupt]
        except Exception:
            print("time of disruption could not be found")

    if plot == True:
        fig, ax = _plt.subplots()
        # .items() replaces .iteritems(), removed in pandas 2.0
        for key, val in dfIp.items():
            ax.plot(val.index * 1e3, val, label=key)
        _plot.finalizeSubplot(ax,
                              xlabel='Time (ms)',
                              ylabel='Current (A)',
                              title='%d' % shotno)

    return dfIp
def capBankData(shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False):
    """
    Capacitor bank data.  Currents.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False

    Returns
    -------
    df : pandas.core.frame.DataFrame
        VF_CURRENT, OH_CURRENT and SH_CURRENT columns, time as index.

    Example
    -------
    ::

        capBankData(96530,plot=True)
    """

    def plotData(df, dfTF):
        """ Plot all relevant plots """
        _plt.figure()
        # 3x2 grid: TF spans the right column; VF/OH/SH stack on the left
        ax1 = _plt.subplot2grid((3, 2), (0, 1), rowspan=3)  #tf
        ax2 = _plt.subplot2grid((3, 2), (0, 0))  #vf
        ax3 = _plt.subplot2grid((3, 2), (1, 0), sharex=ax2)  #oh
        ax4 = _plt.subplot2grid((3, 2), (2, 0), sharex=ax2)  #sh
        fig = _plt.gcf()
        fig.set_size_inches(10, 5)

        ax1.plot(dfTF.index, dfTF.TF)
        # shade the requested data window on top of the (longer) TF trace
        ax1.axvspan(df.index[0], df.index[-1], color='r', alpha=0.3)
        _plot.finalizeSubplot(
            ax1,
            xlabel='Time (s)',
            # xlim=[-150,450],
            ylabel='TF Field (T)',
            title='%d' % shotno)
        ax2.plot(df.index * 1e3, df.VF_CURRENT * 1e-3)
        _plot.finalizeSubplot(ax2, ylabel='VF Current\n(kA)')
        ax3.plot(df.index * 1e3, df.OH_CURRENT * 1e-3)
        _plot.finalizeSubplot(ax3, ylim=[-20, 30], ylabel='OH Current\n(kA)')
        ax4.plot(df.index * 1e3, df.SH_CURRENT * 1e-3)
        # NOTE(review): ylim=[tStart, tStop] sets the y-axis of a kA trace
        # to the *time* bounds (e.g. [0, 0.01]) — this looks like a bug
        # (probably meant as an xlim or to be removed).  Also the xlabel
        # says seconds while the axis is plotted in ms.  Left unchanged;
        # confirm intent before fixing.
        _plot.finalizeSubplot(ax4,
                              ylim=[tStart, tStop],
                              xlabel='Time (s)',
                              ylabel='SH Current\n(kA)')

    # get vf data.
    # NOTE(review): this inner cached function shadows the enclosing
    # capBankData() name.  Renaming it could change the @_backupDFs
    # cache-file naming, so it is left as-is.
    @_backupDFs
    def capBankData(shotno, tStart, tStop):
        df = mdsData(shotno=shotno,
                     dataAddress=[
                         '\HBTEP2::TOP.SENSORS.VF_CURRENT',
                         '\HBTEP2::TOP.SENSORS.OH_CURRENT',
                         '\HBTEP2::TOP.SENSORS.SH_CURRENT'
                     ],
                     tStart=tStart,
                     tStop=tStop,
                     columnNames=['VF_CURRENT', 'OH_CURRENT', 'SH_CURRENT'])
        return df

    df = capBankData(shotno, tStart=tStart, tStop=tStop)
    df = _filterDFByTime(df, tStart, tStop)

    if plot == True:
        # get TF data over a wider window so the shot sits inside the
        # full TF pulse
        dfTF = tfData(shotno, tStart=-0.25, tStop=0.5)

        # plot
        plotData(df, dfTF)

    return df
def tfData(shotno=96530, tStart=_TSTART, tStop=_TSTOP, plot=False,
           upSample=True, forceDownload=False):
    """
    Toroidal field data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    upSample : bool
        up-samples the data to have a 2e-6 time step.
        NOTE(review): this parameter is currently *ignored* — the data is
        always up-sampled below regardless of its value.  Honoring it
        would change what @_backupDFs caches, so it is flagged rather
        than fixed here.
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    df : pandas.core.frame.DataFrame
        'TF' column, time as index, resampled onto a 2e-6 s grid.

    Notes
    -----
    note that the TF field data is recorded on an A14 where most of
    HBTEP data is stored with the CPCI.  Because the A14 has a slower
    sampling rate, this means that the TF data has fewer points than the
    rest of the HBTEP data, and this makes comparing data difficult.
    Therefore by default, I up-sample the data to match the CPCI
    sampling rate.

    Example
    -------
    ::

        df=tfData(96530,plot=True)
    """

    @_backupDFs
    def dfTF(shotno, tStart, tStop):
        dfOld = mdsData(shotno=shotno,
                        dataAddress=['\HBTEP2::TOP.SENSORS.TF_PROBE'],
                        tStart=tStart,
                        tStop=tStop,
                        columnNames=['TF'])
        # resample onto a uniform 2e-6 s grid (the CPCI rate) by linear
        # interpolation
        dt = 2e-6
        time = _np.arange(tStart, tStop + dt, dt)
        x = _np.interp(time, dfOld.index.to_numpy(), dfOld.TF.to_numpy())
        df = _pd.DataFrame(x, index=time, columns=['TF'])
        # arange can overshoot tStop by one step; trim back to the window
        df = df[(df.index >= tStart) & (df.index <= tStop)]
        return df

    df = dfTF(shotno, tStart=tStart, tStop=tStop,
              forceDownload=forceDownload)
    df = _filterDFByTime(df, tStart, tStop)

    if plot == True:
        df.plot()

    return df
def sxrData(shotno=98170, tStart=_TSTART, tStop=_TSTOP, plot=False,
            dropBadChannels=True, forceDownload=False):
    """
    Downloads (and optionally plots) soft x-ray sensor data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        default is False
    dropBadChannels : bool
        Drops bad channels
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    df : pandas.core.frame.DataFrame
        raw SXR data, time as index
    dfHP : pandas.core.frame.DataFrame
        high-pass filtered SXR data, time as index

    Notes
    -----
    Channels 5, 8, 10, 13 and 15 are considered "bad".  These particular
    channels are frequently "missing" from the tree, include anomalous
    data, or their signals are attenuated.

    Example
    -------
    ::

        df=sxrData(106000,plot=True,tStart=1.5e-3,tStop=5.2e-3)
    """

    # subfunctions
    @_backupDFs
    def dfSXR(shotno, dfSXRMeta):
        # NOTE: the full shot is downloaded here (no tStart/tStop) —
        # presumably so the cached backup covers all times; the result
        # is trimmed below.
        dfSXR = mdsData(shotno,
                        dfSXRMeta.addresses.to_list(),
                        columnNames=dfSXRMeta.index.to_list())
        return dfSXR

    # load meta data
    sensor = 'SXR'
    try:
        directory = _path.dirname(_path.realpath(__file__))
        dfMeta = _readOdsToDF('%s/listOfAllSensorsOnHBTEP.ods' % directory,
                              sensor).set_index('names')
    except Exception:
        # __file__ is undefined in some interactive sessions
        dfMeta = _readOdsToDF('listOfAllSensorsOnHBTEP.ods',
                              sensor).set_index('names')

    # load raw data
    df = dfSXR(shotno, dfMeta, forceDownload=forceDownload)

    # drop bad channels
    if dropBadChannels == True:
        badSensors = dfMeta[dfMeta.bad == True].index.to_list()
        dfMeta = dfMeta.drop(badSensors, axis=0)
        df = df.drop(columns=badSensors)

    # trim time
    df = _filterDFByTime(df, tStart, tStop)

    # high-pass filter (removes the slow equilibrium component)
    dfHP = _gaussianFilter_df(df, timeFWHM=0.4e-3)

    # optional plot
    if plot == True:
        if True:  # raw data
            fig, ax, cax = _plot.subplotsWithColormaps(2, sharex=True)
            cax[0].remove()
            # .items() replaces .iteritems(), removed in pandas 2.0
            for key, val in df.items():
                ax[0].plot(val, label=key)
            _plot.finalizeSubplot(
                ax[0],
                title='%d, raw' % shotno,
            )
            # derive numeric channel numbers from the trailing digits of
            # the column names for the stripey plot's y-axis
            temp = _np.copy(df.columns).astype(str)
            columns = _np.array([
                '%d' % int(temp[i][-2:]) for i in range(len(temp))
            ]).astype(float)
            df2 = df.copy()
            df2.columns = columns
            fig, ax, _ = _plotHbt.stripeyPlot(
                df2,
                fig=fig,
                ax=ax[1],
                cax=cax[1],
                colorMap='magma_r',
                zlim=[0, df.max().max()],
                levels=_np.linspace(0, df.max().max(), 41),
                ylabel='Channel #')
            _plot.finalizeFigure(fig)

        if True:  # filtered data
            fig, ax, cax = _plot.subplotsWithColormaps(2, sharex=True)
            cax[0].remove()
            for key, val in dfHP.items():
                ax[0].plot(val, label=key)
            _plot.finalizeSubplot(
                ax[0],
                title='%d, high-pass filtered' % shotno,
            )
            temp = _np.copy(dfHP.columns).astype(str)
            columns = _np.array([
                '%d' % int(temp[i][-2:]) for i in range(len(temp))
            ]).astype(float)
            dfHP2 = dfHP.copy()
            dfHP2.columns = columns
            fig, ax, _ = _plotHbt.stripeyPlot(
                dfHP2,
                fig=fig,
                ax=ax[1],
                cax=cax[1],
                ylabel='Channel #')
            _plot.finalizeFigure(fig)

    return df, dfHP
def solData(shotno=98030, tStart=_TSTART, tStop=_TSTOP, plot=False,
            timeFWHMSmoothing=0.4e-3):
    """
    SOL tile sensor data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    timeFWHMSmoothing : float
        Time constant associated with the offset subtraction filter

    Returns
    -------
    dfRaw : pandas.core.frame.DataFrame
        Raw SOL data.  Time is index.
    dfSmoothed : pandas.core.frame.DataFrame
        Offset subtracted SOL data.  Time is index.
    dfMeta : pandas.core.frame.DataFrame
        Meta data for the sensors

    Examples
    --------
    ::

        a,b,c=solData(98173)
    """

    # subfunctions
    @_backupDFs
    def dfSOLRaw(shotno, tStart, tStop, dfSOLMeta):
        dfSOLRaw = mdsData(shotno,
                           dfSOLMeta.addresses.to_list(),
                           tStart,
                           tStop,
                           columnNames=dfSOLMeta.index.to_list())
        return dfSOLRaw

    # load meta data
    sensor = 'SOL'
    try:
        directory = _path.dirname(_path.realpath(__file__))
        dfMeta = _readOdsToDF('%s/listOfAllSensorsOnHBTEP.ods' % directory,
                              sensor).set_index('names')
    except Exception:
        # __file__ is undefined in some interactive sessions
        dfMeta = _readOdsToDF('listOfAllSensorsOnHBTEP.ods',
                              sensor).set_index('names')

    # load raw data and trim to the requested window
    dfRaw = dfSOLRaw(shotno, tStart, tStop, dfMeta)
    dfRaw = _filterDFByTime(dfRaw, tStart, tStop)

    # high-pass filter removes the slow offset
    dfSmoothed = _gaussianFilter_df(dfRaw,
                                    timeFWHM=timeFWHMSmoothing,
                                    filterType='high',
                                    plot=False)

    # plot
    if plot == True:
        dfRaw.plot()
        dfSmoothed.plot()

    return dfRaw, dfSmoothed, dfMeta
def quartzJumperAndGroundingBusData(shotno=96530, tStart=_TSTART,
                                    tStop=_TSTOP, plot=False,
                                    timeFWHMSmoothing=0.4e-3,
                                    forceDownload=False):
    """
    External rogowski data.

    Parameters
    ----------
    shotno : int
        shot number of desired data
    tStart : float
        time (in seconds) to trim data before
        default is 0 ms
    tStop : float
        time (in seconds) to trim data after
        default is 10 ms
    plot : bool
        plots all relevant plots if true
        default is False
    timeFWHMSmoothing : float
        Time constant associated with the offset subtraction filter
    forceDownload : bool
        forces a fresh download instead of using the local backup

    Returns
    -------
    dfRaw : pandas.core.frame.DataFrame
        Raw jumper/grounding-bus data.  Time is index.
    dfSmoothed : pandas.core.frame.DataFrame
        Offset subtracted data.  Time is index.
    dfMeta : pandas.core.frame.DataFrame
        Meta data for the sensors

    Notes
    -----
    Rog. D is permanently off for the time being
    Rog. C is permanently off for the time being
    """

    # subfunctions
    @_backupDFs
    def dfJumperRaw(shotno, tStart, tStop, dfJumperMeta):
        dfJumperRaw = mdsData(shotno,
                              dfJumperMeta.addresses.to_list(),
                              tStart,
                              tStop,
                              columnNames=dfJumperMeta.index.to_list())
        return dfJumperRaw

    # load meta data
    sensor = 'Jumper'
    try:
        directory = _path.dirname(_path.realpath(__file__))
        dfMeta = _readOdsToDF('%s/listOfAllSensorsOnHBTEP.ods' % directory,
                              sensor).set_index('names')
    except Exception:
        # __file__ is undefined in some interactive sessions
        dfMeta = _readOdsToDF('listOfAllSensorsOnHBTEP.ods',
                              sensor).set_index('names')

    # load raw data
    dfRaw = dfJumperRaw(shotno,
                        tStart,
                        tStop,
                        dfMeta,
                        forceDownload=forceDownload)
    # gain correction for the two rack-ground channels
    # (factor of 100 — presumably a probe attenuation; confirm with the
    # hardware documentation)
    dfRaw['WestRackGround'] *= 100
    dfRaw['NorthRackGround'] *= 100
    dfRaw = _filterDFByTime(dfRaw, tStart, tStop)

    # high-pass filter removes the slow offset
    dfSmoothed = _gaussianFilter_df(dfRaw,
                                    timeFWHM=timeFWHMSmoothing,
                                    filterType='high',
                                    plot=False)

    # optional plot
    if plot == True:
        fig, ax = _plt.subplots(2, sharex=True)
        # .items() replaces .iteritems(), removed in pandas 2.0
        for key, val in dfRaw.items():
            ax[0].plot(val.index * 1e3, val, label=key)
        for key, val in dfSmoothed.items():
            ax[1].plot(val.index * 1e3, val, label=key)
        _plot.finalizeSubplot(
            ax[0],
            ylabel='A',
            subtitle='Raw',
            title='%d' % shotno,
        )
        _plot.finalizeSubplot(
            ax[1],
            xlabel='Time',
            ylabel='A',
            subtitle='Smoothed',
        )

    return dfRaw, dfSmoothed, dfMeta