Example #1
 def __init__(self, folderName):
     myDATA = dotdict.dotdict()
     for x in (['B1H', 'B1V', 'B2H', 'B2V']):
         myFileList = glob.glob(folderName + '/*%s*.h5' % x)
         myTimestampList = []
         pus = []
         status = []
         if myFileList:
             fills = np.unique([
                 int(filename.split('/')[-1].split('_')[0])
                 for filename in myFileList
             ])
             df = {}
             for fill in fills:
                 df[fill] = importData.LHCFillsByNumber(fill)
                 df[fill] = df[fill][df[fill]['mode'] != 'FILL']
                 df[fill] = df[fill].reset_index(drop=True)
         for fileName in myFileList:
             fill = int((fileName.split('/')[-1].split('_'))[0])
             time = self.fromName2Timestamp(fileName.split('/')[-1])
             status.append(self.getStatus(time, fill, df))
             myTimestampList.append(time)
             pus.append(self.fromName2PU(fileName.split('/')[-1]))
         myDATA['at' + x] = pd.DataFrame(index=np.array(myTimestampList))
         myDATA['at' + x]['fileName'] = np.array(myFileList)
         myDATA['at' + x]['Status'] = status
         myDATA['at' + x]['PU'] = pus
     self.importEmptyDF = myDATA
Example #2
 def __init__(self, folderName):
     myDATA = dotdict.dotdict()
     for x in (['B1H', 'B1V', 'B2H', 'B2V']):
         myFileList = glob.glob(folderName + '/*BQ%sT.%s*.h5' %
                                (x[-1], x[0:2]))
         myTimestampList = []
         for fileName in myFileList:
             time = self.fromName2Timestamp(fileName)
             myTimestampList.append(time)
         myDATA['at' + x] = pd.DataFrame(index=np.array(myTimestampList))
         myDATA['at' + x]['fileName'] = np.array(myFileList)
     self.importEmptyDF = myDATA
Example #3
 def __init__(self, folderName):
     """
     Import the list of the DOROS acquisitions taken during MD4147, sorted by acquisition type, DOR (orbit) or DOS (oscillations), and then by BPM name. The resulting structure is: *.atDOR/*.atDOS -> (BPM name) -> DataFrame.
     ===EXAMPLE===
     from cl2pd import noise
     doros = noise.DOROS('/eos/user/s/skostogl/SWAN_projects/Noise/DOROS_data/181025')
     myDict = doros.importEmptyDF
     """
     myDATA = dotdict.dotdict()
     for i, j in zip(['ORB', 'OSC'], ['DOR', 'DOS']):
         myFileList = glob.glob(folderName + '/%s*' % i)
         unique_bpms = self.fromName2BPM(myFileList)
         myTimestampList = []
         bpms = []
         for fileName in myFileList:
             myTimestampList.append(self.fromName2Timestamp(fileName))
             bpms.append(self.fromName2BPM(fileName))
         myDATA['at' + j] = dotdict.dotdict()
         for bpm in unique_bpms:
             idx = [a for a, b in enumerate(bpms) if b == bpm]
             myDATA['at' + j][bpm] = pd.DataFrame(
                 index=np.array(myTimestampList)[idx])
             myDATA['at' + j][bpm]['fileName'] = np.array(myFileList)[idx]
     self.importEmptyDF = myDATA
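
The constructor in Example #3 fills a nested dotdict, keyed first by acquisition type ('atDOR', 'atDOS') and then by BPM name. Below is a minimal access sketch, assuming the class is exposed as noise.DOROS as in the docstring and that the EOS folder is reachable; the BPM key is a placeholder to be replaced by one of the printed names.

from cl2pd import noise

doros = noise.DOROS('/eos/user/s/skostogl/SWAN_projects/Noise/DOROS_data/181025')
catalogue = doros.importEmptyDF

print(catalogue['atDOR'].keys())             # BPM names found among the orbit (DOR) files
# bpm_df = catalogue['atDOR']['<BPM name>']  # placeholder key: pick one of the names above
# print(bpm_df['fileName'])                  # files for that BPM, indexed by timestamp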
Example #4
def importEmptyDF(folderName, startFile=0, endFile=-1):
    """
    Import the list of the EPC acquisitions stored in folderName.
    The subset of files to consider can be selected with startFile and endFile.
    ===EXAMPLE===
    from cl2pd import noise
    myDict = noise.importEmptyDF('/eos/project/a/abpdata/lhc/rawdata/power_converter')
    """
    myDATA = dotdict.dotdict()
    for i, j in zip(['current', 'voltage'], ['Current', 'Voltage']):
        myFileList = glob.glob(folderName + '/*' + i)
        myFileList.sort(key=lambda x: fromName2Timestamp(x))
        myFileList = myFileList[startFile:endFile]  # note: the default endFile=-1 excludes the last file
        myTimestampList = []
        for fileName in myFileList:
            myTimestampList.append(fromName2Timestamp(fileName))
        myDATA['at' + j] = pd.DataFrame(index=myTimestampList)
        myDATA['at' + j]['fileName'] = myFileList
    return myDATA
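
A short usage sketch for Example #4, showing the startFile/endFile selection; it assumes the function is exposed as noise.importEmptyDF and that the EOS folder is reachable.

from cl2pd import noise

# Catalogue only the first 100 files of each signal type
myDict = noise.importEmptyDF('/eos/project/a/abpdata/lhc/rawdata/power_converter',
                             startFile=0, endFile=100)

print(myDict['atCurrent'].head())   # timestamp-indexed file list for the *current files
print(myDict['atVoltage'].head())   # timestamp-indexed file list for the *voltage files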
Example #5
def follow_harmonics(df):
    """
  Follow 50 Hz oscillation of harmonics from the average FFT 
  """
    fs = 50000.
    lim = int(0.5 * fs / 50)
    t0 = df.iloc[0].name
    complex_fft = [[] for j in range(lim)]
    frequency = [[] for j in range(lim)]
    counter_tot = [[] for j in range(lim)]
    timestamp = [[] for j in range(lim)]
    dt = [[] for j in range(lim)]
    counter = 0
    for index, row in df.iterrows():
        print(row.name)
        data = row['data'].reshape(100, 10000)  ### Average of 100 acquisitions
        fourier = np.average(
            [abs(np.fft.fft(data[j, :])) for j in range(data.shape[0])],
            axis=0)
        fourier /= float(len(fourier)) * 2.0
        freqs = np.arange(0, len(data[0, :])) * fs / len(data[0, :])
        counter += 1
        for i in range(0, lim):
            timestamp[i].append(row.name)
            dt[i].append((row.name - t0).total_seconds() / 60.)  # minutes since the first acquisition
            imin = (i + 1) * 50. - 10.
            imax = (i + 1) * 50. + 10.
            myfilter = (freqs > imin) & (freqs < imax)
            idx = np.argmax(abs(fourier[myfilter]))
            frequency[i].append(freqs[myfilter][idx])
            complex_fft[i].append(fourier[myfilter][idx])
            counter_tot[i].append(counter)
    df_fft = dotdict.dotdict()
    for harmonic in range(len(frequency)):
        df_fft['h%s' % harmonic] = pd.DataFrame(index=timestamp[harmonic])
        df_fft['h%s' % harmonic]['frequency'] = frequency[harmonic]
        df_fft['h%s' % harmonic]['fourier'] = complex_fft[harmonic]
        df_fft['h%s' % harmonic]['file_number'] = counter_tot[harmonic]
        df_fft['h%s' % harmonic]['dt'] = dt[harmonic]
    return df_fft
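
To illustrate the input expected by follow_harmonics, here is a self-contained sketch with synthetic data (a 150 Hz tone sampled at 50 kHz plus noise): a timestamp-indexed DataFrame whose 'data' column holds one flat buffer of 100 x 10000 samples per row. The timestamps and signal are invented, and follow_harmonics is assumed to be in scope (e.g. imported from cl2pd.noise).

import numpy as np
import pandas as pd

fs = 50000.                                    # sampling frequency assumed by follow_harmonics
t = np.arange(100 * 10000) / fs                # 100 acquisitions of 10000 samples each
index = pd.date_range('2018-10-25 12:00', periods=3, freq='1min')

signals = np.empty(len(index), dtype=object)   # one flat buffer per timestamp
for k in range(len(index)):
    signals[k] = np.sin(2 * np.pi * 150. * t) + 0.1 * np.random.randn(t.size)

df = pd.DataFrame({'data': signals}, index=index)
df_fft = follow_harmonics(df)
print(df_fft['h2'][['frequency', 'dt']])       # 'h2' is the 150 Hz band (h0 is 50 Hz)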
Example #6
    def getData(self,
                time_list,
                return_status=False,
                for_beam='both',
                for_plane='both',
                remove_overlap=False,
                span=3,
                buffer_size=2048,
                skip=0):

        df = dotdict.dotdict()
        if for_beam == 'both':
            beams = ['B1', 'B2']
        else:
            beams = [for_beam]
        if for_plane == 'both':
            planes = ['H', 'V']
        else:
            planes = [for_plane]

        for beam in beams:
            for plane in planes:
                df['at%s%s' % (beam, plane)] = pd.DataFrame()
                var = [
                    'LHC.BQBBQ.CONTINUOUS_HS.%s:ACQ_DATA_%s' % (beam, plane),
                    'ALB.SR4.%s:FGC_FREQ' % beam
                ]
                for time in time_list:
                    raw_data = importData.cals2pd(var, time[0], time[1])
                    if return_status:
                        raw_data['status'] = raw_data.index.map(FindStatus)
                    raw_data[var[1]] = raw_data[var[1]].interpolate(
                        limit_direction='both')
                    raw_data['frev'] = raw_data[var[1]] / 35640.
                    raw_data.dropna(subset=[var[0]], inplace=True)
                    raw_data['shape'] = raw_data[var[0]].apply(
                        lambda x: len(x))
                    raw_data = raw_data[raw_data['shape'] == buffer_size]

                    if not remove_overlap:
                        df['at%s%s' % (beam, plane)] = pd.concat(
                            [df['at%s%s' % (beam, plane)], raw_data])
                    elif not raw_data.empty:  ### Remove overlap
                        data = []
                        for i in raw_data[var[0]]:
                            data.append(i)
                        to_flatten = tuple([
                            np.array(raw_data.index),
                            np.array(data),
                            np.array(raw_data[var[1]])
                        ])
                        test = {var[0]: to_flatten}
                        flatten = {}
                        for name, (timestamps, values,
                                   values2) in test.items():
                            flatten[
                                name], timestamps2, frf2 = self.flattenoverlap(
                                    values, timestamps, values2)
                        step = 1 + skip
                        n = span * buffer_size
                        turns = np.arange(0, len(flatten[var[0]]))
                        # sliding windows of n = span*buffer_size turns, advancing by `step` turns
                        chunk_t = [turns[x]
                                   for x in range(0, len(turns) - n, step)]
                        chunk_var = [flatten[var[0]][x:x + n]
                                     for x in range(0, len(flatten[var[0]]) - n, step)]
                        chunk_time = [timestamps2[x]
                                      for x in range(0, len(timestamps2) - n, step)]
                        chunk_frf = [frf2[x]
                                     for x in range(0, len(frf2) - n, step)]
                        raw_data2 = pd.DataFrame(
                            {
                                var[0]: chunk_var,
                                'turns': chunk_t,
                                var[1]: chunk_frf
                            },
                            index=chunk_time)
                        raw_data2['frev'] = raw_data2[var[1]] / 35640.
                        raw_data2['shape'] = raw_data2[var[0]].apply(
                            lambda x: len(x))
                        df['at%s%s' % (beam, plane)] = raw_data2
        return df
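
A call sketch for Example #6: the method belongs to a class in cl2pd.noise (instantiated here as `acq`, a placeholder name) and requires CALS/NXCALS access through importData.cals2pd. The time windows are illustrative.

import pandas as pd

# List of (start, end) windows to acquire
time_list = [(pd.Timestamp('2018-10-25 12:00', tz='CET'),
              pd.Timestamp('2018-10-25 12:10', tz='CET')),
             (pd.Timestamp('2018-10-25 13:00', tz='CET'),
              pd.Timestamp('2018-10-25 13:10', tz='CET'))]

df = acq.getData(time_list, for_beam='B1', for_plane='H',
                 remove_overlap=True, span=3, buffer_size=2048)
print(df['atB1H'][['shape', 'frev']].head())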
Example #7
def heatmaps(df,
             status='all',
             beam='all',
             plane='all',
             mode='amplitude',
             threshold=None,
             pval_threshold=None,
             search='h',
             flag_set_lim=True,
             harms='all',
             ax=None):
    """
    Heatmaps of the correlations between harmonics.
    Input:  -df
            -status: e.g. 'all', ['RAMP'], ['FLATTOP', 'RAMP']
            -beam: e.g. ['B1', 'B2'], 'all'
            -plane: e.g. ['H'], 'all'
            -mode: 'amplitude' or 'angle'
            -threshold: keep only correlations whose absolute value exceeds this threshold
            -pval_threshold: keep only correlations whose p-value is below this threshold
            -search: 'h' selects harmonics of 50 Hz, 'f' selects frequency bins
            -flag_set_lim: fix the colorbar of the heatmap to [-1, 1]
            -harms: 'all', or a [min, max] range of harmonic indices, e.g. [1, 30]
            -ax: pass a specific subplot (or array of subplots)
    Output: -correlation matrices
            -strongest absolute correlations (the first 300, or, if a threshold is given, those above it)
            -bins of the harmonics
            -matrices of p-values
    """

    corr_tot = dotdict.dotdict()
    pval_tot = dotdict.dotdict()
    strongest = dotdict.dotdict()

    if ax is not None:
        counter_beam = 0
        counter_plane = 0

    if beam == 'all':
        beam = df[list(df.keys())[0]]['beam'].unique()
    if plane == 'all':
        plane = df[list(df.keys())[0]]['plane'].unique()
    if status == 'all':
        status = df[list(df.keys())[0]]['status'].unique()
    for beams in beam:
        for planes in plane:
            if planes == plane[0] and ax is not None:
                counter_plane = 0
            for stat in status:
                print(beams, planes, stat)
                dfnew = pd.DataFrame()
                if harms == 'all':
                    max_lim = len(
                        [k for k in df.keys() if k.startswith(search)])
                    min_lim = 0
                elif type(harms) is list:
                    min_lim = harms[0]
                    max_lim = harms[1]
                bins_tot = []
                for h in range(min_lim, max_lim):
                    group = df['%s%s' % (search, h)]
                    group = group[(group['beam'] == beams)
                                  & (group['plane'] == planes) &
                                  (group['status'] == stat)]
                    bins_tot.append(group['bin'].iloc[0])
                    if mode == 'amplitude':
                        dfnew = pd.concat([dfnew, group['fourier'].abs()],
                                          axis=1,
                                          ignore_index=True)
                    elif mode == 'angle':
                        signal1 = np.unwrap(np.angle(group['fourier']))
                        turns = group['turns']
                        x0 = np.angle(group['fourier'])[0]
                        f = group['f'].iloc[0]
                        signal2 = x0 + 2.0 * np.pi * f * turns
                        dfnew = pd.concat([dfnew, signal1 - signal2],
                                          axis=1,
                                          ignore_index=True)
                corr = dfnew.corr()
                pval = calculate_pvalues(dfnew)
                if ax is None:
                    fig1, ax1 = plt.subplots(figsize=(10, 8))
                else:
                    try:
                        plt.sca(ax[counter_plane, counter_beam])
                    except Exception:  # fall back to a 1-D array of axes
                        plt.sca(ax[counter_beam])
                plt.title('%s , %s%s , %s' % (mode, beams, planes, stat),
                          fontsize=16)
                plt.xlabel('h')
                plt.ylabel('h')

                for i in range(corr.shape[0]):
                    corr.iloc[i, i] = 0.0
                if threshold is not None:
                    if pval_threshold is None:
                        corr[corr.abs() < threshold] = 0.
                    else:
                        mask = (pval < pval_threshold) & (corr.abs() >
                                                          threshold)
                        corr = corr * mask
                elif pval_threshold is not None:
                    mask = (pval < pval_threshold)
                    corr = corr * mask
                if flag_set_lim:
                    axs = sns.heatmap(
                        corr,
                        xticklabels=15,
                        yticklabels=15,
                        vmin=-1,
                        vmax=1,
                        cbar_kws={'label': 'Correlation coefficients'},
                        cmap='seismic')
                else:
                    axs = sns.heatmap(
                        corr,
                        xticklabels=15,
                        yticklabels=15,
                        cbar_kws={'label': 'Correlation coefficients'},
                        cmap='seismic',
                        ax=ax[counter_beam, counter_plane])
                plt.tight_layout()
                #plt.show()
                a = get_top_abs_correlations(dfnew,
                                             n1=0,
                                             n2=300,
                                             threshold=threshold)
                print('Top Absolute Correlations')
                print(a)
                corr_tot['%s_%s_%s_%s' % (beams, planes, stat, mode)] = corr
                pval_tot['%s_%s_%s_%s' % (beams, planes, stat, mode)] = pval
                strongest['%s_%s_%s_%s' % (beams, planes, stat, mode)] = a
            if ax is not None:
                counter_plane += 1
        if ax is not None:
            counter_beam += 1
    return corr_tot, strongest, bins_tot, pval_tot
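
A usage sketch for Example #7, assuming df_fft is the per-harmonic dictionary produced by get_fft (Example #8 below) and that heatmaps and its helpers (calculate_pvalues, get_top_abs_correlations) come from the same module.

import matplotlib.pyplot as plt

corr, strongest, bins, pval = heatmaps(df_fft,
                                       status=['FLATTOP'],
                                       beam=['B1'], plane=['H'],
                                       mode='amplitude',
                                       threshold=0.8,        # keep only |corr| > 0.8
                                       pval_threshold=0.05)  # and p-values < 0.05
plt.show()
print(corr.keys())   # e.g. 'B1_H_FLATTOP_amplitude'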
Example #8
def get_fft(df, fr=50, df_fft=None):
    """
    Computes the FFT and organizes the result by harmonics of 50 Hz if fr=50 (h0, h1, ...) or by bins if fr is a list (f0, f1, ...).
    If df_fft is provided, the new frequency branches are appended to the existing df_fft.
    If a 'turns' index is detected (sliding window), the first turn of each window is saved in an extra column of the DataFrame.
    """

    h = 35640
    if df_fft is None:
        df_fft = dotdict.dotdict()
    if type(fr) is not list:
        lim = 112  ## int(frev/50/2)
    else:
        lim = len(np.arange(fr[0], fr[-1]))
    lists = [[] for j in range(lim)]
    flag_t = False
    for beam in df.keys():
        for plane in df[beam].keys():
            for status in df[beam][plane]['tbt'].keys():
                group = df[beam][plane]['tbt'][status].dropna()
                print(beam, plane, status)
                for i in range(len(group)):
                    data = group.iloc[i]
                    if 'turns' in data.index:
                        date = data.timestamps[0]
                        turns = data.turns[0]
                    else:
                        date = data.name
                    fourier = np.fft.fft(
                        data['LHC.BQBBQ.CONTINUOUS_HS.%s:ACQ_DATA_%s' %
                             (beam, plane)])
                    freq = np.fft.fftfreq(
                        len(data['LHC.BQBBQ.CONTINUOUS_HS.%s:ACQ_DATA_%s' %
                                 (beam, plane)]))
                    if type(fr) is not list:
                        if 'turns' in data.index:
                            frf = data['ALB.SR4.%s:FGC_FREQ' % beam][0]
                        else:
                            frf = data['ALB.SR4.%s:FGC_FREQ' % beam]
                        harm = fr / (frf / h)
                        indexes = [
                            int(k * harm * len(fourier))
                            for k in range(1, lim + 1)
                        ]
                    else:
                        indexes = np.arange(fr[0], fr[-1])
                    for j in range(len(indexes)):
                        if 'turns' in data.index:
                            flag_t = True
                            lists[j].append([
                                status, j, indexes[j], freq[indexes[j]],
                                fourier[indexes[j]], date, turns, beam, plane
                            ])
                        else:
                            lists[j].append([
                                status, j, indexes[j], freq[indexes[j]],
                                fourier[indexes[j]], date, beam, plane
                            ])

    if type(fr) is not list:
        key = 'h'
    else:
        key = 'f'
    for j in range(lim):
        if flag_t:
            df_fft['%s%s' % (key, str(j))] = pd.DataFrame(
                data=lists[j],
                columns=[
                    'status', 'h', 'bin', 'f', 'fourier', 'timestamps',
                    'turns', 'beam', 'plane'
                ])
        else:
            df_fft['%s%s' % (key, str(j))] = pd.DataFrame(data=lists[j],
                                                          columns=[
                                                              'status', 'h',
                                                              'bin', 'f',
                                                              'fourier',
                                                              'timestamps',
                                                              'beam', 'plane'
                                                          ])
            df_fft['%s%s' % (key, str(j))].set_index(['timestamps'],
                                                     inplace=True)
    return df_fft
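
A short usage sketch for Example #8, assuming df is the nested dictionary returned by get_data (Example #9 below).

# Organize by 50 Hz harmonics: one DataFrame per harmonic (h0 = 50 Hz, h1 = 100 Hz, ...)
df_fft = get_fft(df, fr=50)
print(df_fft['h0'].head())

# Or keep a fixed range of FFT bins instead: branches f0 ... f99 for bins 100 to 199
df_fft_bins = get_fft(df, fr=[100, 200])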
Example #9
def get_data(modes,
             time,
             rename_duplicates=False,
             remove_overlap=False,
             n=8000):
    """
    TbT data for the modes and time spans specified.
    If the same mode is specified more than once, it is renamed.
    If remove_overlap is True, the overlap is removed and the data are combined with a sliding window of n turns (default 8000).
    Input: list of modes & a dictionary `time` with the start and end time for each mode
    Output: df -> beam ('B1', 'B2') -> plane ('H', 'V') -> 'tbt' (TbT data with frev interpolated)
    """

    df = dotdict.dotdict()
    beams = ['B1', 'B2']
    planes = ['H', 'V']
    for beam in beams:
        df[beam] = dotdict.dotdict()
        for plane in planes:
            df[beam][plane] = dotdict.dotdict()
            df[beam][plane]['tbt'] = dotdict.dotdict()
            var = [
                'LHC.BQBBQ.CONTINUOUS_HS.%s:ACQ_DATA_%s' % (beam, plane),
                'ALB.SR4.%s:FGC_FREQ' % beam
            ]
            if rename_duplicates:
                counter_dup = 0
            for mode in modes:
                if mode in df[beam][plane]['tbt'] and rename_duplicates:
                    print "Renaming key..."
                    counter_dup += 1
                    new_mode = '%s%s' % (mode, str(counter_dup))
                    df[beam][plane]['tbt'][new_mode] = dotdict.dotdict()
                else:
                    df[beam][plane]['tbt'][mode] = dotdict.dotdict()
                    new_mode = mode
                if time[new_mode][0] == 'all':
                    raw_data = importData.LHCCals2pd(var,
                                                     time[mode][1],
                                                     beamModeList=mode)
                else:
                    t1 = time[new_mode][0]
                    t2 = time[new_mode][1]
                    raw_data = importData.cals2pd(var, t1, t2)
                raw_data['status'] = new_mode
                raw_data['ALB.SR4.%s:FGC_FREQ' %
                         beam] = raw_data['ALB.SR4.%s:FGC_FREQ' %
                                          beam].interpolate(
                                              limit_direction='both')
                if not remove_overlap:
                    df[beam][plane]['tbt'][new_mode] = raw_data
                else:
                    raw_data = raw_data.dropna(subset=[var[0]])
                    m = []
                    for i in raw_data[var[0]]:
                        m.append(i)
                    m = np.array(m)
                    test2 = tuple([
                        np.array(raw_data.index), m,
                        np.array(raw_data[var[1]])
                    ])
                    test = {var[0]: test2}
                    flatten = {}
                    for name, (timestamps, values, values2) in test.items():
                        flatten[name], timestamps2, frf2 = flattenoverlap(
                            values, timestamps, values2)
                    step = 1
                    #n = 8000
                    turns = np.arange(0, len(flatten[var[0]]))
                    # sliding windows of n turns, advancing by `step` turns
                    chunk_t = [turns[x:x + n]
                               for x in range(0, len(turns) - n, step)]
                    chunk_var = [flatten[var[0]][x:x + n]
                                 for x in range(0, len(flatten[var[0]]) - n, step)]
                    chunk_time = [timestamps2[x:x + n]
                                  for x in range(0, len(timestamps2) - n, step)]
                    chunk_frf = [frf2[x:x + n]
                                 for x in range(0, len(frf2) - n, step)]
                    raw_data2 = pd.DataFrame({
                        var[0]: chunk_var,
                        'turns': chunk_t,
                        'timestamps': chunk_time,
                        var[1]: chunk_frf,
                        'status': new_mode
                    })
                    df[beam][plane]['tbt'][new_mode] = raw_data2

    return df
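
A usage sketch for Example #9; the timestamps are illustrative and CALS/NXCALS access through cl2pd.importData is assumed.

import pandas as pd

time = {'FLATTOP': [pd.Timestamp('2018-10-25 12:00', tz='CET'),
                    pd.Timestamp('2018-10-25 13:00', tz='CET')]}
df = get_data(['FLATTOP'], time)                           # raw TbT data per beam and plane
df_sw = get_data(['FLATTOP'], time, remove_overlap=True)   # sliding windows of 8000 turns
print(df['B1']['H']['tbt']['FLATTOP'].columns)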