Example #1
def GetBurg(self, X):
    import spectrum
    # Fit an order-20 AR model with Burg's method, then evaluate its PSD
    AR, rho, ref = spectrum.arburg(X, order=20)
    psd = spectrum.arma2psd(AR, rho=rho, NFFT=4096)
    # keep the upper half of the two-sided spectrum, reversed (integer division)
    psd = psd[len(psd):len(psd) // 2:-1]
    p = 10 * np.log(abs(psd) * 2. / (2. * np.pi))
    F = spectrum.linspace(0, 80, len(p))
    return p, F
def _autoreg(datum):
    order = 4
    try:
        coef, _, _ = arburg(datum, order)
        coef = coef.real.tolist()
    except ValueError:
        coef = [0] * order
    return coef
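A minimal usage sketch (not from the original example) showing what spectrum.arburg itself returns; the synthetic AR(2) series and all variable names below are illustrative:

import numpy as np
from spectrum import arburg

rng = np.random.default_rng(0)
# synthetic AR(2) series driven by white noise
e = rng.standard_normal(2048)
x = np.zeros(2048)
for n in range(2, 2048):
    x[n] = 1.3 * x[n - 1] - 0.75 * x[n - 2] + e[n]

A, P, k = arburg(x, order=2)
print(A)  # AR polynomial coefficients a(1)..a(order); the leading 1 is not included
print(P)  # driving-noise variance (residual power)
print(k)  # reflection (parcor) coefficients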
Example #3
def autoreg_coeff(df, order=1):
    """
    Estimate the complex autoregressive parameters by the Burg algorithm
    :return P: Real variable representing driving noise variance (mean square of residual noise) 
    from the whitening operation of the Burg filter. 
    """
    AR, P, k = arburg(df, order=order)
    return P
def arburg(*args):
    """From MATLAB:
    
    %ARBURG   AR parameter estimation via Burg method.
    %   A = ARBURG(X,ORDER) returns the polynomial A corresponding to the AR
    %   parametric signal model estimate of vector X using Burg's method.
    %   ORDER is the model order of the AR system.
    %
    %   [A,E] = ARBURG(...) returns the final prediction error E (the variance
    %   estimate of the white noise input to the AR model).
    %
    %   [A,E,K] = ARBURG(...) returns the vector K of reflection 
    %   coefficients (parcor coefficients).
    
    Using spectrum arburg:
    def arburg(X, order, criteria=None):
        Estimate the complex autoregressive parameters by the Burg algorithm.
        
        .. math:: x(n) = \sqrt{v} e(n) + \sum_{k=1}^{P+1} a(k) x(n-k)
        
        :param x:  Array of complex data samples (length N)
        :param order: Order of autoregressive process (0<order<N)
        :param criteria: select a criteria to automatically select the order 
        
        :return:
        * A Array of complex autoregressive parameters A(1) to A(order). First 
        value (unity) is not included !! 
        * P Real variable representing driving noise variance (mean square 
        of residual noise) from the whitening operation of the Burg 
        filter.
        * reflection coefficients defining the filter of the model.
    """
    x = args[0]
    p = args[1]
    if len(args) == 3:
        criteria = args[2]
        [A, E, K] = spectrum.arburg(x, p, criteria)
    else:
        [A, E, K] = spectrum.arburg(x, p)
    A = np.hstack((1, A)) # Adding unity, MATLAB gives it
    return A, E
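A hedged usage sketch for the MATLAB-style wrapper above, assuming `import spectrum` and `import numpy as np` in the same module; the test signal is illustrative:

import numpy as np

x = np.sin(2 * np.pi * 0.05 * np.arange(500)) + 0.2 * np.random.randn(500)
A, E = arburg(x, 4)  # wrapper defined above
print(A)  # [1, a1, ..., a4] with the MATLAB-style leading unity prepended
print(E)  # final prediction error (white-noise variance estimate)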
Example #6
def areg(results, matrix, suffix):
    corrs = [[] for _ in range(4)]
    for v in matrix:
        l = sorted(list(v))
        a = arburg(l, 4)
        for i in range(4):
            corrs[i].append(np.real(a[0][i]))

    if len(suffix):
        suffix += ","
    for i in range(4):
        results["tTotalAcc-arCoeff()-" + suffix + str(i + 1)] = corrs[i]
def compute_autoregressive(eeg,order):
	output_structure = {}
	for subset_name in eeg.keys():
		output_structure.update({subset_name : {}})
		for class_name in eeg[subset_name].keys():
			output_structure[subset_name].update({class_name : []})
			for i in range(len(eeg[subset_name][class_name])):
				ar_models = []
				for j in range(eeg[subset_name][class_name][i].shape[1]):
					model = spectrum.arburg(eeg[subset_name][class_name][i][:,j],order=order,criteria=None)[0]
					model = [item.real for item in model]
					ar_models.append(model)
				output_structure[subset_name][class_name].append(np.array(ar_models).transpose())
	return output_structure
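A sketch of the nested input layout compute_autoregressive appears to expect (eeg[subset][class] is a list of samples-by-channels arrays); the dictionary below is synthetic and assumes `import spectrum` and `import numpy as np` are in scope:

import numpy as np

eeg = {
    "train": {
        "rest":  [np.random.randn(512, 3) for _ in range(2)],
        "motor": [np.random.randn(512, 3) for _ in range(2)],
    }
}
features = compute_autoregressive(eeg, order=6)
# each entry has shape (order, n_channels): one AR model per channel, stacked column-wise
print(features["train"]["rest"][0].shape)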
Example #8
def burg(correlation, order):
    """
    Calculate the predictor coefficients for autoregressive linear prediction
    using the Burg algorithm

    Parameters
    ----------
    correlation : numpy array
        The autocorrelation function of a signal.
    order : int
        The order of the prediction.

    Returns
    -------
    coeffs : numpy array
        The calculated prediction coefficients
    energy : float
        The estimated residual error energy after
        prediction

    Notes
    -----

    * The first coefficient, 1, is left out.

    """
    if not order > 0:
        raise ValueError("order must be greater than zero")

    coeffs, energy, reflectioncoeffs = spectrum.arburg(correlation, order)

    # These values are pure speculation
    energy /= correlation[0]
    # After this step, the curve looked like the one from levinson BEFORE
    # THIS STEP. This means we need to divide energy by it again. Also,
    # the values are much much lower than from levinson, the proportions
    # are almost exactly the same. Ergo: multiply by the ratio between them.
    energy *= (18600 / 42)
    energy /= correlation[0]
    energy = min(energy, 1)

    return (coeffs, energy)
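A hedged usage sketch for burg() above; note that it is fed an autocorrelation sequence rather than the raw signal, and it assumes `import spectrum` at module level (the test data below is illustrative):

import numpy as np

sig = np.random.randn(1024)
# biased autocorrelation for non-negative lags
acf = np.correlate(sig, sig, mode="full")[len(sig) - 1:] / len(sig)
coeffs, energy = burg(acf, 10)
print(len(coeffs), energy)  # 10 coefficients; energy is clipped to at most 1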
def fill_noise(nW, preY, postY, SIG=3.0, orderAR=4, debug=False):
    # SIG - Before calculating fill remove SIG outliers
    # orderAR - order of the AR model to fit to data

    # For misbehaving light curves, too much data can be clipped out so that
    # fewer than orderAR points are left and arburg errors out. Detect when
    # too much data has been removed, then increase SIG and don't use the
    # robust MAD estimate.
    oLen = len(preY)
    preYStd = robust.mad(preY)
    preYMn = np.mean(preY)
    preY = preY - preYMn
    idx = np.where((np.abs(preY / preYStd) < SIG))[0]
    sigmults = [1.0, 1.0, 1.5, 2.0]
    if len(idx) < math.floor(0.8 * oLen):
        # Too much data removed try to increase SIG and less robust std
        cnt = 0
        while cnt < 2 and len(idx) < math.floor(0.8 * oLen):
            cnt = cnt + 1
            preYStd = np.std(preY)
            newSig = SIG * sigmults[cnt]
            idx = np.where((np.abs(preY / preYStd) < newSig))[0]
    preY = preY[idx]
    oLen = len(postY)
    postYStd = robust.mad(postY)
    postYMn = np.mean(postY)
    postY = postY - postYMn
    idx = np.where((np.abs(postY / postYStd) < SIG))[0]
    if len(idx) < math.floor(0.8 * oLen):
        # Too much data removed try to increase SIG and less robust std
        cnt = 0
        while cnt < 2 and len(idx) < math.floor(0.8 * oLen):
            cnt = cnt + 1
            postYStd = np.std(postY)
            newSig = SIG * sigmults[cnt]
            idx = np.where((np.abs(postY / postYStd) < newSig))[0]
    postY = postY[idx]
    if len(preY) >= orderAR:
        # Get the autocorrelation coeffs from preData
        preAR, preVar, k = arburg(preY, orderAR)
        # Now make the preFillData
        # Construct initial conditions
        lfil_ic = sig.lfiltic([1], np.insert(preAR, 0, 1.0), preY[-orderAR:])
        lfilter_result = sig.lfilter([1], np.insert(preAR,0,1.0), \
                np.random.normal(scale=np.sqrt(preVar), size=(nW,)), \
                zi=lfil_ic)
        preFill = np.real(lfilter_result[0])
    else:
        # as backup if the length of the pre data is too short do random
        preFill = np.random.normal(scale=preYStd, size=(nW, ))
        preVar = np.power(preYStd, 2)
    if debug:
        print(np.var(preFill), preVar)

    # Now do postData
    if len(postY) >= orderAR:
        postAR, postVar, k = arburg(postY, orderAR)
        lfil_ic = sig.lfiltic([1], np.insert(postAR, 0, 1.0),
                              np.flip(postY[0:orderAR], axis=0))
        lfilter_result = sig.lfilter([1], np.insert(postAR,0,1.0), \
                np.random.normal(scale=np.sqrt(postVar), size=(nW,)), \
                zi=lfil_ic)
        postFill = np.real(lfilter_result[0])
    else:
        postFill = np.random.normal(scale=postYStd, size=(nW, ))
        postVar = np.power(postYStd, 2)
    # final fill is linear weighted combination
    runN = np.arange(nW)
    preRamp = runN / float(nW)
    postRamp = 1.0 - preRamp
    # This scaling factor is necessary to linearly transition from the variance
    #  of the pre portion to the post portion
    scl = np.sqrt(
        (preRamp * (postVar - preVar) + preVar) /
        (preRamp * preRamp * preVar + np.power(1.0 - preRamp, 2) * postVar))

    finalFill = (preFill * preRamp + postFill * postRamp) * scl
    if debug:
        print(np.var(postFill), postVar)

        allN = len(preY) + len(finalFill) + len(postY)
        runN = np.arange(allN)
        showFill = preY
        showClr = np.zeros_like(preY)
        showFill = np.append(showFill, finalFill)
        showClr = np.append(showClr, np.ones_like(finalFill))
        showFill = np.append(showFill, postY)
        showClr = np.append(showClr, np.zeros_like(postY))
        plt.scatter(runN, showFill, c=showClr)
        plt.show()
    return finalFill
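A hedged usage sketch for fill_noise, assuming the module-level imports the snippet relies on (numpy as np, math, scipy.signal as sig, statsmodels' robust, matplotlib.pyplot as plt); the white-noise inputs are only illustrative:

import numpy as np

rng = np.random.default_rng(1)
preY = rng.normal(scale=1.0, size=500)   # data before the gap
postY = rng.normal(scale=2.0, size=500)  # data after the gap
gap = fill_noise(nW=100, preY=preY, postY=postY, SIG=3.0, orderAR=4)
print(gap.shape)  # (100,) of AR-model noise bridging the two variance levels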
print(P)
print(k)

'''
def s(f):
    # AR(2) model spectrum evaluated at normalized frequencies f
    a = [0] * long
    for i in range(long):
        a[i] = P / abs(1 - A[0] * sp.exp(-1j * f[i] * 2 * pi)
                       - A[1] * sp.exp(-2j * f[i] * 2 * pi)) ** 2
    return a

frequency = np.linspace(0, 0.5, long)
T = [0] * 720
for i in range(0, 720 - long):
    temp = aclose[i:i + long]
    A, P, k = arburg(X=temp, order=2)
    result = s(frequency)
    # result = np.int64(result)
    tmp = max(enumerate(result), key=lambda x: x[1])
    T[i] = 1 / max([tmp[1]])


print(len(T))
tmp = []
for i in range(0, len(T) - 10):
    tmp.append(np.mean(T[i:i + 5]))


#result=s(frequency*2*pi)

plt.subplot(2,1,1)
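For reference, a hedged alternative to the hand-rolled s(f) above: spectrum.arma2psd can evaluate the fitted AR(2) spectrum directly. The window below stands in for aclose[i:i+long] and the names are illustrative, not taken from the snippet:

import numpy as np
import spectrum

win = 64                                # stand-in for `long`
segment = np.random.randn(win)          # stand-in for aclose[i:i + win]
A, P, k = spectrum.arburg(segment, order=2)
psd = spectrum.arma2psd(A, rho=P, NFFT=4096)
half = psd[:len(psd) // 2]              # non-negative frequencies, 0 to 0.5 cycles/sample
peak_bin = int(np.argmax(half))
print(peak_bin / 4096, half[peak_bin])  # peak frequency and its power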
Example #11
def create_all_psd():


    f = pylab.linspace(0, 1, 4096)
    pylab.clf()

    pylab.figure(figsize=(12,8))

    #MA 15 order
    b, rho = spectrum.ma(data, 15, 30)
    psd = spectrum.arma2psd(B=b, rho=rho)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='MA 15')

    #ARMA 15 order
    a, b, rho = spectrum.arma_estimate(data, 15,15, 30)
    psd = spectrum.arma2psd(A=a,B=b, rho=rho)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='ARMA 15,15')

    #yulewalker
    ar, P,c = spectrum.aryule(data, 15, norm='biased')
    psd = spectrum.arma2psd(A=ar, rho=P)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq

    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='YuleWalker 15')

    #burg method
    ar, P,k = spectrum.arburg(data, order=15)
    psd = spectrum.arma2psd(A=ar, rho=P)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='Burg 15')

    #covar method
    af, pf, ab, pb, pv = spectrum.arcovar_marple(data, 15)
    psd = spectrum.arma2psd(A=af, B=ab, rho=pf)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='covar 15')

    #modcovar method
    a, p, pv = spectrum.modcovar_marple(data, 15)
    psd = spectrum.arma2psd(A=a)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='modcovar 15')

    #correlogram
    psd = spectrum.CORRELOGRAMPSD(data, data, lag=15)
    newpsd = tools.cshift(psd, len(psd)//2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='correlogram 15')

    #minvar
    psd = spectrum.minvar(data, 15)
    #newpsd = tools.cshift(psd, len(psd)/2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd/max(newpsd)), label='MINVAR 15')

    #music
    psd,db = spectrum.music(data, 15, 11)
    pylab.plot(f, 10 * pylab.log10(psd/max(psd)), '--',label='MUSIC 15')

    #ev music
    psd,db = spectrum.ev(data, 15, 11)
    pylab.plot(f, 10 * pylab.log10(psd/max(psd)), '--',label='EV 15')


    pylab.legend(loc='upper left', prop={'size':10}, ncol=2)
    pylab.ylim([-80,10])
    pylab.savefig('psd_all.png')
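A hedged sketch of the module-level names create_all_psd relies on but which the example does not show; marple_data is just one plausible input series shipped with spectrum:

import pylab
import spectrum
import spectrum.tools as tools
from spectrum import marple_data

data = marple_data   # 64-point complex test series
create_all_psd()     # writes psd_all.png with all the estimators overlaid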
Example #12
def ar(row):
    v1, _, _ = arburg(row, model_order)
    return v1
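A hedged sketch of applying ar() row by row; model_order is a module-level value in the original, so the 4 below is only an assumption:

import numpy as np
from spectrum import arburg

model_order = 4
X = np.random.randn(10, 256)               # ten signals of 256 samples each
coeffs = np.array([ar(row) for row in X])
print(coeffs.shape)                        # (10, 4) complex AR coefficients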
Example #13
def create_all_psd():

    f = pylab.linspace(0, 1, 4096)
    pylab.clf()

    pylab.figure(figsize=(12, 8))

    #MA 15 order
    b, rho = spectrum.ma(data, 15, 30)
    psd = spectrum.arma2psd(B=b, rho=rho)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='MA 15')

    #ARMA 15 order
    a, b, rho = spectrum.arma_estimate(data, 15, 15, 30)
    psd = spectrum.arma2psd(A=a, B=b, rho=rho)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='ARMA 15,15')

    #yulewalker
    ar, P, c = spectrum.aryule(data, 15, norm='biased')
    psd = spectrum.arma2psd(A=ar, rho=P)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq

    pylab.plot(f,
               10 * pylab.log10(newpsd / max(newpsd)),
               label='YuleWalker 15')

    #burg method
    ar, P, k = spectrum.arburg(data, order=15)
    psd = spectrum.arma2psd(A=ar, rho=P)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='Burg 15')

    #covar method
    af, pf, ab, pb, pv = spectrum.arcovar_marple(data, 15)
    psd = spectrum.arma2psd(A=af, B=ab, rho=pf)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='covar 15')

    #modcovar method
    a, p, pv = spectrum.modcovar_marple(data, 15)
    psd = spectrum.arma2psd(A=a)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='modcovar 15')

    #correlogram
    psd = spectrum.CORRELOGRAMPSD(data, data, lag=15)
    newpsd = tools.cshift(psd,
                          len(psd) // 2)  # switch positive and negative freq
    pylab.plot(f,
               10 * pylab.log10(newpsd / max(newpsd)),
               label='correlogram 15')

    #minvar
    psd = spectrum.minvar(data, 15)
    #newpsd = tools.cshift(psd, len(psd)/2) # switch positive and negative freq
    pylab.plot(f, 10 * pylab.log10(newpsd / max(newpsd)), label='MINVAR 15')

    #music
    psd, db = spectrum.music(data, 15, 11)
    pylab.plot(f, 10 * pylab.log10(psd / max(psd)), '--', label='MUSIC 15')

    #ev music
    psd, db = spectrum.ev(data, 15, 11)
    pylab.plot(f, 10 * pylab.log10(psd / max(psd)), '--', label='EV 15')

    pylab.legend(loc='upper left', prop={'size': 10}, ncol=2)
    pylab.ylim([-80, 10])
    pylab.savefig('psd_all.png')
Example #14
def calculateFDindexes(RR,  Finterp):
    
    def power(spec,freq,fmin,fmax):
        #returns power in band
        band = np.array([spec[i] for i in range(len(spec)) if freq[i] >= fmin and freq[i]<fmax])
        powerinband = np.sum(band)/len(spec)
        return powerinband
   
    def InterpolateRR(RR, Finterp):
        # returns cubic spline interpolated array with sample rate = Finterp
        step=1/Finterp 
        BT=np.cumsum(RR) 
        xmin=BT[0]
        xmax=BT[-1]
        BT = np.insert(BT,0,0)
        BT=np.append(BT, BT[-1]+1)
        RR = np.insert(RR,0,0)
        RR=np.append(RR, RR[-1])
        
        tck = interpolate.splrep(BT,RR)
        BT_interp = np.arange(xmin,xmax,step)
        RR_interp = interpolate.splev(BT_interp,  tck)
        return RR_interp,  BT_interp
    
    RR=RR/1000 #RR in seconds
    RR_interp, BT_interp=InterpolateRR(RR, Finterp)
    RR_interp=RR_interp-np.mean(RR_interp)

    freqs=np.arange(0, 2, 0.0001)
    
    # calculates AR coefficients
    AR, P, k = spct.arburg(RR_interp*1000, 16) #burg
    
    # estimates PSD from AR coefficients
    spec = spct.arma2psd(AR, T=0.25, NFFT=2*len(freqs)) # spectrum estimation
    spec = spec[0:len(spec)//2]
    
    # WELCH psd estimation
    
    # calculates power in different bands
    VLF=power(spec,freqs,0,0.04)
    LF=power(spec,freqs,0.04,0.15)
    HF=power(spec,freqs,0.15,0.4)
    Total=power(spec,freqs,0,2)
    LFHF = LF/HF
    nVLF=VLF/Total # Normalized
    nLF=LF/Total
    nHF=HF/Total
    
    #NormalizedHF HFNormal

    LFn=LF/(HF+LF)
    HFn=HF/(HF+LF)
    Power = [VLF, HF, LF]
    
    Power_Ratio= Power/sum(Power)
#    Power_Ratio=spec/sum(spec) # uncomment to calculate Spectral Entropy using all frequencies
    Spectral_Entropy = 0
    lenPower = 0  # keep count of the bands actually used
    for i in range(0, len(Power_Ratio)):
        if Power_Ratio[i] > 0:  # VLF could be 0
            Spectral_Entropy += Power_Ratio[i] * np.log(Power_Ratio[i])
            lenPower += 1
    Spectral_Entropy /= np.log(lenPower)  # instead of len(Power_Ratio), since VLF may not have been used
    
    labels= np.array(['VLF', 'LF', 'HF', 'Total', 'nVLF', 'nLF', 'nHF', 'LFn', 'HFn', 'LFHF', 'SpecEn'],  dtype='S10')
    
    return [VLF, LF, HF, Total, nVLF, nLF, nHF, LFn, HFn, LFHF, Spectral_Entropy], labels
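A hedged usage sketch for calculateFDindexes, assuming the imports the snippet relies on (numpy as np, scipy's interpolate, and spectrum imported as spct); RR intervals are expected in milliseconds, and the values below are synthetic:

import numpy as np

rng = np.random.default_rng(2)
RR = rng.normal(loc=800.0, scale=50.0, size=300)     # roughly 75 bpm with some variability
values, labels = calculateFDindexes(RR, Finterp=4)   # 4 Hz resampling is common for HRV
for name, val in zip(labels, values):
    print(name, val)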
Example #15
def scan_folder(parent):
    count = 0
    # iterate over all the files in directory 'parent'
    for file_name in os.listdir(parent):

        if file_name.endswith("Gyroscope.csv"):
            file_to_open = folder_name + '/' + file_name
            GYR_x_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field0)
            df = pd.DataFrame(GYR_x_axis_raw)
            print('i am currently reading Gyroscope file of')
            print(folder_name_in)

            GYR_x_axis_raw_mean = df.mean(axis=None, skipna=True)
            GYR_x_axis_raw_mean = list(GYR_x_axis_raw_mean)
            GYR_x_axis_raw_mean = str(GYR_x_axis_raw_mean)[1:-1]
            print('GYR_x_axis_raw_mean', GYR_x_axis_raw_mean)

            GYR_x_axis_raw_std = df.std(axis=None, skipna=True)
            GYR_x_axis_raw_std = list(GYR_x_axis_raw_std)
            GYR_x_axis_raw_std = str(GYR_x_axis_raw_std)[1:-1]
            print('GYR_x_axis_raw_std', GYR_x_axis_raw_std)

            GYR_x_axis_raw_mad = stats.median_absolute_deviation(
                GYR_x_axis_raw)
            GYR_x_axis_raw_mad = list(GYR_x_axis_raw_mad)
            GYR_x_axis_raw_mad = str(GYR_x_axis_raw_mad)[1:-1]
            print('GYR_x_axis_raw_mad', GYR_x_axis_raw_mad)

            GYR_y_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field1)
            df = pd.DataFrame(GYR_y_axis_raw)

            GYR_y_axis_raw_mean = df.mean(axis=None, skipna=True)
            GYR_y_axis_raw_mean = list(GYR_y_axis_raw_mean)
            GYR_y_axis_raw_mean = str(GYR_y_axis_raw_mean)[1:-1]
            print('GYR_y_axis_raw_mean', GYR_y_axis_raw_mean)

            GYR_y_axis_raw_std = df.std(axis=None, skipna=True)
            GYR_y_axis_raw_std = list(GYR_y_axis_raw_std)
            GYR_y_axis_raw_std = str(GYR_y_axis_raw_std)[1:-1]
            print('GYR_y_axis_raw_std', GYR_y_axis_raw_std)

            GYR_y_axis_raw_mad = stats.median_absolute_deviation(
                GYR_y_axis_raw)
            GYR_y_axis_raw_mad = list(GYR_y_axis_raw_mad)
            GYR_y_axis_raw_mad = str(GYR_y_axis_raw_mad)[1:-1]
            print('GYR_y_axis_raw_mad', GYR_y_axis_raw_mad)

            GYR_z_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field2)
            df = pd.DataFrame(GYR_z_axis_raw)

            GYR_z_axis_raw_mean = df.mean(axis=None, skipna=True)
            GYR_z_axis_raw_mean = list(GYR_z_axis_raw_mean)
            GYR_z_axis_raw_mean = str(GYR_z_axis_raw_mean)[1:-1]
            print('GYR_z_axis_raw_mean', GYR_z_axis_raw_mean)

            GYR_z_axis_raw_std = df.std(axis=None, skipna=True)
            GYR_z_axis_raw_std = list(GYR_z_axis_raw_std)
            GYR_z_axis_raw_std = str(GYR_z_axis_raw_std)[1:-1]
            print('GYR_z_axis_raw_std', GYR_z_axis_raw_std)

            GYR_z_axis_raw_mad = stats.median_absolute_deviation(
                GYR_z_axis_raw)
            GYR_z_axis_raw_mad = list(GYR_z_axis_raw_mad)
            GYR_z_axis_raw_mad = str(GYR_z_axis_raw_mad)[1:-1]
            print('GYR_z_axis_raw_mad', GYR_z_axis_raw_mad)

            GYR_x_axis_raw = pd.DataFrame(GYR_x_axis_raw)
            GYR_x_axis_raw = GYR_x_axis_raw['X'].tolist()
            GYR_y_axis_raw = pd.DataFrame(GYR_y_axis_raw)
            GYR_y_axis_raw = GYR_y_axis_raw['Y'].tolist()
            GYR_z_axis_raw = pd.DataFrame(GYR_z_axis_raw)
            GYR_z_axis_raw = GYR_z_axis_raw['Z'].tolist()

            GYR_xy_axis_raw_corr, _ = stats.pearsonr(GYR_x_axis_raw,
                                                     GYR_y_axis_raw)
            print('GYR_xy_axis_raw_corr', GYR_xy_axis_raw_corr)
            GYR_xz_axis_raw_corr, _ = stats.pearsonr(GYR_x_axis_raw,
                                                     GYR_z_axis_raw)
            print('GYR_xz_axis_raw_corr', GYR_xz_axis_raw_corr)
            GYR_yz_axis_raw_corr, _ = stats.pearsonr(GYR_y_axis_raw,
                                                     GYR_z_axis_raw)
            print('GYR_yz_axis_raw_corr', GYR_yz_axis_raw_corr)

            GYR_x_axis_raw_iqr = iqr(GYR_x_axis_raw)
            print('GYR_x_axis_raw_iqr', GYR_x_axis_raw_iqr)
            GYR_y_axis_raw_iqr = iqr(GYR_y_axis_raw)
            print('GYR_y_axis_raw_iqr', GYR_y_axis_raw_iqr)
            GYR_z_axis_raw_iqr = iqr(GYR_z_axis_raw)
            print('GYR_z_axis_raw_iqr', GYR_z_axis_raw_iqr)

            GYR_x_axis_raw_skew = skew(GYR_x_axis_raw)
            print('GYR_x_axis_raw_skew', GYR_x_axis_raw_skew)
            GYR_y_axis_raw_skew = skew(GYR_y_axis_raw)
            print('GYR_y_axis_raw_skew', GYR_y_axis_raw_skew)
            GYR_z_axis_raw_skew = skew(GYR_z_axis_raw)
            print('GYR_z_axis_raw_skew', GYR_z_axis_raw_skew)

            GYR_x_axis_raw_entropy = entropy(GYR_x_axis_raw)
            print('GYR_x_axis_raw_entropy', GYR_x_axis_raw_entropy)
            GYR_y_axis_raw_entropy = entropy(GYR_y_axis_raw)
            print('GYR_y_axis_raw_entropy', GYR_y_axis_raw_entropy)
            GYR_z_axis_raw_entropy = entropy(GYR_z_axis_raw)
            print('GYR_z_axis_raw_entropy', GYR_z_axis_raw_entropy)

            GYR_x_axis_raw_arCoeff, energy_x, reflectioncoeffs_x = spectrum.arburg(
                GYR_x_axis_raw, 4)
            GYR_x_axis_raw_arCoeff_1 = str(GYR_x_axis_raw_arCoeff[0])[1:-1]
            GYR_x_axis_raw_arCoeff_2 = str(GYR_x_axis_raw_arCoeff[1])[1:-1]
            GYR_x_axis_raw_arCoeff_3 = str(GYR_x_axis_raw_arCoeff[2])[1:-1]
            GYR_x_axis_raw_arCoeff_4 = str(GYR_x_axis_raw_arCoeff[3])[1:-1]
            print('GYR_x_axis_raw_arCoeff', GYR_x_axis_raw_arCoeff)
            GYR_y_axis_raw_arCoeff, energy_y, reflectioncoeffs_y = spectrum.arburg(
                GYR_y_axis_raw, 4)
            GYR_y_axis_raw_arCoeff_1 = str(GYR_y_axis_raw_arCoeff[0])[1:-1]
            GYR_y_axis_raw_arCoeff_2 = str(GYR_y_axis_raw_arCoeff[1])[1:-1]
            GYR_y_axis_raw_arCoeff_3 = str(GYR_y_axis_raw_arCoeff[2])[1:-1]
            GYR_y_axis_raw_arCoeff_4 = str(GYR_y_axis_raw_arCoeff[3])[1:-1]
            print('GYR_y_axis_raw_arCoeff', GYR_y_axis_raw_arCoeff)
            GYR_z_axis_raw_arCoeff, energy_z, reflectioncoeffs_z = spectrum.arburg(
                GYR_z_axis_raw, 4)
            GYR_z_axis_raw_arCoeff_1 = str(GYR_z_axis_raw_arCoeff[0])[1:-1]
            GYR_z_axis_raw_arCoeff_2 = str(GYR_z_axis_raw_arCoeff[1])[1:-1]
            GYR_z_axis_raw_arCoeff_3 = str(GYR_z_axis_raw_arCoeff[2])[1:-1]
            GYR_z_axis_raw_arCoeff_4 = str(GYR_z_axis_raw_arCoeff[3])[1:-1]
            print('GYR_z_axis_raw_arCoeff', GYR_z_axis_raw_arCoeff)

            GYR_x_axis_raw_kurtosis = kurtosis(GYR_x_axis_raw)
            print('GYR_x_axis_raw_kurtosis', GYR_x_axis_raw_kurtosis)
            GYR_y_axis_raw_kurtosis = kurtosis(GYR_y_axis_raw)
            print('GYR_y_axis_raw_kurtosis', GYR_y_axis_raw_kurtosis)
            GYR_z_axis_raw_kurtosis = kurtosis(GYR_z_axis_raw)
            print('GYR_z_axis_raw_kurtosis', GYR_z_axis_raw_kurtosis)
            GYR_x_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, GYR_x_axis_raw)) / len(GYR_x_axis_raw)
            print('GYR_x_axis_raw_energy', GYR_x_axis_raw_energy)
            GYR_y_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, GYR_y_axis_raw)) / len(GYR_y_axis_raw)
            print('GYR_y_axis_raw_energy', GYR_y_axis_raw_energy)
            GYR_z_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, GYR_z_axis_raw)) / len(GYR_z_axis_raw)
            print('GYR_z_axis_raw_energy', GYR_z_axis_raw_energy)
            count = count + 1

        elif file_name.endswith("Pressure.csv"):
            file_to_open = folder_name + '/' + file_name
            Mbar_raw = pd.read_csv(file_to_open,
                                   skipinitialspace=True,
                                   usecols=field3)
            df = pd.DataFrame(Mbar_raw)
            print('i am currently reading Pressure file of')
            print(folder_name_in)

            Mbar_raw_mean = df.mean(axis=None, skipna=True)
            Mbar_raw_mean = list(Mbar_raw_mean)
            Mbar_raw_mean = str(Mbar_raw_mean)[1:-1]
            print('Mbar_raw_mean', Mbar_raw_mean)

            Mbar_raw_std = df.std(axis=None, skipna=True)
            Mbar_raw_std = list(Mbar_raw_std)
            Mbar_raw_std = str(Mbar_raw_std)[1:-1]
            print('Mbar_raw_std', Mbar_raw_std)

            Mbar_raw_mad = stats.median_absolute_deviation(Mbar_raw)
            Mbar_raw_mad = list(Mbar_raw_mad)
            Mbar_raw_mad = str(Mbar_raw_mad)[1:-1]
            print('Mbar_raw_mad', Mbar_raw_mad)

            Mbar_raw = pd.DataFrame(Mbar_raw)
            Mbar_raw = Mbar_raw['Millibars'].tolist()

            Mbar_raw_iqr = iqr(Mbar_raw)
            print('Mbar_raw_iqr', Mbar_raw_iqr)

            Mbar_raw_skew = skew(Mbar_raw)

            print('Mbar_raw_skew', Mbar_raw_skew)

            Mbar_raw_entropy = entropy(Mbar_raw)
            print('Mbar_raw_entropy', Mbar_raw_entropy)

            Mbar_raw_arCoeff, energy_x, reflectioncoeffs_x = spectrum.arburg(
                Mbar_raw, 4)
            Mbar_raw_arCoeff_1 = str(Mbar_raw_arCoeff[0])[1:-1]
            Mbar_raw_arCoeff_2 = str(Mbar_raw_arCoeff[1])[1:-1]
            Mbar_raw_arCoeff_3 = str(Mbar_raw_arCoeff[2])[1:-1]
            Mbar_raw_arCoeff_4 = str(Mbar_raw_arCoeff[3])[1:-1]
            print('Mbar_raw_arCoeff', Mbar_raw_arCoeff)

            Mbar_raw_kurtosis = kurtosis(Mbar_raw)
            print('Mbar_raw_kurtosis', Mbar_raw_kurtosis)
            Mbar_raw_energy = (functools.reduce(lambda x, y: x + y * y,
                                                Mbar_raw)) / len(Mbar_raw)
            print('Mbar_raw_energy', Mbar_raw_energy)
            count = count + 1
        elif file_name.endswith("RotationVector.csv"):
            file_to_open = folder_name + '/' + file_name
            CosO = pd.read_csv(file_to_open,
                               skipinitialspace=True,
                               usecols=field4)
            df = pd.DataFrame(CosO)
            print('i am currently reading RotationVector file of')
            print(folder_name_in)

            CosO_mean = df.mean(axis=None, skipna=True)
            CosO_mean = list(CosO_mean)
            CosO_mean = str(CosO_mean)[1:-1]
            print('CosO_mean', CosO_mean)

            CosO_std = df.std(axis=None, skipna=True)
            CosO_std = list(CosO_std)
            CosO_std = str(CosO_std)[1:-1]
            print('CosO_std', CosO_std)

            CosO_mad = stats.median_absolute_deviation(CosO)
            CosO_mad = list(CosO_mad)
            CosO_mad = str(CosO_mad)[1:-1]
            print('CosO_mad', CosO_mad)

            CosO = pd.DataFrame(CosO)
            CosO = CosO['cos'].tolist()

            CosO_iqr = iqr(CosO)
            print('CosO_iqr', CosO_iqr)

            CosO_skew = skew(CosO)

            print('CosO_skew', CosO_skew)

            CosO_entropy = entropy(CosO)
            print('CosO_entropy', CosO_entropy)

            CosO_arCoeff, energy_x, reflectioncoeffs_x = spectrum.arburg(
                CosO, 4)
            CosO_arCoeff_1 = str(CosO_arCoeff[0])[1:-1]
            CosO_arCoeff_2 = str(CosO_arCoeff[1])[1:-1]
            CosO_arCoeff_3 = str(CosO_arCoeff[2])[1:-1]
            CosO_arCoeff_4 = str(CosO_arCoeff[3])[1:-1]
            print('CosO_arCoeff', CosO_arCoeff)

            CosO_kurtosis = kurtosis(CosO)
            print('CosO_kurtosis', CosO_kurtosis)
            CosO_energy = (functools.reduce(lambda x, y: x + y * y,
                                            CosO)) / len(CosO)
            print('CosO_energy', CosO_energy)
            count = count + 1
        elif file_name.endswith("Accelerometer.csv"):
            file_to_open = folder_name + '/' + file_name
            ACC_x_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field0)
            df = pd.DataFrame(ACC_x_axis_raw)
            print('i am currently reading Accelerometer file of')
            print(folder_name_in)

            ACC_x_axis_raw_mean = df.mean(axis=None, skipna=True)
            ACC_x_axis_raw_mean = list(ACC_x_axis_raw_mean)
            ACC_x_axis_raw_mean = str(ACC_x_axis_raw_mean)[1:-1]
            print('ACC_x_axis_raw_mean', ACC_x_axis_raw_mean)

            ACC_x_axis_raw_std = df.std(axis=None, skipna=True)
            ACC_x_axis_raw_std = list(ACC_x_axis_raw_std)
            ACC_x_axis_raw_std = str(ACC_x_axis_raw_std)[1:-1]
            print('ACC_x_axis_raw_std', ACC_x_axis_raw_std)

            ACC_x_axis_raw_mad = stats.median_absolute_deviation(
                ACC_x_axis_raw)
            ACC_x_axis_raw_mad = list(ACC_x_axis_raw_mad)
            ACC_x_axis_raw_mad = str(ACC_x_axis_raw_mad)[1:-1]
            print('ACC_x_axis_raw_mad', ACC_x_axis_raw_mad)

            ACC_y_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field1)
            df = pd.DataFrame(ACC_y_axis_raw)

            ACC_y_axis_raw_mean = df.mean(axis=None, skipna=True)
            ACC_y_axis_raw_mean = list(ACC_y_axis_raw_mean)
            ACC_y_axis_raw_mean = str(ACC_y_axis_raw_mean)[1:-1]
            print('ACC_y_axis_raw_mean', ACC_y_axis_raw_mean)

            ACC_y_axis_raw_std = df.std(axis=None, skipna=True)
            ACC_y_axis_raw_std = list(ACC_y_axis_raw_std)
            ACC_y_axis_raw_std = str(ACC_y_axis_raw_std)[1:-1]
            print('ACC_y_axis_raw_std', ACC_y_axis_raw_std)

            ACC_y_axis_raw_mad = stats.median_absolute_deviation(
                ACC_y_axis_raw)
            ACC_y_axis_raw_mad = list(ACC_y_axis_raw_mad)
            ACC_y_axis_raw_mad = str(ACC_y_axis_raw_mad)[1:-1]
            print('ACC_y_axis_raw_mad', ACC_y_axis_raw_mad)

            ACC_z_axis_raw = pd.read_csv(file_to_open,
                                         skipinitialspace=True,
                                         usecols=field2)
            df = pd.DataFrame(ACC_z_axis_raw)

            ACC_z_axis_raw_mean = df.mean(axis=None, skipna=True)
            ACC_z_axis_raw_mean = list(ACC_z_axis_raw_mean)
            ACC_z_axis_raw_mean = str(ACC_z_axis_raw_mean)[1:-1]
            print('ACC_z_axis_raw_mean', ACC_z_axis_raw_mean)

            ACC_z_axis_raw_std = df.std(axis=None, skipna=True)
            ACC_z_axis_raw_std = list(ACC_z_axis_raw_std)
            ACC_z_axis_raw_std = str(ACC_z_axis_raw_std)[1:-1]
            print('ACC_z_axis_raw_std', ACC_z_axis_raw_std)

            ACC_z_axis_raw_mad = stats.median_absolute_deviation(
                ACC_z_axis_raw)
            ACC_z_axis_raw_mad = list(ACC_z_axis_raw_mad)
            ACC_z_axis_raw_mad = str(ACC_z_axis_raw_mad)[1:-1]
            print('ACC_z_axis_raw_mad', ACC_z_axis_raw_mad)

            ACC_x_axis_raw = pd.DataFrame(ACC_x_axis_raw)
            ACC_x_axis_raw = ACC_x_axis_raw['X'].tolist()
            ACC_y_axis_raw = pd.DataFrame(ACC_y_axis_raw)
            ACC_y_axis_raw = ACC_y_axis_raw['Y'].tolist()
            ACC_z_axis_raw = pd.DataFrame(ACC_z_axis_raw)
            ACC_z_axis_raw = ACC_z_axis_raw['Z'].tolist()

            ACC_xy_axis_raw_corr, _ = stats.pearsonr(ACC_x_axis_raw,
                                                     ACC_y_axis_raw)
            print('ACC_xy_axis_raw_corr', ACC_xy_axis_raw_corr)
            ACC_xz_axis_raw_corr, _ = stats.pearsonr(ACC_x_axis_raw,
                                                     ACC_z_axis_raw)
            print('ACC_xz_axis_raw_corr', ACC_xz_axis_raw_corr)
            ACC_yz_axis_raw_corr, _ = stats.pearsonr(ACC_y_axis_raw,
                                                     ACC_z_axis_raw)
            print('ACC_yz_axis_raw_corr', ACC_yz_axis_raw_corr)

            ACC_x_axis_raw_iqr = iqr(ACC_x_axis_raw)
            print('ACC_x_axis_raw_iqr', ACC_x_axis_raw_iqr)
            ACC_y_axis_raw_iqr = iqr(ACC_y_axis_raw)
            print('ACC_y_axis_raw_iqr', ACC_y_axis_raw_iqr)
            ACC_z_axis_raw_iqr = iqr(ACC_z_axis_raw)
            print('ACC_z_axis_raw_iqr', ACC_z_axis_raw_iqr)

            ACC_x_axis_raw_skew = skew(ACC_x_axis_raw)
            print('ACC_x_axis_raw_skew', ACC_x_axis_raw_skew)
            ACC_y_axis_raw_skew = skew(ACC_y_axis_raw)
            print('ACC_y_axis_raw_skew', ACC_y_axis_raw_skew)
            ACC_z_axis_raw_skew = skew(ACC_z_axis_raw)
            print('ACC_z_axis_raw_skew', ACC_z_axis_raw_skew)

            ACC_x_axis_raw_entropy = entropy(ACC_x_axis_raw)
            print('ACC_x_axis_raw_entropy', ACC_x_axis_raw_entropy)
            ACC_y_axis_raw_entropy = entropy(ACC_y_axis_raw)
            print('ACC_y_axis_raw_entropy', ACC_y_axis_raw_entropy)
            ACC_z_axis_raw_entropy = entropy(ACC_z_axis_raw)
            print('ACC_z_axis_raw_entropy', ACC_z_axis_raw_entropy)

            ACC_x_axis_raw_arCoeff, energy_x, reflectioncoeffs_x = spectrum.arburg(
                ACC_x_axis_raw, 4)
            ACC_x_axis_raw_arCoeff_1 = str(ACC_x_axis_raw_arCoeff[0])[1:-1]
            ACC_x_axis_raw_arCoeff_2 = str(ACC_x_axis_raw_arCoeff[1])[1:-1]
            ACC_x_axis_raw_arCoeff_3 = str(ACC_x_axis_raw_arCoeff[2])[1:-1]
            ACC_x_axis_raw_arCoeff_4 = str(ACC_x_axis_raw_arCoeff[3])[1:-1]
            print('ACC_x_axis_raw_arCoeff', ACC_x_axis_raw_arCoeff)
            ACC_y_axis_raw_arCoeff, energy_y, reflectioncoeffs_y = spectrum.arburg(
                ACC_y_axis_raw, 4)
            ACC_y_axis_raw_arCoeff_1 = str(ACC_y_axis_raw_arCoeff[0])[1:-1]
            ACC_y_axis_raw_arCoeff_2 = str(ACC_y_axis_raw_arCoeff[1])[1:-1]
            ACC_y_axis_raw_arCoeff_3 = str(ACC_y_axis_raw_arCoeff[2])[1:-1]
            ACC_y_axis_raw_arCoeff_4 = str(ACC_y_axis_raw_arCoeff[3])[1:-1]
            print('ACC_y_axis_raw_arCoeff', ACC_y_axis_raw_arCoeff)
            ACC_z_axis_raw_arCoeff, energy_z, reflectioncoeffs_z = spectrum.arburg(
                ACC_z_axis_raw, 4)
            ACC_z_axis_raw_arCoeff_1 = str(ACC_z_axis_raw_arCoeff[0])[1:-1]
            ACC_z_axis_raw_arCoeff_2 = str(ACC_z_axis_raw_arCoeff[1])[1:-1]
            ACC_z_axis_raw_arCoeff_3 = str(ACC_z_axis_raw_arCoeff[2])[1:-1]
            ACC_z_axis_raw_arCoeff_4 = str(ACC_z_axis_raw_arCoeff[3])[1:-1]
            print('ACC_z_axis_raw_arCoeff', ACC_z_axis_raw_arCoeff)

            ACC_x_axis_raw_kurtosis = kurtosis(ACC_x_axis_raw)
            print('ACC_x_axis_raw_kurtosis', ACC_x_axis_raw_kurtosis)
            ACC_y_axis_raw_kurtosis = kurtosis(ACC_y_axis_raw)
            print('ACC_y_axis_raw_kurtosis', ACC_y_axis_raw_kurtosis)
            ACC_z_axis_raw_kurtosis = kurtosis(ACC_z_axis_raw)
            print('ACC_z_axis_raw_kurtosis', ACC_z_axis_raw_kurtosis)

            ACC_x_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, ACC_x_axis_raw)) / len(ACC_x_axis_raw)
            print('ACC_x_axis_raw_energy', ACC_x_axis_raw_energy)
            ACC_y_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, ACC_y_axis_raw)) / len(ACC_y_axis_raw)
            print('ACC_y_axis_raw_energy', ACC_y_axis_raw_energy)
            ACC_z_axis_raw_energy = (functools.reduce(
                lambda x, y: x + y * y, ACC_z_axis_raw)) / len(ACC_z_axis_raw)
            print('ACC_z_axis_raw_energy', ACC_z_axis_raw_energy)
            count = count + 1

            if count == 4:

                writer.writerow([
                    folder_name_in, ACC_x_axis_raw_mean, ACC_y_axis_raw_mean,
                    ACC_z_axis_raw_mean, ACC_x_axis_raw_std,
                    ACC_y_axis_raw_std, ACC_z_axis_raw_std, ACC_x_axis_raw_mad,
                    ACC_y_axis_raw_mad, ACC_z_axis_raw_mad,
                    ACC_xy_axis_raw_corr, ACC_xz_axis_raw_corr,
                    ACC_yz_axis_raw_corr, ACC_x_axis_raw_iqr,
                    ACC_y_axis_raw_iqr, ACC_z_axis_raw_iqr,
                    ACC_x_axis_raw_skew, ACC_y_axis_raw_skew,
                    ACC_z_axis_raw_skew, ACC_x_axis_raw_entropy,
                    ACC_y_axis_raw_entropy, ACC_z_axis_raw_entropy,
                    ACC_x_axis_raw_arCoeff_1, ACC_x_axis_raw_arCoeff_2,
                    ACC_x_axis_raw_arCoeff_3, ACC_x_axis_raw_arCoeff_4,
                    ACC_y_axis_raw_arCoeff_1, ACC_y_axis_raw_arCoeff_2,
                    ACC_y_axis_raw_arCoeff_3, ACC_y_axis_raw_arCoeff_4,
                    ACC_z_axis_raw_arCoeff_1, ACC_z_axis_raw_arCoeff_2,
                    ACC_z_axis_raw_arCoeff_3, ACC_z_axis_raw_arCoeff_4,
                    ACC_x_axis_raw_kurtosis, ACC_y_axis_raw_kurtosis,
                    ACC_z_axis_raw_kurtosis, ACC_x_axis_raw_energy,
                    ACC_y_axis_raw_energy, ACC_z_axis_raw_energy,
                    GYR_x_axis_raw_mean, GYR_y_axis_raw_mean,
                    GYR_z_axis_raw_mean, GYR_x_axis_raw_std,
                    GYR_y_axis_raw_std, GYR_z_axis_raw_std, GYR_x_axis_raw_mad,
                    GYR_y_axis_raw_mad, GYR_z_axis_raw_mad,
                    GYR_xy_axis_raw_corr, GYR_xz_axis_raw_corr,
                    GYR_yz_axis_raw_corr, GYR_x_axis_raw_iqr,
                    GYR_y_axis_raw_iqr, GYR_z_axis_raw_iqr,
                    GYR_x_axis_raw_skew, GYR_y_axis_raw_skew,
                    GYR_z_axis_raw_skew, GYR_x_axis_raw_entropy,
                    GYR_y_axis_raw_entropy, GYR_z_axis_raw_entropy,
                    GYR_x_axis_raw_arCoeff_1, GYR_x_axis_raw_arCoeff_2,
                    GYR_x_axis_raw_arCoeff_3, GYR_x_axis_raw_arCoeff_4,
                    GYR_y_axis_raw_arCoeff_1, GYR_y_axis_raw_arCoeff_2,
                    GYR_y_axis_raw_arCoeff_3, GYR_y_axis_raw_arCoeff_4,
                    GYR_z_axis_raw_arCoeff_1, GYR_z_axis_raw_arCoeff_2,
                    GYR_z_axis_raw_arCoeff_3, GYR_z_axis_raw_arCoeff_4,
                    GYR_x_axis_raw_kurtosis, GYR_y_axis_raw_kurtosis,
                    GYR_z_axis_raw_kurtosis, GYR_x_axis_raw_energy,
                    GYR_y_axis_raw_energy, GYR_z_axis_raw_energy,
                    Mbar_raw_mean, Mbar_raw_std, Mbar_raw_mad, Mbar_raw_iqr,
                    Mbar_raw_skew, Mbar_raw_entropy, Mbar_raw_arCoeff_1,
                    Mbar_raw_arCoeff_2, Mbar_raw_arCoeff_3, Mbar_raw_arCoeff_4,
                    Mbar_raw_kurtosis, Mbar_raw_energy, CosO_mean, CosO_std,
                    CosO_mad, CosO_iqr, CosO_skew, CosO_entropy,
                    CosO_arCoeff_1, CosO_arCoeff_2, CosO_arCoeff_3,
                    CosO_arCoeff_4, CosO_kurtosis, CosO_energy
                ])
                count = 0

        else:
            current_path = "".join((parent, "/", file_name))
            if os.path.isdir(current_path):
                # if we're checking a sub-directory, recall this method
                scan_folder(current_path)
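Finally, a hedged sketch of the arCoeff feature that scan_folder computes repeatedly: an order-4 Burg fit per sensor column. The column name and data are illustrative, and the real parts are taken directly instead of string-slicing the complex repr as the snippet does:

import numpy as np
import pandas as pd
import spectrum

df = pd.DataFrame({"X": np.random.randn(200)})        # stand-in sensor column
coeffs, noise_var, refl = spectrum.arburg(df["X"].tolist(), 4)
arCoeff_1, arCoeff_2, arCoeff_3, arCoeff_4 = [c.real for c in coeffs]
print(arCoeff_1, arCoeff_2, arCoeff_3, arCoeff_4)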