def countStepByZcross(fname, look, zcTh, varTh=1, debug=False):
    lpf=LPF()
    if os.path.isdir(fname):
        paths=glob(os.path.join(fname, '*.xml') )
    else:
        paths=[fname]
    
    steps={}
    for path in paths:
        d=parseXml2dic(path)
        if look==0:
            accWf=getAccWf(d)
            data=accWf[2]
        elif look==1:
            data=(d[Keys.kAx]**2+d[Keys.kAy]**2+d[Keys.kAz]**2)**0.5
        #data=lpf.lpfScipy(data)
        data=lpf.lpfTest(data)
        s=countStep(data, zcTh, varTh, debug)
        steps[os.path.basename(path)]=s
    return steps
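
# Usage sketch (hypothetical path and thresholds, not part of the original
# module): count steps for every *.xml recording in a directory, using the
# accelerometer magnitude (look=1) and a zero-crossing threshold of 0.5.
stepCounts = countStepByZcross('step-counting-data/segmented', look=1, zcTh=0.5)
for name in sorted(stepCounts):
    print(name + ': ' + str(stepCounts[name]) + ' steps')
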
def checkNoise(noiseFactor):
    mses = []
    noises = []
    for coeffs_num in coeffs_nums:
        sums = []
        for i in xrange(30):
            noise = map(add, np.random.normal(0, (amplitude / noiseFactor) ** 2, len(list)), list)
            clean = LPF.clean(noise, coeffs_num)
            sums.append(me(list, clean))
            noises.append(me(list, noise))
        mses.append(np.mean(sums))
    plt.plot(coeffs_nums, mses, label="amplitude/noise_std=" + str(noiseFactor))
    m = np.mean(noises)
    plt.plot(coeffs_nums, [m] * len(coeffs_nums), label="noise reference for " + str(noiseFactor))
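
# Usage sketch (hypothetical, not part of the original module): checkNoise reads
# coeffs_nums, list, amplitude and me() from module scope, so those must already
# be defined (e.g. by the AMC-loading script further below). A sweep over a few
# noise factors could then look like:
for noiseFactor in (2, 5, 10):
    checkNoise(noiseFactor)
plt.legend().draggable()
plt.show()
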
Example #3
def stitchByOrder(parts):
    byOrder = copy.copy(parts[0])
    averaged = copy.copy(byOrder)
    for part in parts[1:]:
        byOrder, averaged = appendFrac(byOrder, part, averaged)
    
    cleaned, cleaned_time = LPF.clean(averaged) 
    plt.figure()
    plt.title('Stitching by order')
    plt.plot(xrange(len(input)), input, color='blue', label='original, SM='+str(cost(input)))
    plt.plot(xrange(len(byOrder)), byOrder, color='black', label='byOrder, SM='+str(cost(byOrder)))
    plt.plot(xrange(len(averaged)), averaged, color='red', label='byOrder and averaged, SM='+str(cost(averaged)))
    plt.plot(xrange(len(cleaned)), cleaned, color='green', label='byOrder and LPF and averaged, SM='+str(cost(cleaned)))
    plt.legend().draggable()
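
# Usage sketch (hypothetical, not part of the original module): stitchByOrder
# relies on the module-level input signal plus cost() and appendFrac(); the
# parts would typically come from createParts, as in the later examples.
parts = createParts(input, True, 9, (max(input) - min(input)) / 10)
stitchByOrder(parts)
plt.show()
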
Example #4
def stitch(parts, m1=MEAN_COEFF, m2=STD_COEFF, epsilon=EPSILON_FACTOR, gap=GAP_FACTOR, overlap=OVERLAP_FACTOR):
    """
    MEAN_COEFF=m1
    STD_COEFF=m2
    EPSILON_FACTOR=epsilon
    OVERLAP_FACTOR=overlap
    GAP_FACTOR=gap
    """
    #greedy
    parts.sort()
    parts.reverse()
    averaged = copy.copy(parts[0])
    notUsed = []
    for part in parts[1:]:
        lenBefore = len(averaged)
        #greedy, kuku = appendFrac(greedy, part, averaged)
        averaged = appendFracaveraged(averaged, part,  m1, m2, epsilon, gap, overlap)
        if(lenBefore == len(averaged)):
            notUsed.append(part)   
    counter = 0
    for part in notUsed[:]:  # iterate over a copy; the loop removes items from notUsed
        lenBefore = len(averaged)
        averaged = prependFrac(averaged, part,  m1, m2, epsilon, gap, overlap)
        if(len(averaged) != lenBefore):
            counter+=1
            notUsed.remove(part)
    """
    for part in notUsed:
        a = np.sum(averaged)
        lcs.lcs(averaged, part, isEqual)
        #if(a!=np.sum(averaged)):
         #   print 'kuku'
    """
    clean, clean_time = LPF.clean(averaged)
    plt.figure()
    plt.plot(clean)
    return clean
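
# Usage sketch (hypothetical, not part of the original module): stitch overlapping
# parts of a periodic signal with the default coefficients; createParts, list and
# amplitude are assumed to be available at module scope, as in the later examples.
parts = createParts(list, True, 9, amplitude / 10)
merged = stitch(parts)
plt.show()
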
Example #5
        # Fragment: tail of a sliding-window zero-crossing loop. On the first
        # counted window a vertical marker is drawn at the window start; zero
        # crossings of the current buffer relative to zline are accumulated,
        # then winsz-overlap samples are dropped to slide the window forward.
        if not start:
            start=True
            axvline(i+1-winsz*2, c='r', lw=2)

        steps+=getZccByZline(buf, zline, i+1-winsz*2, debug=True, begin=0, end=winsz)
        del cbufv[:winsz-overlap]
        del cbufvlpf[:winsz-overlap]
    i+=1
    return steps


#fname=r'D:/Documents/Desktop/step-counting-data/old-linear/segmented/ZC30fast_a9_0.xml'
figure()
d=parseXml2dic(fname)
data=(d[Keys.kAx]**2+d[Keys.kAy]**2+d[Keys.kAz]**2)**0.5
lpf=LPF()
dlpf=lpf.lpfTest(data)
d11=lpf.lpfTest(data, 11)
tslist=d[Keys.kTs]
#plot(tslist, data)
#plot(data, 'b', dlpf, 'r')
#plot(d11, 'g')
plot(dlpf, 'r')

#diff(tslist)
for j in range(len(tslist)):
    s2=dyZcrossOnline(data[j], tslist[j], True)

#plot(tmp, 'c')
print "online:", s2
import numpy as np
import numpy.random as rnd
import matplotlib.pyplot as plt 
from utils.vicon.amcParser import getAMCperiod
from utils.stitching.stitching import MAXIMA_ORDER, CLUSTER_COEFF, plotParts, createParts
import utils.stitching.stitching as loop
import utils.periodAnalysisUtils as ut
from operator import add, sub
import utils.LPF as LPF 

file = 'AMCs/598.amc'
joint = 'root'
list = getAMCperiod(joint, file)
list = list[:538]
#stride = list[112:251] 
#list = ut.alignByMax(stride)
#list = ut.alignByMax(list)
amplitude = np.max(list) - np.min(list)
parts = createParts(list, True, 9, amplitude/10)
merged = loop.stitch(parts)
merged.sort()
print(len(merged))
#out = ut.alignByMax(merged[-1])
out = LPF.clean(merged[-1]) 
outOff, listOff = ut.alignByBig(out, list)
plt.figure()
plt.plot(xrange(listOff, listOff+len(list)), list)
plt.plot(xrange(outOff, outOff+len(out)), out)
plt.show()
Example #7
list = ut.alignByMax(stride)
amplitude = np.max(list) - np.min(list)
coeffs_num = 16
window_size = 16
maFactor = 1.2
msesMA = []
msesLPF = []
noises = []
noisesFactors = xrange(2,20)
for noiseFactor in noisesFactors:
    sumsLPF = []
    sumsMA = []
    sumsN = []
    for i in xrange(30):
        noise = map(add, np.random.normal(0,(amplitude/noiseFactor)**2,len(list)), list)
        clean = LPF.clean(noise, coeffs_num)
        ma = movingAverage(noise,  window_size, maFactor)
        sumsLPF.append(me(list, clean))
        sumsMA.append(me(list, ma))
        sumsN.append(me(list, noise))
    msesMA.append(np.mean(sumsMA))
    msesLPF.append(np.mean(sumsLPF))
    noises.append(np.mean(sumsN))
plt.plot(noisesFactors, msesMA, label='MA' )
plt.plot(noisesFactors, msesLPF, label='LPF' )
plt.plot(noisesFactors, noises, label='noise reference')

plt.xlabel('noise factor')
plt.ylabel('mean error')
plt.title('Mean error for different noise levels')
plt.legend().draggable()
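
# Minimal sketch of the error metric assumed above (hypothetical implementation;
# the original me() is not shown in these snippets). It is taken here to be the
# mean squared error between a reference signal and an equally long estimate.
import numpy as np

def me(reference, estimate):
    ref = np.asarray(reference, dtype=float)
    est = np.asarray(estimate, dtype=float)
    return np.mean((ref - est) ** 2)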