Example #1
def recon_1d_sure(data, tree, estimated_var=1.0):
    """
    Reconstruction of a function in one dimension using SURE wavelet shrinkage.
    """
    haar_coefs = haar.haar_transform(data, tree)
    new_haar_coefs = np.zeros(haar_coefs.shape)
    # Map each coefficient to its resolution level, then pick a shrinkage
    # threshold per level by minimizing SURE.
    coef_levels = haar.level_correspondence(tree)
    for level in np.unique(coef_levels):
        hc = haar_coefs[coef_levels == level]
        if len(hc) == 1:
            # A level with a single coefficient gets threshold 0 (no shrinkage).
            t = 0
        else:
            # Grid search over candidate thresholds in [0, 6) with step 0.1.
            x = np.arange(0, 6.0, 0.1)
            estimates = []
            for threshold in x:
                estimate = sure(hc, threshold, estimated_var)
                estimates.append(estimate[0])
            t = x[np.argmin(estimates)]
        new_haar_coefs[coef_levels == level] = shrink_coefs(hc, t)
    return haar.inverse_haar_transform(new_haar_coefs, tree)
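
Example #1 calls two helpers that are not shown here, sure() and shrink_coefs(). A minimal sketch of what they might look like, assuming soft thresholding and the standard closed-form SURE risk estimate for it; the names and the tuple-style return value of sure() are inferred from the call sites above, not from a known implementation:

import numpy as np

def shrink_coefs(coefs, threshold):
    # Hypothetical stand-in: soft thresholding, shrinking each coefficient
    # toward zero by `threshold`.
    return np.sign(coefs) * np.maximum(np.abs(coefs) - threshold, 0.0)

def sure(coefs, threshold, var=1.0):
    # Hypothetical stand-in: Stein's Unbiased Risk Estimate for soft
    # thresholding at `threshold`, assuming i.i.d. Gaussian noise with
    # variance `var`. Returned as a 1-tuple so sure(...)[0] matches the
    # usage in recon_1d_sure.
    n = coefs.size
    risk = (n * var
            - 2.0 * var * np.count_nonzero(np.abs(coefs) <= threshold)
            + np.sum(np.minimum(np.abs(coefs), threshold) ** 2))
    return (risk,)
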
Example #2
def train():
    # PARTITION and REAL_TRAIN are module-level settings controlling the
    # train/validation split.
    global PARTITION
    print("Importing data")
    dataset = data_wrangler.extract_data()
    print("done importing data")
    haar_dataset = []
    # Build one feature vector per sample: the Haar transforms of the three
    # axes, concatenated. Labels are the gesture indices 1-8.
    for i in range(1, 9):
        print("Haar transform of Gesture: " + str(i))
        for j in range(len(dataset[i])):
            if len(dataset[i][j].shape) == 1:
                # Skip samples that are not 2-D arrays of per-axis traces.
                continue
            datapoint = [haar.haar_transform(dataset[i][j][k]) for k in range(3)]
            datapoint = np.concatenate(datapoint)
            if sum(datapoint) != 0:
                # Drop degenerate samples whose concatenated features sum to zero.
                haar_dataset.append([datapoint, i])
    random.shuffle(haar_dataset)
    if REAL_TRAIN:
        PARTITION = len(haar_dataset)
    train_data = [x[0] for x in haar_dataset]
    train_labels = [x[1] for x in haar_dataset]

    print("Done Transforms! Now Training")
    gest_classifier = svm.SVC(C=8, kernel='rbf', gamma=0.5)
    gest_classifier.fit(train_data[:PARTITION], train_labels[:PARTITION])
    print("Done Training! Ready!")
    if not REAL_TRAIN:
        # Everything past PARTITION is held out for a simple validation pass.
        count_right = 0
        print("Starting Validation")

        for i in range(PARTITION, len(train_labels)):
            # predict expects a 2-D array, so wrap the single sample in a list.
            if gest_classifier.predict([train_data[i]])[0] == train_labels[i]:
                count_right += 1
        print("Accuracy : " + str(count_right / (len(train_labels) - PARTITION)))

    def predict(x, y, z):
        # Same feature construction as training: concatenated per-axis Haar transforms.
        datapoint = [haar.haar_transform(var) for var in [x, y, z]]
        datapoint = np.concatenate(datapoint)
        return gest_classifier.predict([datapoint])[0]

    return predict
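
train() returns the inner predict closure, so a caller would use it roughly as follows. The 32-sample random traces are placeholders for the per-axis accelerometer data implied by the x, y, z arguments; gesture labels are 1 through 8, as in the training loop:

import numpy as np

predict = train()          # fit the SVM once and get back the predictor closure
ax = np.random.randn(32)   # placeholder x-axis samples
ay = np.random.randn(32)   # placeholder y-axis samples
az = np.random.randn(32)   # placeholder z-axis samples
print("Predicted gesture:", predict(ax, ay, az))
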
Example #3
import pywt
import numpy as np
from haar import haar_transform, inverse_haar

# print(pywt.families())

# 2-D single-level Haar DWT on a constant image: only the approximation
# coefficients are non-zero.
data = np.ones((4, 4), dtype=np.float64)
# print(data)
coeffs = pywt.dwt2(data, 'haar')
cA, (cH, cV, cD) = coeffs
# print(cA)
# print(cH)
# print(cV)

# 1-D multilevel Haar decomposition with pywt, for comparison with the
# local haar module below.
wavelet = 'haar'
testa = [1, 2, 3, 4, 5, 6, 7, 8]
coeffs = pywt.wavedec(testa, wavelet, level=3)
print(coeffs)
# coeffs_H = list(coeffs)
# coeffs_H[0] = np.zeros(coeffs_H[0].shape)
# print(pywt.waverec(coeffs_H, wavelet))

haar_r = haar_transform(np.asarray(testa, dtype='float'))
print(haar_r)
print(inverse_haar(haar_r))
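
The local haar module used throughout these examples is not shown. As a generic illustration only, and not the actual implementation behind haar_transform, an orthonormal 1-D Haar decomposition of a signal whose length is a power of two can be written as:

import numpy as np

def simple_haar_transform(signal):
    # Generic sketch, not the local haar module: repeatedly split the signal
    # into pairwise averages and differences (each scaled by 1/sqrt(2)) and
    # recurse on the averages. The output is ordered
    # [coarsest average, coarsest details, ..., finest details],
    # the same ordering pywt.wavedec uses.
    out = np.asarray(signal, dtype=float)
    pieces = []
    while out.size > 1:
        avg = (out[0::2] + out[1::2]) / np.sqrt(2.0)
        diff = (out[0::2] - out[1::2]) / np.sqrt(2.0)
        pieces.insert(0, diff)   # finer details end up later in the output
        out = avg
    pieces.insert(0, out)        # the single coarsest approximation coefficient
    return np.concatenate(pieces)

print(simple_haar_transform([1, 2, 3, 4, 5, 6, 7, 8]))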