Example 1
pickle.dump([img, img_name, img_identity, img_pose, img_expression, img_eye,
        identity, pose, expression, eye], file_pkl)
file_pkl.close()

# Normalization of each image
for i in range(img.shape[0]):
    img[i] = (img[i] - img[i].min()) * 1. / (img[i].max() - img[i].min())

img = img.reshape(img.shape[0],img.shape[1]*img.shape[2])
img = scale(img)

# 'global','local','manual'
flag_sigma = 'global'

# Compute similarity matrix
sigma, aff_img = compute_affinity(img, flag_sigma=flag_sigma, sigma=100., nn=7)
if flag_sigma == 'local':
    sigma_init = sum(sigma**2) / len(sigma)
    print "Average Sigma (local): ", sigma_init

K = 20
# Construct existing solution Y (one-hot encoding of the identity labels)
Y = np.zeros((img.shape[0], K))
for i in range(img.shape[0]):
    Y[i, img_identity[i]] = 1
val_lambda = 1.2
arr_tmp = val_lambda * Y.dot(Y.T)

label_pred_identity = spectral_clustering(aff_img, n_clusters=K)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity
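# Hedged note (not in the original snippet): arr_tmp = val_lambda * Y.dot(Y.T)
# encodes the existing identity solution; a plausible next step is to fold it
# into the affinity before re-clustering for an alternative view, e.g.
# label_pred_alt = spectral_clustering(aff_img - arr_tmp, n_clusters=K)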
Example 2
# Normalize image
for i in range(img.shape[0]):
    img[i] = (img[i] - img[i].min()) * 1. / (img[i].max() - img[i].min())

# PCA on image
pca = PCA(n_components=40)
feat_pca = pca.fit_transform(img.reshape(img.shape[0],
    img.shape[1]*img.shape[2]))
print "Variance Ratio: ", sum(pca.explained_variance_ratio_)

# Save PCA features
file_pkl = open("face_pca.pkl","wb")
pickle.dump(feat_pca,file_pkl)
file_pkl.close()

# compute affinity matrix
flag_sigma = 'global'

sigma_pca, aff_pca = compute_affinity(feat_pca, flag_sigma=flag_sigma,
        sigma=100., nn=8)

label_pred_identity = spectral_clustering(aff_pca,n_clusters=20)
nmi_identity = nmi(label_pred_identity,img_identity)

label_pred_pose = spectral_clustering(aff_pca,n_clusters=4)
nmi_pose = nmi(label_pred_pose,img_pose)

print "nmi_identity",nmi_identity,"nmi_pose",nmi_pose

Example 3
feat_fft = tmp[:, 0:32]
file_fft.close()

# Load Gabor features
file_gabor = open("face_gabor.pkl", "rb")
feat_gabor = pickle.load(file_gabor)
file_gabor.close()

# Load LBP features
file_lbp = open("face_lbp.pkl", "rb")
feat_lbp = pickle.load(file_lbp)
file_lbp.close()

# Compute similarity matrix for FFT and Gabor
flag_sigma = 'global'
sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma)
sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma)
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma)
print "kernel computation finished"

# Spectral clustering on the FFT and Gabor affinities
K = 4
label_pred_fft = spectral_clustering(aff_fft, n_clusters=K)
label_pred_gabor = spectral_clustering(aff_gabor, n_clusters=K)

nmi_fft_identity = nmi(label_pred_fft, img_identity)
nmi_gabor_identity = nmi(label_pred_gabor, img_identity)
print "nmi_fft_identity: ", nmi_fft_identity
print "nmi_gabor_identity: ", nmi_gabor_identity

for alpha in np.arange(0.1, 1.0, 0.1):
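    # Hedged completion (the original snippet is truncated here): a natural
    # loop body blends the two affinities and re-clusters at each alpha.
    aff_mix = alpha * aff_fft + (1 - alpha) * aff_gabor
    label_pred_mix = spectral_clustering(aff_mix, n_clusters=K)
    print alpha, nmi(label_pred_mix, img_identity)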
Example 4
        np.double(np.histogram(tmp, bins=range(10), normed=True)[0]))
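    # (note: numpy later replaced the histogram flag normed= with density=)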
    #feat_lbp.append(mahotas.features.lbp(img[i],1,8))
feat_lbp = scale(np.array(feat_lbp))

# PCA on LBP features
#pca = PCA(n_components=20)
#feat_lbp = pca.fit_transform(feat_lbp)
#print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

# Normalization of features
#feat_lbp = scale(feat_lbp)

# Save LBP features
file_pkl = open("face_lbp.pkl", "wb")
pickle.dump(feat_lbp, file_pkl)
file_pkl.close()

# Compute affinity matrix
flag_sigma = 'global'
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma,
        sigma=100., nn=8)
print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_lbp, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_lbp, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose
Example 5
from cvxopt.solvers import qp
from python.multiview.utils.compute_affinity import compute_affinity
from python.multiview.utils.opt_affinity_weight import opt_affinity_weight

basepath = "/Users/changyale/dataset/mfeat/"
filename_fou = "mfeat-fou"
filename_fac = "mfeat-fac"

data_fou = scale(np.loadtxt(basepath+filename_fou))
data_fac = scale(np.loadtxt(basepath+filename_fac))

# 'global', 'local', 'manual'
flag_sigma = 'global'

# Fourier-coefficient view (earlier default: sigma=50.)
sigma_fou, aff_fou = compute_affinity(data_fou, flag_sigma=flag_sigma,
        sigma=147.9369, nn=8)
# Profile-correlation (fac) view (earlier default: sigma=100.)
sigma_fac, aff_fac = compute_affinity(data_fac, flag_sigma=flag_sigma,
        sigma=422.6228, nn=8)
print "kernel computation finished"
if flag_sigma == 'local':
    sigma_fou_init = sum(sigma_fou**2)/len(sigma_fou)
    sigma_fac_init = sum(sigma_fac**2)/len(sigma_fac)

K = 10
label_true = []
for i in range(K):
    for j in range(200):
        label_true.append(i)

# Spectral Clustering: Fourier coefficient
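# Hedged completion (the original snippet is truncated here): cluster each
# view's affinity and score it against label_true, assuming spectral_clustering
# and nmi are imported as in the other examples.
label_pred_fou = spectral_clustering(aff_fou, n_clusters=K)
print "NMI (Fourier view): ", nmi(label_pred_fou, label_true)
label_pred_fac = spectral_clustering(aff_fac, n_clusters=K)
print "NMI (fac view): ", nmi(label_pred_fac, label_true)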
Example 6
print feat_hog.shape

# PCA on HoG features
pca = PCA(n_components=40)
feat_hog = pca.fit_transform(feat_hog)
print "Variance Ratio: ", sum(pca.explained_variance_ratio_)

#feat_hog = scale(feat_hog)

# save HoG features
file_pkl = open("face_hog.pkl", "wb")
pickle.dump(feat_hog, file_pkl)
file_pkl.close()

# Compute similarity matrix
flag_sigma = 'global'
sigma_hog, aff_hog = compute_affinity(feat_hog, flag_sigma=flag_sigma,
        sigma=100., nn=8)
print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_hog, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_hog, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose

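# Note (assumption): hog_image is produced in the elided top of this script,
# presumably via skimage.feature.hog(..., visualise=True), which also returns
# a visualization image alongside the feature vector.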
plt.imshow(hog_image, cmap=cm.Greys_r)
plt.show()
Example 7
file_lbp.close()

# Load HoG features
file_hog = open("face_hog.pkl","rb")
feat_hog = pickle.load(file_hog)
file_hog.close()

# Load PCA features
file_pca = open("face_pca.pkl","rb")
feat_pca = pickle.load(file_pca)
file_pca.close()


# Compute similarity matrix for RawData, FFT, Gabor, LBP, HoG, PCA
flag_sigma = 'global'
sigma_raw, aff_raw = compute_affinity(img.reshape(img.shape[0],
        img.shape[1]*img.shape[2]), flag_sigma=flag_sigma)
sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma)
sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma)
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma)
sigma_hog, aff_hog = compute_affinity(feat_hog, flag_sigma=flag_sigma)
sigma_pca, aff_pca = compute_affinity(feat_pca, flag_sigma=flag_sigma)

# Optionally normalize each affinity matrix by its Frobenius norm
flag_normalization = False
if flag_normalization:
    aff_raw = aff_raw / la.norm(aff_raw)
    aff_fft = aff_fft / la.norm(aff_fft)
    aff_gabor = aff_gabor / la.norm(aff_gabor)
    aff_lbp = aff_lbp / la.norm(aff_lbp)
    aff_hog = aff_hog / la.norm(aff_hog)
    aff_pca = aff_pca / la.norm(aff_pca)
Example 8
feat_fft = tmp[:,0:32]
file_fft.close()

# Load Gabor features
file_gabor = open("face_gabor.pkl","rb")
feat_gabor = pickle.load(file_gabor)
file_gabor.close()

# Load LBP features
file_lbp = open("face_lbp.pkl","rb")
feat_lbp = pickle.load(file_lbp)
file_lbp.close()

# Compute similarity matrix for FFT and Gabor
flag_sigma = 'global'
sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma)
sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma)
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma)
print "kernel computation finished"

# Spectral clustering on the FFT and Gabor affinities
K = 4
label_pred_fft = spectral_clustering(aff_fft, n_clusters=K)
label_pred_gabor = spectral_clustering(aff_gabor, n_clusters=K)

nmi_fft_identity = nmi(label_pred_fft, img_identity)
nmi_gabor_identity = nmi(label_pred_gabor, img_identity)
print "nmi_fft_identity: ", nmi_fft_identity
print "nmi_gabor_identity: ", nmi_gabor_identity

for alpha in np.arange(0.1,1.0,0.1):
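    # (truncated in the original; presumably the same alpha-blend of aff_fft
    # and aff_gabor sketched after Example 3)
    pass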
Example 9
feat_lbp = pickle.load(file_lbp)
file_lbp.close()

# Load HoG features
file_hog = open("face_hog.pkl", "rb")
feat_hog = pickle.load(file_hog)
file_hog.close()

# Load PCA features
file_pca = open("face_pca.pkl", "rb")
feat_pca = pickle.load(file_pca)
file_pca.close()

# Compute similarity matrix for RawData, FFT, Gabor, LBP, HoG, PCA
flag_sigma = 'global'
sigma_raw, aff_raw = compute_affinity(img.reshape(img.shape[0],
        img.shape[1]*img.shape[2]), flag_sigma=flag_sigma)
sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma)
sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma)
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma)
sigma_hog, aff_hog = compute_affinity(feat_hog, flag_sigma=flag_sigma)
sigma_pca, aff_pca = compute_affinity(feat_pca, flag_sigma=flag_sigma)

# Optionally normalize each affinity matrix by its Frobenius norm
flag_normalization = False
if flag_normalization:
    aff_raw = aff_raw / la.norm(aff_raw)
    aff_fft = aff_fft / la.norm(aff_fft)
    aff_gabor = aff_gabor / la.norm(aff_gabor)
    aff_lbp = aff_lbp / la.norm(aff_lbp)
    aff_hog = aff_hog / la.norm(aff_hog)
    aff_pca = aff_pca / la.norm(aff_pca)
Example 10
file_pkl.close()

# Normalize image
for i in range(img.shape[0]):
    img[i] = (img[i] - img[i].min()) * 1. / (img[i].max() - img[i].min())

# PCA on image
pca = PCA(n_components=40)
feat_pca = pca.fit_transform(img.reshape(img.shape[0],
    img.shape[1]*img.shape[2]))
print "Variance Ratio: ", sum(pca.explained_variance_ratio_)

# Save PCA features
file_pkl = open("face_pca.pkl", "wb")
pickle.dump(feat_pca, file_pkl)
file_pkl.close()

# compute affinity matrix
flag_sigma = 'global'

sigma_pca, aff_pca = compute_affinity(feat_pca, flag_sigma=flag_sigma,
        sigma=100., nn=8)

label_pred_identity = spectral_clustering(aff_pca, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)

label_pred_pose = spectral_clustering(aff_pca, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)

print "nmi_identity", nmi_identity, "nmi_pose", nmi_pose
Example 11
print feat_hog.shape

# PCA on HoG features
pca = PCA(n_components=40)
feat_hog = pca.fit_transform(feat_hog)
print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

#feat_hog = scale(feat_hog)

# save HoG features
file_pkl = open("face_hog.pkl","wb")
pickle.dump(feat_hog,file_pkl)
file_pkl.close()

# Compute similarity matrix
flag_sigma = 'global'
sigma_hog, aff_hog = compute_affinity(feat_hog, flag_sigma=flag_sigma,
        sigma=100., nn=8)
print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_hog, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_hog, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose

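# (as in Example 6, hog_image is produced in the elided top of this script)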
plt.imshow(hog_image, cmap=cm.Greys_r)
plt.show()
Example 12
label_e = np.hstack([np.zeros((1, 1000)), np.ones((1, 1000))])[0]
# One-hot encoding of the case/control labels
Y = np.zeros((2000, 2))
for i in range(0, 1000):
    Y[i, 0] = 1
for i in range(1000, 2000):
    Y[i, 1] = 1

# Stack the two datasets vertically
data = np.vstack([data_case, data_control])

# Compute affinity matrix for each source
n_instances, n_features = data.shape
affs = []
flag_sigma = 'global'
for j in range(n_features):
    sigma, tmp = compute_affinity(data[:, j].reshape(n_instances, 1),
            flag_sigma=flag_sigma)
    affs.append(tmp)
    print j

# Save information
# file_pkl = open("copd_all.pkl","wb")
# pickle.dump([data,features_name,affs,label_e],file_pkl)
# file_pkl.close()

v_lambda_range = np.arange(0, 10, 0.5)
v_mu_range = [0]
dim_q = 4
tol = 1e-6
n_iter_max = 200

# Store iteration results
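# Hedged sketch of the elided continuation: sweep the (lambda, mu) grid and
# score each run against label_e. `run_opt` below is a hypothetical stand-in
# for the project's optimizer; its real name and signature are not shown here.
results = {}
for v_lambda in v_lambda_range:
    for v_mu in v_mu_range:
        # labels = run_opt(affs, Y, v_lambda, v_mu, dim_q, tol, n_iter_max)
        # results[(v_lambda, v_mu)] = nmi(labels, label_e)
        pass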
Example 13
    feat_gabor[i, :] = compute_feats(img[i], kernels).reshape(1, 32)
    #print i

# PCA on Gabor features
#pca = PCA(n_components=4)
#feat_gabor = pca.fit_transform(feat_gabor)
#print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

#feat_gabor = scale(feat_gabor)
# Save Gabor Features
file_pkl = open("face_gabor.pkl","wb")
pickle.dump(feat_gabor,file_pkl)
file_pkl.close()

# Compute affinity matrix
flag_sigma = 'global'

sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma,
        sigma=100., nn=8)

print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_gabor, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_gabor, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose

Example 14
print img_fft.shape
# PCA on FFT features
pca = PCA(n_components=14)
feat_fft = pca.fit_transform(img_fft)
print "Variance Ratio: ", sum(pca.explained_variance_ratio_)

# Normalization of features
#feat_fft = scale(feat_fft)

# save FFT data
file_pkl = open("face_fft.pkl", "wb")
pickle.dump(feat_fft, file_pkl)
file_pkl.close()

# Compute kernel matrix for FFT data
flag_sigma = 'global'

sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma,
        sigma=336., nn=8)
if flag_sigma == 'local':
    sigma_fft_init = sum(sigma_fft**2) / len(sigma_fft)
    print "sigma_fft_init: ", sigma_fft_init

label_pred_identity = spectral_clustering(aff_fft, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)

label_pred_pose = spectral_clustering(aff_fft, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)

print "nmi_identity", nmi_identity, "nmi_pose", nmi_pose
Example 15
    img[i] = (img[i] - img[i].min()) / (img[i].max() - img[i].min())
    feat_gabor[i, :] = compute_feats(img[i], kernels).reshape(1, 32)
    #print i

# PCA on Gabor features
#pca = PCA(n_components=4)
#feat_gabor = pca.fit_transform(feat_gabor)
#print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

#feat_gabor = scale(feat_gabor)
# Save Gabor Features
file_pkl = open("face_gabor.pkl", "wb")
pickle.dump(feat_gabor, file_pkl)
file_pkl.close()

# Compute affinity matrix
flag_sigma = 'global'

sigma_gabor, aff_gabor = compute_affinity(feat_gabor, flag_sigma=flag_sigma,
        sigma=100., nn=8)

print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_gabor, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_gabor, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose
Example 16
# PCA on FFT features
pca = PCA(n_components=14)
feat_fft = pca.fit_transform(img_fft)
print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

# Normalization of features
#feat_fft = scale(feat_fft)

# save FFT data
file_pkl = open("face_fft.pkl","wb")
pickle.dump(feat_fft,file_pkl)
file_pkl.close()

# Compute kernel matrix for FFT data
flag_sigma = 'global'

sigma_fft, aff_fft = compute_affinity(feat_fft, flag_sigma=flag_sigma,
        sigma=336., nn=8)
if flag_sigma == 'local':
    sigma_fft_init = sum(sigma_fft**2) / len(sigma_fft)
    print "sigma_fft_init: ", sigma_fft_init

label_pred_identity = spectral_clustering(aff_fft, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)

label_pred_pose = spectral_clustering(aff_fft, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)

print "nmi_identity", nmi_identity, "nmi_pose", nmi_pose

Example 17
label_e = np.hstack([np.zeros((1, 1000)), np.ones((1, 1000))])[0]
# One-hot encoding of the case/control labels
Y = np.zeros((2000, 2))
for i in range(0, 1000):
    Y[i, 0] = 1
for i in range(1000, 2000):
    Y[i, 1] = 1

# Stack the two datasets vertically
data = np.vstack([data_case, data_control])

# Compute affinity matrix for each source
n_instances, n_features = data.shape
affs = []
flag_sigma = 'global'
for j in range(n_features):
    sigma, tmp = compute_affinity(data[:, j].reshape(n_instances, 1),
            flag_sigma=flag_sigma)
    affs.append(tmp)
    print j

# Save information
# file_pkl = open("copd_all.pkl","wb")
# pickle.dump([data,features_name,affs,label_e],file_pkl)
# file_pkl.close()

v_lambda_range = np.arange(0,10,0.5)
v_mu_range = [0]
dim_q = 4
tol = 1e-6
n_iter_max = 200

# Store iteration results
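# (truncated in the original; presumably the same (lambda, mu) grid sweep
# sketched after Example 12 follows here)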
Example 18
    #feat_lbp.append(mahotas.features.lbp(img[i],1,8))
feat_lbp = scale(np.array(feat_lbp))

# PCA on LBP features
#pca = PCA(n_components=20)
#feat_lbp = pca.fit_transform(feat_lbp)
#print "Variance Ratio: ",sum(pca.explained_variance_ratio_)

# Normalization of features
#feat_lbp = scale(feat_lbp)

# Save LBP features
file_pkl = open("face_lbp.pkl","wb")
pickle.dump(feat_lbp,file_pkl)
file_pkl.close()

# Compute affinity matrix
flag_sigma = 'global'
sigma_lbp, aff_lbp = compute_affinity(feat_lbp, flag_sigma=flag_sigma,
        sigma=100., nn=8)
print "kernel computation finished"

label_pred_identity = spectral_clustering(aff_lbp, n_clusters=20)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity

label_pred_pose = spectral_clustering(aff_lbp, n_clusters=4)
nmi_pose = nmi(label_pred_pose, img_pose)
print "NMI with pose: ", nmi_pose

Example 19
pickle.dump([img, img_name, img_identity, img_pose, img_expression, img_eye,
        identity, pose, expression, eye], file_pkl)
file_pkl.close()

# Normalization of each image
for i in range(img.shape[0]):
    img[i] = (img[i] - img[i].min()) * 1. / (img[i].max() - img[i].min())

img = img.reshape(img.shape[0], img.shape[1] * img.shape[2])
img = scale(img)

# 'global','local','manual'
flag_sigma = 'global'

# Compute similarity matrix
sigma, aff_img = compute_affinity(img, flag_sigma=flag_sigma, sigma=100., nn=7)
if flag_sigma == 'local':
    sigma_init = sum(sigma**2) / len(sigma)
    print "Average Sigma(local): ", sigma_init

K = 20
# Construct existing solution Y
Y = np.zeros((img.shape[0], K))
for i in range(img.shape[0]):
    Y[i, img_identity[i]] = 1
val_lambda = 1.2
arr_tmp = val_lambda * Y.dot(Y.T)
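# (arr_tmp, the existing-solution term, is presumably consumed further down the
# original script; the snippet is truncated before its use)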

label_pred_identity = spectral_clustering(aff_img, n_clusters=K)
nmi_identity = nmi(label_pred_identity, img_identity)
print "NMI with identity: ", nmi_identity