    # Standardize the training features, then take the leading right singular
    # vectors of the SVD as the eigentexture components.
    means = np.mean(a, 0)
    sds = np.std(a, 0)
    U, S, V = np.linalg.svd((a[:, :] - means[np.newaxis, :]) / sds[np.newaxis, :],
                            full_matrices=False)
    components = V.T[:, :nkernels]
    components = components.transpose()

    # Each component concatenates a 19x19 "pixels" patch, a 19x19 "log" patch
    # and a 9x9 "gauss" patch; plot the three side by side for every kernel.
    for i in range(nkernels):
        pixels = components[i, :(19 * 19)].reshape(19, 19)
        log = components[i, (19 * 19):(19 * 19 * 2)].reshape(19, 19)
        gauss = components[i, (19 * 19 * 2):].reshape(9, 9)
        pylab.subplot(8, 18, i * 3 + 1).imshow(pixels)
        pylab.subplot(8, 18, i * 3 + 2).imshow(log)
        pylab.subplot(8, 18, i * 3 + 3).imshow(gauss)

    # Replace any previously stored results before writing the new ones.
    if "components" in h5_file.keys():
        del h5_file["components"]
    if "feature_means" in h5_file.keys():
        del h5_file["feature_means"]
    if "feature_sds" in h5_file.keys():
        del h5_file["feature_sds"]
    h5_file.create_dataset("components", data=components)
    h5_file.create_dataset("feature_means", data=means)
    h5_file.create_dataset("feature_sds", data=sds)
    h5_file.close()
    pylab.savefig("../kernels.pdf")
else:
    # Load the feature statistics saved by an earlier run.
    means = h5_file["feature_means"][:]
    sds = h5_file["feature_sds"][:]

def normalize(features):
    # Standardize feature rows with the stored statistics, matching the
    # transform applied to the training features before the SVD.
    return (features - means[np.newaxis, :]) / sds[np.newaxis, :]
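
# Illustrative sketch (not part of the original code): how the datasets written
# above are meant to be used on new features -- standardize with
# "feature_means"/"feature_sds", then project onto "components".  The training
# script below does the same thing via normalize(); the file name here is a
# hypothetical stand-in for the HDF5 file opened by tiffcvt.
def project_onto_eigentextures(features, h5_path="features.h5"):
    import h5py
    import numpy as np
    with h5py.File(h5_path, "r") as f:
        feature_means = f["feature_means"][:]
        feature_sds = f["feature_sds"][:]
        # Stored as (nkernels, n_features); transpose for the projection.
        eigentextures = f["components"][:, :].transpose()
    standardized = (features - feature_means[np.newaxis, :]) / feature_sds[np.newaxis, :]
    return np.dot(standardized, eigentextures)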
# Training script: fit a vigra random forest on the training features (optionally
# projected into eigentexture space) and store it in ../challenge.h5.
from vigra.learning import RandomForest
import numpy as np
import h5py
import sys

from tiffcvt import h5_file

if __name__ == "__main__":
    # Train a forest of 40 trees on the features in the HDF5 file opened by tiffcvt.
    clf = RandomForest(treeCount=40)
    training_set = h5_file["training_features"][:, :].astype(np.float32)
    training_class = h5_file["training_classification"][:].astype(np.uint32)
    if len(sys.argv) > 1 and sys.argv[1] == "eigentexture":
        # Standardize the features and project them onto the stored
        # eigentexture components before training.
        from eigentexture import normalize
        training_set = normalize(training_set)
        components = h5_file["components"][:, :].transpose()
        training_set = np.dot(training_set, components).astype(np.float32)
        classifier_name = "etclassifier"
    else:
        classifier_name = "classifier"
    clf.learnRF(training_set, training_class)
    # Drop any previously stored forest of the same name, then write the new one.
    if classifier_name in h5_file.keys():
        del h5_file[classifier_name]
    h5_file.close()
    clf.writeHDF5('../challenge.h5', "/" + classifier_name, True)
else:
    # On import, expose the forests trained by earlier runs.
    classifier = RandomForest("../challenge.h5", "/classifier")
    et_classifier = RandomForest("../challenge.h5", "/etclassifier")
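
# Sketch of prediction with the loaded forest (an assumption, not part of the
# original script): it relies on vigranumpy's RandomForest predictLabels() and
# predictProbabilities() calls, which expect a float32 (n_samples, n_features)
# array with the same column layout as "training_features".
def classify_rows(new_features):
    new_features = np.asarray(new_features, dtype=np.float32)
    labels = classifier.predictLabels(new_features)
    probabilities = classifier.predictProbabilities(new_features)
    return labels, probabilities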