Example #1
import os
import pickle

import numpy as np
import sklearn.cluster
from joblib import Parallel, delayed

from createFilterBank import createFilterBank
from filterPoints import filterPoints  # assumed module layout


def getDictionary(imgPaths, alpha, K, method):
    # number of worker processes for joblib
    ncores = 4
    data = pickle.load(open('../data/traintest.pkl', 'rb'))
    train = data['train_imagenames']
    filters = createFilterBank()
    testset = train[0:2]  # small subset, handy for quick smoke tests

    print('Starting a pool of workers with %d cores\n' % ncores)
    # one job per training image; each returns the filter responses
    # sampled at alpha interest points
    results = Parallel(n_jobs=ncores, verbose=11)(
        delayed(filterPoints)(method, filters, alpha, K, imgPaths, name)
        for name in train)

    # stack per-image responses into one (n_images * alpha, F) array
    pixelResponses = np.vstack(results)

    # the K cluster centers form the visual-word dictionary
    kmeans = sklearn.cluster.KMeans(n_clusters=K).fit(pixelResponses)
    dictionary = kmeans.cluster_centers_

    fout = os.path.join(imgPaths, 'dictionary' + method + '.npz')
    np.savez(fout, dictionary=dictionary, filterBank=filters)

    return dictionary
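
filterPoints is called above but not shown on this page. Below is a minimal sketch of what it might do, assuming the helpers imported in Example #2 and the image preprocessing hinted at in Example #4: sample alpha interest points from one image and return the filter responses at those points. Every signature here is an assumption, and K is accepted only to match the call site.

import os

import imageio
import numpy as np

# assumed helpers; see the imports in Example #2
from getRandomPoints import getRandomPoints                 # assumed: (img, alpha) -> (alpha, 2) coords
from getHarrisPoints import getHarrisPoints                 # assumed: (img, alpha, k) -> (alpha, 2) coords
from extractFilterResponses import extractFilterResponses  # assumed: (img, filters) -> (H, W, F)


def filterPoints(method, filters, alpha, K, imgPaths, name):
    # load and normalize one training image, as sketched in Example #4
    img = imageio.imread(os.path.join(imgPaths, name))
    if img.ndim == 3:
        img = np.dot(img[..., :3], [0.299, 0.587, 0.114])
    img = np.float64(img) / 255.0

    # pick alpha interest points with the requested detector
    if method == 'Random':
        points = getRandomPoints(img, alpha)
    else:  # 'Harris'
        points = getHarrisPoints(img, alpha, 0.04)

    # responses at the chosen points become the rows fed to K-means;
    # K itself is unused in this sketch
    responses = extractFilterResponses(img, filters)
    return responses[points[:, 0], points[:, 1], :]  # (alpha, F)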
Example #2
import pickle

import numpy as np
import sklearn.cluster
from joblib import Parallel, delayed

from createFilterBank import createFilterBank
#from extractFilterResponses import extractFilterResponses
from getRandomPoints import getRandomPoints
from getHarrisPoints import getHarrisPoints
from filterPoints import filterPoints  # assumed module layout, as in Example #1

alpha = 50   # number of points sampled per image
k = 0.04     # Harris free parameter; values in 0.04-0.06 work well
K = 100      # number of clusters
method = 'Random'
# testing Q1.3
imgPaths = '../data/'
ncores = 4
data = pickle.load(open('../data/traintest.pkl', 'rb'))
train = data['train_imagenames']
filters = createFilterBank()
testset = train[0:4]  # small subset for a quick run

print('Starting a pool of workers with %d cores\n' % ncores)

# dispatch one filterPoints job per test image
results = Parallel(n_jobs=ncores, verbose=11)(
    delayed(filterPoints)(method, filters, alpha, K, imgPaths, name)
    for name in testset)

pixelResponses = np.vstack(results)

# cluster the sampled responses into K visual words
kmeans = sklearn.cluster.KMeans(n_clusters=K).fit(pixelResponses)
dictionary = kmeans.cluster_centers_
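
Example #2 stops after computing the cluster centers, while Example #3 below expects the dictionary on disk under the name 'dictionary' + method + '.npz'. A save mirroring Example #1 would bridge the two:

np.savez('dictionary' + method + '.npz', dictionary=dictionary, filterBank=filters)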
Example #3
import os
import pickle

import imageio
import numpy as np
from joblib import Parallel, delayed

from createFilterBank import createFilterBank
from getVisualWords import getVisualWords  # assumed module layout


# The example opens with the tail of a per-image worker; the wrapper's
# name and arguments are assumptions added so the fragment parses.
def computeWordMap(fname1, fname2, filterBank, dictionary):
    img = imageio.imread(fname1)  # load as in Example #4
    wordMap = getVisualWords(img, filterBank, dictionary)
    np.savez(fname2, wordMap=wordMap)


# number of cores to use
# set this value appropriately
ncores = 4
source = '../data/'
methods = ['Random', 'Harris']

# load the files and texton dictionary
data = pickle.load(open('../data/traintest.pkl', 'rb'))
mapping = data['mapping']
all_imagenames = data['all_imagenames']
nimages = len(all_imagenames)
filterBank = createFilterBank()

for method in methods:
    destination = os.path.join('../intermediate/', method)
    dictionary = np.load('dictionary' + method + '.npz')['dictionary']

    if not os.path.isdir(destination):
        os.mkdir(destination)

    # mirror the dataset's category folders under the destination
    for category in mapping:
        if not os.path.isdir(os.path.join(destination, category)):
            os.mkdir(os.path.join(destination, category))

    print('Starting a pool of workers with %d cores\n' % ncores)
    results = Parallel(n_jobs=ncores)(
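
The dispatch above is cut off mid-call in the source. Mirroring the one-job-per-image pattern of Examples #1 and #2 and the path handling in Example #4, it plausibly continues along these lines (the computeWordMap worker and the path layout are assumptions):

results = Parallel(n_jobs=ncores)(
    delayed(computeWordMap)(
        os.path.join(source, name),                                     # input image
        os.path.join(destination, os.path.splitext(name)[0] + '.npz'),  # cached word map
        filterBank, dictionary)
    for name in all_imagenames)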
Example #4

import os
import pickle

import imageio
import numpy as np

from createFilterBank import createFilterBank
from getVisualWords import getVisualWords  # assumed module layout


def rgb2gray(rgb):
    # ITU-R 601 luma weights
    return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])


ncores = 4
source = '../data/'
method = 'Harris'

# load the files and texton dictionary
data = pickle.load(open('../data/traintest.pkl', 'rb'))
mapping = data['mapping']
all_imagenames = data['all_imagenames']
nimages = len(all_imagenames)
filterz = createFilterBank()

dictionary = np.load('dictionary' + method + '.npz')['dictionary']

fname = all_imagenames[0]

fname1 = os.path.join(source, fname)
#fname2 = os.path.join(destination, os.path.splitext(fname)[0] + '.npz')
# scipy.ndimage.imread was removed in SciPy 1.2; imageio is the usual replacement
img = imageio.imread(fname1)
#if img.ndim == 3:
#    img = rgb2gray(img)

#img = np.float64(img) / 255.0
# NOTE: the argument order here differs from Example #3, which calls
# getVisualWords(img, filterBank, dictionary); match your function's signature
wordMap = getVisualWords(img, dictionary, filterz)
done = 1  # breakpoint sentinel
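
For reference, here is a minimal sketch of what getVisualWords might look like, using the (img, filterBank, dictionary) argument order from Example #3. extractFilterResponses (imported but commented out in Example #2) is assumed to return an (H, W, F) stack of per-pixel filter responses; each pixel is then labeled with the index of its nearest dictionary word.

import numpy as np
from scipy.spatial.distance import cdist

from extractFilterResponses import extractFilterResponses  # assumed helper


def getVisualWords(img, filterBank, dictionary):
    # per-pixel filter responses, assumed shape (H, W, F)
    responses = extractFilterResponses(img, filterBank)
    H, W, F = responses.shape
    flat = responses.reshape(-1, F)  # one row per pixel
    # Euclidean distance from every pixel response to every dictionary word
    dists = cdist(flat, dictionary)  # shape (H*W, K)
    # assign each pixel the index of its nearest visual word
    return np.argmin(dists, axis=1).reshape(H, W)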