Exemple #1
0
def store_instances(layers, compression):
    """Insert compressed instance features into a per-layer Postgres table.

    For each layer, builds one parameterized INSERT statement covering a
    feature column per available compression dimension, scales the layer's
    instance features with the stored scalar, compresses them once per
    dimension, and inserts one row per instance.

    NOTE(review): the ``class`` column is hard-coded to -1 (instances carry
    no known label here), and the target table is assumed to already exist
    -- this function only inserts, it does not create the table.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name (e.g. 'pca')
    """
    conn = psycopg2.connect(dbname=utils.dbname,
                            user=utils.user,
                            password=utils.password,
                            host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()

    for layer in layers:
        # Build the INSERT statement for this layer's (pre-existing) table.
        table_name = create_table_name(compression, layer)
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip the trailing commas and close both clauses.
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        print insert_command

        # Load the raw instance features and normalize with the stored scalar.
        X, ids = utils.load_instance_features(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Compress once per dimension; reused for every row below.
        transforms = []
        for dim in dimensions:
            compressor = utils.load_compressor(layer, dim, compression)
            transforms.append(compressor.transform(X))

        for i in range(X.shape[0]):
            file_name = ids[i]
            # -1 marks an unknown class for instance data.
            value = [file_name, -1]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()

    print 'Total Time : ', time.clock() - start_time
Exemple #2
0
def store_instances(layers, compression):
    """Insert compressed instance features into a per-layer Postgres table.

    For each layer, builds one parameterized INSERT statement covering a
    feature column per available compression dimension, scales the layer's
    instance features with the stored scalar, compresses them once per
    dimension, and inserts one row per instance.

    NOTE(review): the ``class`` column is hard-coded to -1 (instances carry
    no known label here), and the target table is assumed to already exist
    -- this function only inserts, it does not create the table.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name (e.g. 'pca')
    """
    conn = psycopg2.connect(dbname=utils.dbname, user=utils.user, password=utils.password, host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()

    for layer in layers:
        # Build the INSERT statement for this layer's (pre-existing) table.
        table_name = create_table_name(compression, layer)
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip the trailing commas and close both clauses.
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        print insert_command

        # Load the raw instance features and normalize with the stored scalar.
        X, ids = utils.load_instance_features(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Compress once per dimension; reused for every row below.
        transforms = []
        for dim in dimensions:
            compressor = utils.load_compressor(layer, dim, compression)
            transforms.append(compressor.transform(X))

        for i in range(X.shape[0]):
            file_name = ids[i]
            # -1 marks an unknown class for instance data.
            value = [file_name, -1]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()

    print 'Total Time : ', time.clock() - start_time
import os
import numpy as np
import caffe


# Single-image query demo: extract a network-layer feature for one image,
# compress it, and (below) use it to query the feature database.
# Query configuration: image, network layer, compression scheme, and k.
image_file = 'ubot5_1.JPG'
layer = 'pool5'
dimension = 256
compression = 'pca'
k = 10

# Load the fitted compressor (e.g. PCA) for this layer/dimension pair.
compressor = utils.load_compressor(layer=layer,
                                   dimension=dimension,
                                   compression=compression)

# Scalar used to normalize raw features before compression.
scalar = utils.load_scalar(layer=layer)

net, params, blobs = utils.load_network()

# assumes utils.instances_dir holds the query images -- TODO confirm
input_image = caffe.io.load_image(os.path.join(utils.instances_dir, image_file))

# predict takes any number of images, and formats them for the Caffe net automatically
prediction = net.predict([input_image], oversample=False)
# Flatten this layer's activation for image 0 into a 1-D feature vector.
feat = net.blobs[layer].data[0].ravel()
feat = scalar.transform(feat)

# Compressed query feature, flattened back to 1-D.
comp_feat = compressor.transform(feat).ravel()

results = sql.query_top_k(k=k,
                          features=comp_feat,
                          compression=compression,
        #ids = ids[0:500]
        #===== Convert ids (filename) to labels(integer) =========
        labels = []
        print len(ids)
        for i in range(len(ids)):
            labels.append(int(ids[i].split('_')[0].split('n')[1]))

        labels = matlab.double(labels)

        #======== Start MATLAB  ============
        print 'start matlab'
        eng = matlab.engine.start_matlab()

        for n_com in n_components:

            scalar = utils.load_scalar(layer)
            compressor = utils.load_compressor(layer=layer,
                                               dimension=n_com,
                                               compression=c_type)
            X = scalar.transform(X0)
            comp_X = compressor.transform(X)
            #comp_X = comp_X[0:500,:]

            #===== Convert ndarray to list so that we can pass the variable to matalb function =========
            comp_X = comp_X.tolist()
            comp_X = matlab.double(comp_X)

            print '===============================start tsne in python ==================================='
            print layer, n_com, c_type

            result = eng.tsne_traning_python(comp_X, labels, dimensions, layer,
    lsh = LSHash(128, np.shape(testSet[0])[0], matrices_filename='lsh_planes.data.npz', overwrite=True)

    for idx, input_point in enumerate(testSet):
        hastValue = lsh._hash(lsh.uniform_planes[0], input_point.tolist())
        print hastValue

        lsh.index(input_point, idx)

    print lsh.query(testSet[3], 3)

    return None


if __name__ == '__main__':
    import utils

    n_bits = 4096

    # The scalar's per-feature std-devs set the scale of the hyperplane bias.
    fc7_scalar = utils.load_scalar(layer='fc7')
    bias_var = np.mean(fc7_scalar.std_)

    # Generate (and persist) the biased random hyperplanes for hashing.
    plane_name = 'randomPlanesBias' + str(n_bits)
    plane_path = os.path.join(utils.lsh_planes_dir, plane_name)
    generatePlanesWithBias(plane_path, n_bits, 4096, bias_var)

    # Hash the full fc7 feature layer and store the hashes with class labels.
    dataset, ids = utils.load_feature_layer('fc7')
    labels = utils.load_train_class_labels()

    hashes = generateHashes(dataset, fc7_scalar, plane_name, n_bits)
    storeHashesInDb(ids, labels, hashes, 'lsh_fc7', n_bits)
Exemple #6
0
def store_feature(layers, compression):
    """Create and populate one Postgres table per layer with compressed features.

    For each layer: drops and recreates the feature table (one column per
    available compression dimension), scales the layer's features with the
    stored scalar, compresses them once per dimension, and inserts one row
    per image together with its training-class label.

    NOTE(review): SQL text is built by concatenation; table/column names come
    from local helpers, so this is assumed safe for internal use only.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name (e.g. 'pca')
    """
    conn = psycopg2.connect(dbname=utils.dbname, user=utils.user, password=utils.password, host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()

    train_labels = utils.load_train_class_labels()
    for layer in layers:
        # Rebuild this layer's table from scratch.
        table_name = create_table_name(compression, layer)
        cur.execute("DROP TABLE IF EXISTS " + table_name + ";")
        table_command = "CREATE TABLE " + table_name + " (id serial PRIMARY KEY, file text, class integer, "
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            table_command += create_feature_column(dim)
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip trailing commas and close each clause.
        table_command = table_command[:-1] + ");"
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        print table_command
        print insert_command

        cur.execute(table_command)

        # Load the layer's features and normalize with the stored scalar.
        X, imagenet_ids = utils.load_feature_layer(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Compress once per dimension; reused for every row below.
        transforms = []
        for dim in dimensions:
            compressor = utils.load_compressor(layer, dim, compression)
            transforms.append(compressor.transform(X))

        for i in range(X.shape[0]):
            file_name = imagenet_ids[i]
            value = [file_name, train_labels[file_name]]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()

    print 'Done Creating Tables'
    print 'Total Time : ', time.clock() - start_time
Exemple #7
0
def store_tsne_feature(layers, compression, tsne_dim):
    """Create and populate per-layer tables of compressed (optionally t-SNE) features.

    For each layer: drops and recreates the feature table, scales the
    features, compresses them per dimension (for 'tsne', PCA-reduces first
    and then runs t-SNE via a MATLAB engine call), and inserts one row per
    image with its training-class label.

    NOTE(review): ``keep_idxs`` is read but never defined in this function
    -- it must be a module-level global; confirm it is set before calling.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name ('tsne' or e.g. 'pca')
    :param tsne_dim: output dimensionality passed to the MATLAB t-SNE routine
    """
    conn = psycopg2.connect(dbname=utils.dbname,
                            user=utils.user,
                            password=utils.password,
                            host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()
    # MATLAB engine used only for the t-SNE transform below.
    eng = matlab.engine.start_matlab()

    train_labels = utils.load_train_class_labels()
    for layer in layers:
        # Rebuild this layer's table from scratch.
        table_name = create_table_name(compression, layer)
        cur.execute("DROP TABLE IF EXISTS " + table_name + ";")
        table_command = "CREATE TABLE " + table_name + " (id serial PRIMARY KEY, file text, class integer, "
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        if compression == 'tsne':
            # t-SNE only supports a fixed PCA pre-reduction size here.
            dimensions = [64]  #[64, 128, 256]
        else:
            dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            table_command += create_feature_column(dim)
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip trailing commas and close each clause.
        table_command = table_command[:-1] + ");"
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        #print table_command
        #print insert_command

        cur.execute(table_command)

        # INSERT DATA INTO TABLE

        # Load the layer's features and normalize with the stored scalar.
        X, imagenet_ids = utils.load_feature_layer(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Subsample rows; keep_idxs is not defined here (see NOTE above).
        X = X[keep_idxs]
        imagenet_ids = np.asarray(imagenet_ids, dtype=np.object)
        imagenet_ids = imagenet_ids[keep_idxs]

        transforms = []
        # apply the compression algorithm
        for dim in dimensions:
            if compression == 'tsne':
                print 'tsne'
                # PCA-reduce first, then hand off to MATLAB for t-SNE.
                compressor = utils.load_compressor(layer, dim, 'pca')
                #utils.plot_tsne_features('fc7',64)

                comp_X = compressor.transform(X)
                comp_X = comp_X.tolist()

                # MATLAB requires matlab.double, not numpy arrays/lists.
                comp_X = matlab.double(comp_X)
                comp_X = eng.tsne_testing_python(comp_X, tsne_dim, layer, dim,
                                                 'pca')
                comp_X = np.array(comp_X)
                print comp_X
                transforms.append(comp_X)
            else:
                compressor = utils.load_compressor(layer, dim, compression)
                transforms.append(compressor.transform(X))

        value = []
        for i in range(X.shape[0]):
            file_name = imagenet_ids[i]
            value = [file_name, train_labels[file_name]]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()
    eng.exit()
    print 'Done Creating Tables'
    print 'Total Time : ', time.clock() - start_time
Exemple #8
0
    for idx, input_point in enumerate(testSet):
        hastValue = lsh._hash(lsh.uniform_planes[0], input_point.tolist())
        print hastValue

        lsh.index(input_point, idx)

    print lsh.query(testSet[3], 3)

    return None


if __name__ == '__main__':
    import utils

    n_bits = 4096

    # The scalar's per-feature std-devs determine the hyperplane bias scale.
    fc7_scalar = utils.load_scalar(layer='fc7')
    bias_var = np.mean(fc7_scalar.std_)

    # Generate (and persist) the biased random hyperplanes for hashing.
    plane_name = 'randomPlanesBias' + str(n_bits)
    plane_path = os.path.join(utils.lsh_planes_dir, plane_name)
    generatePlanesWithBias(plane_path, n_bits, 4096, bias_var)

    # Hash the full fc7 feature layer and store the hashes with class labels.
    dataset, ids = utils.load_feature_layer('fc7')
    labels = utils.load_train_class_labels()

    hashes = generateHashes(dataset, fc7_scalar, plane_name, n_bits)
    storeHashesInDb(ids, labels, hashes, 'lsh_fc7', n_bits)
Exemple #9
0
def store_feature(layers, compression):
    """Create and populate one Postgres table per layer with compressed features.

    For each layer: drops and recreates the feature table (one column per
    available compression dimension), scales the layer's features with the
    stored scalar, compresses them once per dimension, and inserts one row
    per image together with its training-class label.

    NOTE(review): SQL text is built by concatenation; table/column names come
    from local helpers, so this is assumed safe for internal use only.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name (e.g. 'pca')
    """
    conn = psycopg2.connect(dbname=utils.dbname,
                            user=utils.user,
                            password=utils.password,
                            host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()

    train_labels = utils.load_train_class_labels()
    for layer in layers:
        # Rebuild this layer's table from scratch.
        table_name = create_table_name(compression, layer)
        cur.execute("DROP TABLE IF EXISTS " + table_name + ";")
        table_command = "CREATE TABLE " + table_name + " (id serial PRIMARY KEY, file text, class integer, "
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            table_command += create_feature_column(dim)
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip trailing commas and close each clause.
        table_command = table_command[:-1] + ");"
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        print table_command
        print insert_command

        cur.execute(table_command)

        # Load the layer's features and normalize with the stored scalar.
        X, imagenet_ids = utils.load_feature_layer(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Compress once per dimension; reused for every row below.
        transforms = []
        for dim in dimensions:
            compressor = utils.load_compressor(layer, dim, compression)
            transforms.append(compressor.transform(X))

        for i in range(X.shape[0]):
            file_name = imagenet_ids[i]
            value = [file_name, train_labels[file_name]]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()

    print 'Done Creating Tables'
    print 'Total Time : ', time.clock() - start_time
Exemple #10
0
def store_tsne_feature(layers, compression, tsne_dim):
    """Create and populate per-layer tables of compressed (optionally t-SNE) features.

    For each layer: drops and recreates the feature table, scales the
    features, compresses them per dimension (for 'tsne', PCA-reduces first
    and then runs t-SNE via a MATLAB engine call), and inserts one row per
    image with its training-class label.

    NOTE(review): ``keep_idxs`` is read but never defined in this function
    -- it must be a module-level global; confirm it is set before calling.

    :param layers: iterable of layer names to process
    :param compression: compression algorithm name ('tsne' or e.g. 'pca')
    :param tsne_dim: output dimensionality passed to the MATLAB t-SNE routine
    """
    conn = psycopg2.connect(dbname=utils.dbname, user=utils.user, password=utils.password, host=utils.host)
    cur = conn.cursor()

    start_time = time.clock()
    # MATLAB engine used only for the t-SNE transform below.
    eng = matlab.engine.start_matlab()

    train_labels = utils.load_train_class_labels()
    for layer in layers:
        # Rebuild this layer's table from scratch.
        table_name = create_table_name(compression, layer)
        cur.execute("DROP TABLE IF EXISTS " + table_name + ";")
        table_command = "CREATE TABLE " + table_name + " (id serial PRIMARY KEY, file text, class integer, "
        insert_command = "INSERT INTO " + table_name + " (file, class,"
        values_sql = "VALUES(%s,%s,"

        if compression == 'tsne':
            # t-SNE only supports a fixed PCA pre-reduction size here.
            dimensions = [64] #[64, 128, 256]
        else:
            dimensions = utils.get_dimension_options(layer, compression)
        if len(dimensions) == 0:
            print 'POSSIBLE ERROR: No dimensions loaded for ', layer, ' with ', compression
            continue

        # One feature column (and one %s placeholder) per dimension option.
        for dim in dimensions:
            table_command += create_feature_column(dim)
            insert_command += create_feature_name(dim) + ","
            values_sql += "%s,"

        # Strip trailing commas and close each clause.
        table_command = table_command[:-1] + ");"
        values_sql = values_sql[:-1] + ");"
        insert_command = insert_command[:-1] + ") " + values_sql

        #print table_command
        #print insert_command

        cur.execute(table_command)

        # INSERT DATA INTO TABLE

        # Load the layer's features and normalize with the stored scalar.
        X, imagenet_ids = utils.load_feature_layer(layer)
        scalar = utils.load_scalar(layer)

        X = scalar.transform(X)

        # Subsample rows; keep_idxs is not defined here (see NOTE above).
        X = X[keep_idxs]
        imagenet_ids = np.asarray(imagenet_ids, dtype=np.object)
        imagenet_ids = imagenet_ids[keep_idxs]



        transforms = []
        # apply the compression algorithm
        for dim in dimensions:
            if compression == 'tsne':
                print 'tsne'
                # PCA-reduce first, then hand off to MATLAB for t-SNE.
                compressor = utils.load_compressor(layer, dim, 'pca')
                #utils.plot_tsne_features('fc7',64)

                comp_X = compressor.transform(X)
                comp_X = comp_X.tolist()

                # MATLAB requires matlab.double, not numpy arrays/lists.
                comp_X = matlab.double(comp_X)
                comp_X = eng.tsne_testing_python(comp_X, tsne_dim, layer, dim, 'pca')
                comp_X = np.array(comp_X)
                print comp_X
                transforms.append(comp_X)
            else:
                compressor = utils.load_compressor(layer, dim, compression)
                transforms.append(compressor.transform(X))

        value = []
        for i in range(X.shape[0]):
            file_name = imagenet_ids[i]
            value = [file_name, train_labels[file_name]]
            for X_prime in transforms:
                value.append(X_prime[i, :].tolist())

            cur.execute(insert_command, value)

        conn.commit()

    cur.close()
    conn.close()
    eng.exit()
    print 'Done Creating Tables'
    print 'Total Time : ', time.clock() - start_time