Example 1
def slic2(im, S, m=20, L_scale=0.5, mean_scale=1.0, std_scale=3.0):
    '''
    A k-means based superpixel algorithm inspired by SLIC.

    http://ivrg.epfl.ch/supplementary_material/RK_SLICSuperpixels/index.html
    '''
    cvmat = im.asOpenCV2()

    # Compute a label map and assign initial labels
    label_map, xgroups, ygroups, labels = _assignInitialPoints(cvmat, S)
    print label_map

    # Compute color features
    mat = cv2.cvtColor(cvmat, cv2.cv.CV_BGR2Lab)
    h, w, c = cvmat.shape

    # Compute location features
    x = np.arange(w).reshape(1, w) * np.ones(h, dtype=np.int).reshape(h, 1)
    y = np.arange(h).reshape(h, 1) * np.ones(w, dtype=np.int).reshape(1, w)

    # Scale the features (currently a no-op; a scaled feature vector is
    # sketched in the commented-out line below)
    mat = mat

    # Compute local statistics
    mean_L = mat[:, :, 0].copy()
    mean_L = ndi.gaussian_filter(mean_L, 0.5 * S)
    std_L = (mat[:, :, 0].copy() - mean_L)**2
    std_L = np.sqrt(ndi.gaussian_filter(std_L, 0.5 * S))

    mean_a = mat[:, :, 1].copy()
    mean_a = ndi.gaussian_filter(mean_a, 0.5 * S)
    std_a = (mat[:, :, 1].copy() - mean_a)**2
    std_a = np.sqrt(ndi.gaussian_filter(std_a, 0.5 * S))

    mean_b = mat[:, :, 2].copy()
    mean_b = ndi.gaussian_filter(mean_b, 0.5 * S)
    std_b = (mat[:, :, 2].copy() - mean_b)**2
    std_b = np.sqrt(ndi.gaussian_filter(std_b, 0.5 * S))

    # Create a feature vector matrix
    features = np.array([
        x,
        y,
        mat[:, :, 0],
        mat[:, :, 1],
        mat[:, :, 2],
    ])
    #features = np.array([x,y, L_scale*mat[:,:,0],mat[:,:,1],mat[:,:,2],mean_scale*mean_L,std_scale*std_L,mean_scale*mean_a,std_scale*std_a,mean_scale*mean_b,std_scale*std_b])

    for i in range(10):
        # Compute centroids
        timer = pv.Timer()
        centroids = _computeCentriods(features, labels, label_map)
        timer.mark('centroids')
        labels = _computeLabels(features, label_map, centroids, xgroups,
                                ygroups, S, m)
        timer.mark('labels')
    # Mark superpixel boundaries: a pixel is on a boundary when its 3x3
    # neighborhood is not uniformly labeled
    mask = 9 * labels != ndi.correlate(labels,
                                       [[1, 1, 1], [1, 1, 1], [1, 1, 1]])
    return labels.T, centroids, mask.T
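
A minimal usage sketch, not part of the original source. It reuses the baboon test image shipped with PyVision (see Example 7 below) and assumes a 2D float array can be wrapped back into a pv.Image; S is the superpixel grid spacing in pixels and m weights spatial against color distance.

import os
import pyvision as pv

filename = os.path.join(pv.__path__[0], 'data', 'misc', 'baboon.jpg')
im = pv.Image(filename)
labels, centroids, mask = slic2(im, S=20, m=20)
# labels is a 2D array of superpixel ids; mask marks superpixel boundaries
print "Approximate number of superpixels: %d" % (labels.max() + 1)
boundary_im = pv.Image(1.0 * mask)   # boundary mask as an image, e.g. for an ImageLog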
Example 2
    def test_2_SURF(self):
        '''SURF Taz: .......................................................'''
        ilog = None
        if 'ilog' in globals():
            ilog = globals()['ilog']

        filename = os.path.join(pv.__path__[0], 'data', 'test', 'TAZ_0010.jpg')
        im = pv.Image(filename)
        timer = pv.Timer()
        keypoints, descriptors = pv.surf(im)
        timer.mark("TazSurf")
        if ilog is not None:
            ilog(timer, "SURFTaz")
        for each in keypoints:
            im.annotateCircle(pv.Point(each[0][0], each[0][1]), each[2])
        if ilog is not None:
            ilog(im, 'SurfKeypoints')

        self.assertEqual(len(keypoints), len(descriptors))
        self.assertEqual(len(keypoints), 367)
Example 3
    def testReallyLargeTree(self):
        '''Tests using a really large tree.'''
        timer = pv.Timer()
        N = 100000
        K = 5
        points = np.random.uniform(size=(N,K))
        
        timer.mark("Tree Build Start")
        kdtree = FLANNTree(points)
        timer.mark("Tree Build Time")
        knn = KNearestNeighbors(points)
        timer.mark("Linear Build Time")

        timer.mark("Query Start")
        fdist,_ = kdtree.query([.05,.3,.9,.6,.2],k=4)
        timer.mark("KDTree Query")

        timer.mark("Query Start")
        _,_ = knn.query([.05,.3,.9,.6,.2],k=fdist[-1])
        timer.mark("Brute Force Query")
Example 4
    def testLargeTree(self):
        '''Tests using a large tree.'''
        timer = pv.Timer()
        #N = 300
        #K = 5
        points = TEST_POINTS_5D
        
        timer.mark("Tree Build Start")
        kdtree = FLANNTree(points)
        timer.mark("Tree Build Time")
        knn = KNearestNeighbors(points)
        timer.mark("Linear Build Time")

        timer.mark("Query Start")
        fdist,_ = kdtree.query([.05,.3,.9,.6,.2],k=4)
        timer.mark("KDTree Query")

        timer.mark("Query Start")
        _,_ = knn.query([.05,.3,.9,.6,.2],k=fdist[-1])
        timer.mark("Brute Force Query")
Example 5
    def testReallyHighDim(self):
        '''test using really high dimensions'''
        timer = pv.Timer()
        N = 1000
        K = 300
        points = np.random.uniform(size=(N,K))
        query = np.random.uniform(size=K)
        
        timer.mark("Tree Build Start")
        kdtree = FLANNTree(points)
        timer.mark("Tree Build Time")
        knn = KNearestNeighbors(points)
        timer.mark("Linear Build Time")

        timer.mark("Query Start")
        fdist,_ = kdtree.query(query,k=4,max_bins=100)
        timer.mark("KDTree Query")

        timer.mark("Query Start")
        _,_ = knn.query(query,k=fdist[-1])
        timer.mark("Brute Force Query")
Example 6
    def detect(self, img, face_records, options):
        '''Run a face detector and return rectangles.'''

        # Use the input image for the detection network
        mat = img
        #print('options<',options,'>')
        #thresh = options.threshold

        #result_id = 0

        #im = mat #cv2.imread(pathname)
        timer = pv.Timer()

        # Run the network in the worker processes...
        dets = self.runNetwork(mat, options)

        #dets = as_result.get()

        # Convert detections from (x1, y1, x2, y2) corners to
        # (x, y, width, height) rectangles
        dets[:, 2] = dets[:, 2] - dets[:, 0] + 1
        dets[:, 3] = dets[:, 3] - dets[:, 1] + 1
        timer.mark("End Detection")

        #print('dets')
        # Now process each face we found and add a face to the records list.
        for k, d in enumerate(dets):
            face_record = face_records.face_records.add()
            face_record.detection.score = d[4]
            face_record.detection.location.CopyFrom(
                pt.rect_val2proto(d[0], d[1], d[2], d[3]))
            face_record.detection.detection_id = k
            face_record.detection.detection_class = "FACE"

        # If only the best detection was requested, keep the highest scoring face
        if options.best:
            face_records.face_records.sort(key=lambda x: -x.detection.score)
            while len(face_records.face_records) > 1:
                del face_records.face_records[-1]
Example 7
# In this tutorial we will demonstrate how to use pv.Image to convert images
# to different formats, and in each format we will perform a simple image
# processing task of thresholding the image to produce a black and white
# equivalent.

if __name__ == "__main__":
    # Image logs are used to save images and other data to a directory
    # for later analysis.  These logs are valuable tools for understanding
    # the imagery and debugging algorithms.  Unless otherwise specified,
    # ImageLogs are usually created in the directory "/tmp".
    ilog = pv.ImageLog()

    # Timers keep a record of the time required for algorithms to execute.
    # They help determine runtimes and identify which parts of an algorithm
    # are too slow and need optimization.
    timer = pv.Timer()

    # The filename for the baboon image
    filename = os.path.join(pv.__path__[0], 'data', 'misc', 'baboon.jpg')

    # If a string is passed to the initializer it is assumed to be a path,
    # and the image will be read from that file.  The image is usually read
    # from disk using PIL and then stored as a PIL image.
    im = pv.Image(filename)

    # This command saves the image to an image log, which provides good
    # information for debugging.  It is often helpful to save many images
    # during processing to make sure that each step is producing the
    # intended result.
    ilog(im, "OriginalImage")
Example 8
def FRGCExp4Test(database, algorithm, face_detector=None, eye_locator=None, n=None, verbose=10.0, ilog=None):
    '''
    Run the FRGC Experiment 4 Test.

    On completion this will produce a BEE distance matrix.  The verbose
    argument is the interval in seconds between progress messages.
    '''
    message_time = time.time()
    timer = pv.Timer()

    # Produce face records for each image in the query set
    query_keys = database.query()
    if n is not None:
        query_keys = query_keys[:n]
    query_recs = []
    timer.mark("QueryStart")
    i = 0
    for key in query_keys:
        i += 1
        face = database[key]
        
        face_rec = algorithm.getFaceRecord(face.image,None,face.left_eye,face.right_eye)
        query_recs.append(face_rec)    
        if verbose:
            if time.time() - message_time > verbose:
                message_time = time.time()
                print "Processed query image %d of %d"%(i,len(query_keys))
                
    timer.mark("QueryStop",notes="Processed %d images."%len(query_keys))
    
    
    # Produce face records for each image in the target set
    message_time = time.time()
    target_keys = database.target()
    if n is not None:
        target_keys = target_keys[:n]
    target_recs = []
    timer.mark("TargetStart")
    i = 0
    for key in target_keys:
        i += 1
        face = database[key]
        
        face_rec = algorithm.getFaceRecord(face.image,None,face.left_eye,face.right_eye)
        target_recs.append(face_rec)    
        if verbose:
            if time.time() - message_time > verbose:
                message_time = time.time()
                print "Processed target image %d of %d"%(i,len(target_keys))
                
    timer.mark("TargetStop",notes="Processed %d images."%len(target_keys))
    
    print "Finished processing FaceRecs (%d query, %d target)"%(len(query_keys),len(target_keys))
    
    # Compute the similarity matrix
    print "Computing similarity matrix..."
    timer.mark("SimilarityStart")
    mat = algorithm.similarityMatrix(query_recs,target_recs)
    timer.mark("SimilarityStop",notes="Processed %d comparisons."%(mat.shape[0]*mat.shape[1],))

    print "Completing task..."
    print mat.shape
    
    bee_mat = pv.BEEDistanceMatrix(mat,"FRGC_Exp_2.0.4_Query.xml", "FRGC_Exp_2.0.4_Target.xml", sigset_dir=database.sigset_dir, is_distance=False)
    
    if ilog is not None:
        ilog(timer)
        
    return bee_mat, timer
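
The query and target loops above print progress at most once every 'verbose' seconds rather than on every iteration. A minimal, self-contained sketch of that pattern (the loop body and item count here are placeholders, not from the original source):

import time

verbose = 2.0                      # seconds between progress messages
message_time = time.time()
items = range(100000)
for i, item in enumerate(items):
    # ... do the per-item work here ...
    if time.time() - message_time > verbose:
        message_time = time.time()
        print "Processed item %d of %d" % (i + 1, len(items))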