Code example #1
def points_xyztheta_to_xyzquat(points_xyztheta):
    new_points = []
    for pt in tp(points_xyztheta):
        quat = Quaternion.from_yaw(pt[-1])
        new_points.append(na.append(pt[0:3], quat.q))
    new_points = na.array(new_points)
    return tp(new_points)
Code example #2
def compH_dt(qray,dray,ddep,constants):
    # set variables
    wRq, wRd, qYaw, nYaw = constants
#    prm = np.array([0,0,0,1])
#    prm = lsqH_dt(prm,qray,dray,ddep,constants)
#    valid = (errH_dt(prm,qray,dray,ddep,constants)<.001).all()
#    return prm, valid
    xd, yq = dray, qray
    xw = tp(np.dot(wRd,tp(xd)))
    yw = tp(np.dot(wRq,tp(yq)))
    z = np.cross(yw,xw)
    a = nYaw * np.pi/180 # homography normal bearing
#    # compute homography parameters
    t = geom.normalrows(np.cross(z[0,:],z[1,:])) # homography translation
    w = np.cross(yw,t)
    maxidx = np.argmax(w,1)
    b = z[[0,1],maxidx]/w[[0,1],maxidx]
#    b = np.mean(z/w,1)
    k = np.mean(-b/(xw[:,0]*np.sin(a)+xw[:,2]*np.cos(a)))
    t = k*t
    if np.mean(np.inner(xw-yw,t)) < 0: t, a = -t, a+np.pi
    dep = np.mean(ddep*np.inner(xw,[-np.sin(a),0,-np.cos(a)]))
    prm = np.append(t,dep)
    valid = (errH_dt(prm,qray,dray,ddep,constants)<.01).all()
    return prm, valid
Code example #3
def LfromLSD(path, img, Kcal):

    # load lines; if not already generated, run LSD
    if not os.path.isdir(os.path.dirname(path)): os.mkdir(os.path.dirname(path))
    if not os.path.isfile(path):
        callLSD(path, img)
    lines = loadLines(path)

    # map the line segment endpoints to the image frame
    nlines = lines.shape[0]
    Kinv = alg.inv(Kcal)
    end1 = tp( np.dot( Kinv , np.concatenate( ([lines[:,0]],[lines[:,1]],[np.ones(nlines)]) , 0 ) ) )
    end2 = tp( np.dot( Kinv , np.concatenate( ([lines[:,2]],[lines[:,3]],[np.ones(nlines)]) , 0 ) ) )

    # convert to midpoints, equations, and lengths
    lineqs = np.zeros((nlines,3))
    lineqs[:,0] , lineqs[:,1] = end2[:,1]-end1[:,1] , end1[:,0]-end2[:,0]
    lineqs[:,2] = -np.sum(lineqs*end1,1)
    lineqs = geom.normalrows(lineqs)
    lengths = geom.vecnorm(end1-end2)
    midpts = (end1+end2)/2.0

    # remove lines that are too vertical
    mask = np.abs(lineqs[:,1]/lineqs[:,0]) > np.tan(10*np.pi/180)
    midpts, lineqs, lengths = midpts[mask,:], lineqs[mask,:], lengths[mask]

    return midpts, lineqs, lengths
Code example #4
def esserrf_tq(prm, qray, dray, pr, wRd, domidx):
    # set variables
    dRq = np.dot(tp(wRd), geom.RfromYPR(prm[2], pr[0], pr[1]))
    td = np.dot(tp(wRd), geom.normalrows(np.insert(prm[:2], domidx, 1)))
    E = np.dot(tp(dRq), geom.xprodmat(td))
    # Compute homography error
    return np.sum(qray * tp(np.dot(E, tp(dray))), 1)
Code example #5
def compH_dtn(qray,dray,ddep,constants):
    # set variables
    wRq, wRd, qYaw, nYaw = constants
#    prm = np.array([0,0,0,0,1])
#    prm = lsqH_dtn(prm,qray,dray,ddep,constants)
#    valid = (errH_dtn(prm,qray,dray,ddep,constants)<.001).all()
#    return prm, valid
    xd, yq = dray, qray
    xw = tp(np.dot(wRd,tp(xd)))
    yw = tp(np.dot(wRq,tp(yq)))
    z = np.cross(yw,xw)
#    # compute homography parameters
    t = geom.normalrows(np.cross(z[0,:],z[1,:])) # homography translation
    w = np.cross(yw,t)
    maxidx = np.argmax(w,1)
    b = z[[0,1],maxidx]/w[[0,1],maxidx]
    ka_init = np.array([0,np.pi+np.mean(np.arctan2(xw[:,0],xw[:,2]))])
    errf = lambda prm,argb,argx: argb+prm[0]*(argx[:,0]*np.sin(prm[1])+argx[:,2]*np.cos(prm[1]))
    k, a = tuple( opt.leastsq(errf,ka_init,args=(b,xw),warning=False)[0] )
    t = k*t
    if np.mean(np.inner(xw-yw,t)) < 0: t, a = -t, a+np.pi
    dep = np.mean(ddep*np.inner(xw,[-np.sin(a),0,-np.cos(a)]))
    prm = np.append(t,[180/np.pi*a,dep])
    valid = (errH_dtn(prm,qray,dray,ddep,constants)<.01).all()
    return prm, valid
Code example #7
File: groundings_test.py  Project: h2r/slu_core
    def testPath(self):
        pobj = PhysicalObject(
            Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]), 0,
                                 3), ["tires"],
            path=Path.from_xyztheta([0, 1, 2],
                                    tp([(3, 3, 0, 0), (3, 3, 0, math.pi / 4),
                                        (4, 4, 1, math.pi / 4)])))

        self.assertEqual(pobj.prismAtT(0), pobj.prism)

        aeq(pobj.path.locationAtT(1), (3, 3, 0, math.pi / 4))

        self.assertEqual(
            pobj.prismAtT(1),
            Prism.from_points_xy(
                array([[0.5, 1.20710678, 0.5, -0.20710678],
                       [-0.20710678, 0.5, 1.20710678, 0.5]]), 0.0, 3.0))

        aeq(pobj.path.locationAtT(2), (4, 4, 1, math.pi / 4))

        self.assertEqual(
            pobj.prismAtT(2),
            Prism.from_points_xy(
                array([[1.5, 2.20710678, 1.5, 0.79289322],
                       [0.79289322, 1.5, 2.20710678, 1.5]]), 1.0, 4.0))

        aeq(pobj.path.locationAtT(-1),
            pobj.path.locationAtT(len(pobj.path.timestamps)))
Code example #8
    def testGroundings(self):
        corpus = annotationIo.load(SOURCE_FILE)
        annotation = corpus[0]

        esdc = annotation.flattenedEsdcs[0]

        annotation.addGrounding(
            esdc,
            PhysicalObject(
                Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]), 3,
                                     4), ["tire", "pallet"]))

        annotation.addGrounding(
            esdc,
            Place(
                Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]), 3,
                                     4)))

        annotation.addGrounding(
            esdc,
            Path.from_xyztheta(timestamps=[0, 1],
                               points_xyztheta=pts_to_xyzTheta([(0, 0),
                                                                (1, 1)])))

        yamlCorpus = annotationIo.toYaml(corpus)

        print "yaml", yamlCorpus
        newCorpus = annotationIo.fromYaml(yamlCorpus)

        esdc1 = corpus[0].flattenedEsdcs[0]
        esdc2 = newCorpus[0].flattenedEsdcs[0]
        null_ids(esdc1)
        null_ids(esdc2)
        self.assertEqual(esdc1, esdc2)
Code example #9
File: 3d_features_test.py  Project: h2r/slu_features
    def testPrism(self):
        prism1 = Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]),
                                      zStart=0,
                                      zEnd=4)

        prism2 = Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]),
                                      zStart=4.1,
                                      zEnd=5)

        self.assertEqual(math3d_higher_than(prism2, prism2), False)
        self.assertEqual(math3d_higher_than(prism1, prism1), False)

        self.assertEqual(math3d_higher_than(prism1, prism2), False)
        self.assertEqual(math3d_higher_than(prism2, prism1), True)

        self.assertEqual(math2d_overlaps(prism1.points_xy, prism2.points_xy),
                         True)
        self.assertEqual(math2d_overlaps(prism2.points_xy, prism1.points_xy),
                         True)

        #print "points", prism1.points_xy
        #print "points", prism1.points_xy[0]
        features = sfe_f_prism_l_prism(prism1, prism2, normalize=True)
        fnames = list(sfe_f_prism_l_prism_names())
        self.assertEqual(len(fnames), len(features))
        print fnames
        print features
        fmap = dict(zip(fnames, features))
        self.assertEqual(fmap['F_3dEndsHigherThanFigureLandmark'], 0)
        self.assertEqual(fmap['F_3dEndsHigherThanLandmarkFigure'], 1)

        self.assertEqual(fmap['F_3dSupportsFigureLandmark'], 1)
        self.assertEqual(fmap['F_3dSupportsLandmarkFigure'], 0)
Code example #10
    def from_pose(points_xy, zStart, zEnd, dloc, quaternion=Quaternion.null()):
        """
        Creates a prism at a pose with the specified geometry
        """
        assert not na.any(na.isnan(points_xy)), points_xy

        dx, dy, dz = dloc
        X, Y = points_xy
        X = X + dx
        Y = Y + dy

        lower_points_xyz = na.array(
            [X, Y, na.zeros(len(points_xy[0])) + zStart + dz])
        upper_points_xyz = na.array(
            [X, Y, na.zeros(len(points_xy[0])) + zEnd + dz])

        dloc = na.array([dx, dy, dz])

        lower_points_xyz = tp(
            [quaternion.rotate(p - dloc) + dloc for p in tp(lower_points_xyz)])
        upper_points_xyz = tp(
            [quaternion.rotate(p - dloc) + dloc for p in tp(upper_points_xyz)])
        if na.any(na.isnan(lower_points_xyz) + na.isnan(upper_points_xyz)):
            print "points_xy", points_xy
            print "z", zStart, zEnd
            print "dloc", dloc
            raise ValueError("nan")
        return Prism(lower_points_xyz, upper_points_xyz)
Code example #11
def points_xyzquat_to_xyztheta(points_xyzquat):
    new_points = []
    for pt in tp(points_xyzquat):
        quat = Quaternion(pt[3:])
        roll, pitch, yaw = quat.to_roll_pitch_yaw()
        new_points.append(na.append(pt[0:3], [yaw]))
    return tp(new_points)
Code example #12
def errE_t(prm, qray, dray, constants, domidx):
    # set variables
    wRq, wRd, qYaw = constants
    dRq = np.dot(tp(wRd), wRq)  # relative rotation taking query-frame rays into the database frame
    td = np.dot(tp(wRd), prm)
    E = np.dot(tp(dRq), geom.xprodmat(td))
    # Compute homography error
    return np.abs(np.sum(qray * tp(np.dot(E, tp(dray))), 1))
Code example #15
File: helper.py  Project: famagusta/sca_python
def compute_seq_corr_matrix(X, N):
    '''computes sequence correlation matrix'''
    seq_mat_prod = dot(X, tp(X))/N
    seq_avg_prod = \
        dot(tp(matrix(mean(tp(X), 0))), matrix(mean(tp(X), 0)))

    seq_corr = npabs(seq_mat_prod - seq_avg_prod)
    return seq_corr
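
A minimal usage sketch for the function above, assuming the bare names are NumPy aliases (dot, mean, matrix, tp = numpy.transpose, npabs = numpy.abs) and that X holds one sequence per row with N samples per row; the random data below is only there to exercise the call:

import numpy as np
from numpy import dot, mean, matrix
from numpy import transpose as tp, abs as npabs

X = np.random.rand(5, 100)                 # 5 sequences, 100 samples each
corr = compute_seq_corr_matrix(X, X.shape[1])
print(corr.shape)                          # (5, 5): |E[xy] - E[x]E[y]| for each pair of sequences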
Code example #16
def planefrom3d(C, Q, dbmatch, domplane, Kdinv, wRd):

    if domplane == -1: return np.nan * np.zeros(5)

    # get 3d points on plane
    planes = np.asarray(
        Image.open(os.path.join(C.hiresdir, dbmatch + '-planes.png')))
    depths = np.asarray(
        Image.open(os.path.join(C.hiresdir, dbmatch + '-depth.png')))
    y, x = np.nonzero(planes == domplane)
    npts = len(x)
    pray = geom.normalrows(
        tp(
            np.dot(
                wRd,
                np.dot(Kdinv, np.concatenate(([x], [y], [np.ones(npts)]),
                                             0)))))
    pdep = depths[y, x] / 100.0
    p3d = np.append(geom.vecmul(pray, pdep),
                    tp(np.array([np.ones(len(pray))])), 1)
    xz_pts = p3d[:, [0, 2, 3]]

    # RANSAC solve
    threshold, g = 2, np.array([0, 1, 0])  # meters
    bprm, bnumi, bmask = np.zeros(3), 0, np.bool_(np.zeros(npts))
    for i in range(1000):
        i1 = rnd.randint(0, npts)
        i2 = rnd.randint(0, npts - 1)
        i2 = i2 if i2 < i1 else i2 + 1
        i3 = rnd.randint(0, npts - 2)
        i3 = i3 if i3 < min(i1, i2) else (i3 +
                                          1 if i3 + 1 < max(i1, i2) else i3 +
                                          2)
        inlpts = xz_pts[[i1, i2, i3], :]
        prm = geom.smallestSingVector(inlpts)
        prm = prm / geom.vecnorm(prm[:2])
        prm = -prm if prm[2] < 0 else prm
        errs = np.abs(np.inner(xz_pts, prm))
        inlmask = errs < threshold
        numi = np.sum(inlmask)
        if numi > bnumi and float(numi) / npts > 0.5:
            bprm, bmask, bnumi = prm, inlmask, numi
    prm, numi, mask = bprm, bnumi, bmask

    # guided matching
    for i in range(10):
        if numi == 0: break
        prm = geom.smallestSingVector(xz_pts[mask, :])
        prm = prm / geom.vecnorm(prm[:2])
        prm = -prm if prm[2] < 0 else prm
        errs = np.abs(np.inner(xz_pts, prm))
        mask = errs < threshold
        numi = np.sum(mask)

    # get error
    err = np.mean(np.abs(np.inner(xz_pts[mask, :], prm)))

    return np.array([prm[0], 0, prm[1], prm[2], err])
Code example #17
def homerrf_t(prm,qray,dray,ddep,dRq,wRd,nbear):
    # set variables
    td = np.dot(tp(wRd),prm[:3])
    nd = -np.dot(tp(wRd),[np.sin(nbear*np.pi/180),0,np.cos(nbear*np.pi/180)])
    H = np.dot(tp(dRq),np.eye(3,3)-np.outer(td,nd))
    # Compute homography error
    Hd = tp(np.dot(H,tp(dray)))
    err = np.append( qray[:,0]/qray[:,2]-Hd[:,0]/Hd[:,2] , qray[:,1]/qray[:,2]-Hd[:,1]/Hd[:,2] )
    return err
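
homerrf_t only depends on NumPy, so it can be exercised directly. A minimal sketch, assuming tp = numpy.transpose, qray/dray are N x 3 arrays of homogeneous image rays, and nbear is the plane-normal bearing in degrees; all values below are made up for illustration:

import numpy as np
from numpy import transpose as tp

N = 4
qray = np.random.rand(N, 3) + 0.1   # query rays (components kept away from zero)
dray = np.random.rand(N, 3) + 0.1   # database rays
ddep = np.ones(N)                    # database depths (unused by homerrf_t itself)
dRq = np.eye(3)                      # relative rotation, identity for the sketch
wRd = np.eye(3)                      # database camera-to-world rotation
prm = np.array([0.1, 0.0, 0.2])      # candidate translation
err = homerrf_t(prm, qray, dray, ddep, dRq, wRd, 90.0)
print(err.shape)                      # (2*N,): stacked x/z and y/z reprojection residuals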
Code example #18
def compE_t(qray,dray,constants):
    # set variables
    wRq, wRd, qYaw = constants
    xd, yq = dray, qray
    yw = tp(np.dot(wRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yw,xw)
    # compute essential matrix parameters based off guessed yaw
    t = geom.normalrows(np.cross(tn[0,:],tn[1,:])) # homography translation
    return t, -1, True
Code example #19
def lsqE_t(prm,qray,dray,constants,domidx):
    # set variables
    wRq, wRd, qYaw = constants
    xd, yq = dray, qray
    yw = tp(np.dot(wRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yw,xw) # no renormalization to bias more confident planes
    # compute essential matrix parameters based off guessed yaw
    teig = alg.eig(np.dot(tp(tn),tn))
    return geom.normalrows(teig[1][:,np.argmin(teig[0])]) # essential matrix translation
Code example #21
def calculate_w(reg, x, y):
  d = x.shape[1]
  covar = mm(tp(x),x )
  lambdai = np.diag( np.ones(d)*reg  )
  addedmatrix = lambdai + covar
  inverse = inv(addedmatrix)

  rightside = mm(tp(x), y) 

  w = mm(inverse,rightside)
  return w
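
A minimal usage sketch for the ridge-regression solve above, assuming mm = numpy.matmul, tp = numpy.transpose and inv = numpy.linalg.inv; calculate_w returns w = (X^T X + reg*I)^-1 X^T y, so with light noise and a small reg it should recover the generating weights:

import numpy as np
from numpy import matmul as mm, transpose as tp
from numpy.linalg import inv

x = np.random.rand(50, 3)                        # 50 samples, 3 features
true_w = np.array([1.0, -2.0, 0.5])
y = mm(x, true_w) + 0.01 * np.random.randn(50)   # noisy linear targets
w = calculate_w(0.001, x, y)
print(w)                                          # close to true_w for small regularization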
Code example #22
File: objectAnnotator.py  Project: h2r/slu_core
 def addPath(self):
     annotation = self.annotationModel.selectedAnnotation()
     esdc = self.esdcModel.selectedEsdc()
     timestamps = [0 for p in self.currPath]
     points_xyztheta = [tp(self.currPath)[0], tp(self.currPath)[1], [0 for p in self.currPath], [0 for p in self.currPath]]
     path = Path(timestamps, points_xyztheta)
     annotation.addGrounding(esdc, path)
     self.groundingsModel.setData(annotation.getGroundings(esdc))
     self.pathNodes = {}
     self.currPath = []
     self.drawForPath()
Code example #23
    def destPolygons(self):
        fvec = spatial_features_avs_polygon_polygon(
            tp([(0, 0), (1, 0), (1, 1), (0, 1)]),
            tp([(2, 0), (3, 0), (3, 1), (2, 1)]), 0)

        self.assertFalse(any(na.isnan(x) for x in fvec))

        fvec = spatial_features_avs_polygon_polygon(
            tp([(0, 0), (1, 0), (1, 1), (0, 1)]),
            tp([(0, 0), (1, 0), (1, 1), (0, 1)]), 0)
        self.assertFalse(any(na.isnan(x) for x in fvec))
Code example #25
def compressP(timestamps, points_xyztheta):
    points = tp(points_xyztheta)
    assert len(points) == len(timestamps), (len(points), len(timestamps))
    ctimestamps = []
    cpoints = []

    for t, pt in zip(timestamps, points):
        if len(cpoints) == 0 or not all(pt == cpoints[-1]):
            ctimestamps.append(t)
            cpoints.append(pt)
    return ctimestamps, tp(cpoints)
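
A minimal usage sketch, assuming tp = numpy.transpose and a 4 x N array of (x, y, z, theta) columns; compressP drops consecutive duplicate poses together with their timestamps:

import numpy as np
from numpy import transpose as tp

timestamps = [0, 1, 2, 3]
points_xyztheta = tp([(0, 0, 0, 0),
                      (0, 0, 0, 0),    # duplicate of the previous pose
                      (1, 0, 0, 0),
                      (1, 1, 0, 0)])
ctimestamps, cpoints = compressP(timestamps, points_xyztheta)
print(ctimestamps)      # [0, 2, 3]
print(cpoints.shape)    # (4, 3): three distinct poses remain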
Code example #26
def homerrf_dt(prm,qray,dray,ddep,dRq,wRd,nbear):
    # set variables
    td = np.dot(tp(wRd),prm[:3])
    nd = -np.dot(tp(wRd),[np.sin(nbear*np.pi/180),0,np.cos(nbear*np.pi/180)])
    pd = prm[3]
    H = np.dot(tp(dRq),np.eye(3,3)-np.outer(td,nd))
    # Compute homography error
    Hd = tp(np.dot(H,tp(dray)))
    err = np.concatenate( [ qray[:,0]/qray[:,2]-Hd[:,0]/Hd[:,2] ,
                            qray[:,1]/qray[:,2]-Hd[:,1]/Hd[:,2] ,
                            alpha() * ( pd - ddep*np.inner(dray,nd) ) / pd ] )
    return err
Code example #27
    def draw(self):
        print "drawing"
        self.axes.clear()
        entry = self.nwayVsBinaryModel.selectedEntry()
        print "plotting", entry.nway_score
        g = entry.geometry
        X, Y = tp(g["figure"])
        self.axes.plot(X, Y, color="blue")

        X, Y = tp(g["landmark"] + [g["landmark"][0]])
        self.axes.plot(X, Y, color="red")

        self.figure.canvas.draw()
Code example #28
def drawObjectPathCostMap(featureBrowser, obj_esdc, event_esdcs, annotation,
                          cf, xmin, xmax, ymin, ymax, step):
    xstart, ystart = annotation.getGroundings(obj_esdc)[0].centroid2d

    ax, ay = annotation.agent.centroid2d
    ath = annotation.agent.path.theta[0]
    print ax, ay, ath

    #agent move to pick up object
    aX, aY = sf.math2d_step_along_line(tp([(ax, ay), (xstart, ystart)]), .1)
    aTh = ath * na.ones(len(aX))

    costs = na.zeros((int((ymax - ymin) / step), int((xmax - xmin) / step)))
    annotations = []

    for i, x in enumerate(na.arange(xmin, xmax, step)):
        for j, y in enumerate(na.arange(ymin, ymax, step)):

            X, Y = sf.math2d_step_along_line(tp([(xstart, ystart), (x, y)]),
                                             .1)

            Z = na.ones(len(X))
            th = na.zeros(len(X))
            timestamps = range(len(X))

            path = Path(timestamps, [X, Y, Z, th])

            new_annotation = annotation_copy(annotation)
            atimestamps = range(len(X) + len(aX))
            axs = na.append(aX, X)
            ays = na.append(aY, Y)
            azs = na.zeros(len(X) + len(aX))
            ath = na.append(aTh, th)
            new_annotation.agent.path = Path(atimestamps, [axs, ays, azs, ath])

            obj = new_annotation.getGroundings(obj_esdc)[0]
            obj.path = path

            assignPathGroundings(event_esdcs[0], new_annotation)

            state, gggs = annotation_to_ggg_map(new_annotation)
            ggg = ggg_from_esdc(new_annotation.esdcs[0])
            factor = ggg.esdc_to_factor(event_esdcs[0])

            cost, entries = cf.compute_costs([factor],
                                             ggg,
                                             state_sequence=None)
            costs[j][i] = math.exp(-1.0 * cost)
            annotations.append(((x, y), entries))
        print i
    featureBrowser.setCostImage(costs, annotations, xmin, xmax, ymin, ymax)
Code example #29
 def fromYaml(yaml):
     if yaml == None:
         return None
     else:
         if "points_xyztheta" in yaml:
             return Path.from_xyztheta(
                 timestamps=[float(long(x)) for x in yaml["timestamps"]],
                 points_xyztheta=tp(yaml["points_xyztheta"]))
         elif "points_xyzquat" in yaml:
             return Path(
                 timestamps=[float(long(x)) for x in yaml["timestamps"]],
                 points_xyzquat=tp(yaml["points_xyzquat"]))
         else:
             raise ValueError("Unsupported format.")
Code example #30
    def destSameOrientedPolygons(self):
        polygon = [
            [21.72099827, 40.789814],
            [21.22099828, 41.65583942],
            [22.26022877, 42.25583939],
            [22.76022875, 41.38981397],
        ]

        fvec = spatial_features_avs_polygon_polygon(tp(polygon), tp(polygon),
                                                    0)
        names = list(sfe_f_polygon_l_polygon_names())

        print names, fvec
        self.assertFalse(any(na.isnan(x) for x in fvec))
Code example #31
File: context3d.py  Project: h2r/slu_core
    def findIntersect(self,
                      here_point,
                      select_point,
                      obj=None,
                      grounding_filter=lambda x: True):
        if obj == None:
            obj = self.findClosestGrounding(select_point, grounding_filter)
        p = obj.prism
        x, y, z = sf.math3d_intersect_line_plane(
            tp([select_point, here_point]),
            tp([(x, y, p.zEnd) for x, y in p.points_pts]))

        assert_sorta_eq(z, p.zEnd)
        z = p.zEnd
        return (x, y, z), obj
Code example #32
File: geom.py  Project: taoihsu/image_retreival
def YfromR(R):
    yaw = np.arctan2(R[0, 2], R[2, 2])
    Ry = np.array([[np.cos(yaw), 0, np.sin(yaw)], [0, 1, 0],
                   [-np.sin(yaw), 0, np.cos(yaw)]])
    Rpr = np.dot(tp(Ry), R)
    yaw = 180 / np.pi * yaw
    return yaw, Rpr
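
A minimal check of the decomposition above, assuming tp = numpy.transpose and that R is a camera-to-world rotation with yaw measured about the y axis; a pure-yaw rotation should come back as the yaw angle in degrees plus an identity pitch/roll remainder:

import numpy as np
from numpy import transpose as tp

a = np.pi / 180 * 30.0                      # 30 degrees of yaw
R = np.array([[np.cos(a), 0, np.sin(a)],
              [0, 1, 0],
              [-np.sin(a), 0, np.cos(a)]])
yaw, Rpr = YfromR(R)
print(yaw)                                  # ~30.0
print(np.allclose(Rpr, np.eye(3)))          # True: no pitch/roll left over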
Code example #33
def annotation_candidate(esdc_structure, esdc_field_to_texts, groundings,
                         test_grounding):
    esdc_candidate = make_esdc_candidate(esdc_structure, esdc_field_to_texts)
    annotation = Annotation("test", esdc_candidate)

    esdc_candidate = esdc_candidate[0]
    annotation.setGrounding(esdc_candidate, test_grounding)
    for field in ExtendedSdc.fieldNames:
        if not esdc_candidate.childIsEmpty(field):
            child = esdc_candidate.children(field)[0]
            while True:
                grounding = random.choice(groundings)
                if not isinstance(grounding, PhysicalObject):
                    continue
                else:
                    break
            annotation.setGrounding(child, grounding)
    if isinstance(test_grounding, PhysicalObject):
        annotation.agent = test_grounding
    else:
        annotation.agent = PhysicalObject(Prism(
            tp([(0, 0), (1, 0), (1, 1), (0, 1)]), 0, 1),
                                          tags=["agent"],
                                          path=test_grounding)

    return annotation, esdc_candidate
Code example #34
 def findEval(self):
     numWrong = 0
     for index, x in enumerate(self.Z_validation):
         y = np.sign(dot(tp(self.w), x))
         if self.y_validation[index] != y:
             numWrong += 1
     return float(numWrong) / float(len(self.y_validation))
Code example #35
 def findEout(self):
     numWrong = 0
     for index, x in enumerate(self.Z_test):
         y = np.sign(dot(tp(self.w), x))
         if self.y_test[index] != y:
             numWrong += 1
     return float(numWrong) / float(len(self.y_test))
Code example #37
def compH_dtqn(qray,dray,ddep,constants):
    # set variables
    Rpr, wRd, qYaw, nYaw = constants
    pr = geom.YPRfromR(Rpr)[1:] # pitch and roll
    dRq = np.dot(tp(wRd),geom.RfromYPR(qYaw,pr[0],pr[1]))
    xd, yq = dray, qray
    yd = tp(np.dot(dRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yd,xd) # no renormalization to bias more confident planes
    # compute homography parameters
    teig = alg.eig(np.dot(tp(tn),tn))
    nullidx = np.argmin(teig[0])
    valid = teig[0][nullidx] < 1e-2
    t = geom.normalrows(teig[1][:,nullidx]) # homography translation
    m = geom.vecnorm(tn)/(geom.vecnorm(np.cross(yd,t))*geom.vecnorm(xw[:,[0,2]]))
    f = np.arctan2(xw[:,0],xw[:,2])
    errf = lambda prm,argm,argf: prm[0]-argm/np.cos(prm[1]-argf)
    kn_init = np.array([1.2*np.mean(m),np.mean(f)])
    k, n = tuple( opt.leastsq(errf,kn_init,args=(m,f),warning=False)[0] )
    valid = valid and np.std( m/(k*np.cos(n-f)) ) < 0.1
    fe = np.mod(n-np.mean(f),2*np.pi)
    if np.abs(fe) < np.pi/2: n = np.mod(n+np.pi,2*np.pi)
    if np.mean(np.inner(xd-yd,t)) < 0: t = -t
    # compute plane depth
    nd = -np.dot(tp(wRd),[np.sin(n),0,np.cos(n)])
    dep = ddep*np.inner(dray,nd)
    pd = np.mean(dep)
    valid = valid and np.std(dep/pd) < 0.1
    # set parameters and refine
    prm = np.append(np.abs(k)*np.dot(wRd,t),[qYaw,180/np.pi*n,pd])
    if valid: prm = lsqH_dtqn(prm,qray,dray,ddep,constants)
    valid = valid and geom.vecnorm(prm[:3]) < 5
    return prm, valid
Code example #38
def compE_tq(qray,dray,constants):
    # set variables
    Rpr, wRd, qYaw = constants
    pr = geom.YPRfromR(Rpr)[1:] # pitch and roll
    wRq = geom.RfromYPR(qYaw,pr[0],pr[1])
    xd, yq = dray, qray
    yw = tp(np.dot(wRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yw,xw) # no renormalization to bias more confident planes
    # compute essential matrix parameters based off guessed yaw
    teig = alg.eig(np.dot(tp(tn),tn))
    nullidx = np.argmin(teig[0])
    valid = teig[0][nullidx]/teig[0][np.argmax(teig[0])] < 1e-2
    t = geom.normalrows(teig[1][:,nullidx]) # essential matrix translation
    domidx = np.argmax(t)
    prm = np.append( np.delete(t/t[domidx],domidx) , qYaw )
    if valid: prm = lsqE_tq(prm,qray,dray,constants,domidx)
    return prm, domidx, valid
Code example #39
File: diaper_state.py  Project: h2r/conversations
    def example_state():
        robot = PhysicalObject(Prism.from_points_xy(tp([(0, 0), (1, 0), (1, 1), (0, 1)]),
                                                    0, 2),
                               tags=("robot",), 
                               lcmId=DiaperState.AGENT_ID + 2)
        caregiver = PhysicalObject(Prism.from_points_xy(tp([(3, 2), (4, 2), (4, 3), (3, 3)]),
                                                    0, 2),
                               tags=("caregiver",), 
                               lcmId=DiaperState.AGENT_ID + 3)
        child = PhysicalObject(Prism.from_points_xy(tp([(4, 4), (5, 4), (5, 5), (4, 5)]),
                                                    0, 0.5),
                               tags=("child",), 
                               lcmId=DiaperState.AGENT_ID + 3)


        objects = [
            PhysicalObject(Prism.from_points_xy(tp([(-1, 1.1), (2, 1.1), (2, 3),
                                                        (-1, 3)]),
                                                    1, 1.25), tags=("table",), lcmId=3),
            PhysicalObject(Prism.from_points_xy(tp([(0.5, 1.5), (0.75, 1.5), 
                                                    (0.75, 1.75), (0.5, 1.75)]),
                                                1.25, 1.3), tags=("wipes",), 
                           lcmId=4), 
            PhysicalObject(Prism.from_points_xy(tp([(1, 2), (1.25, 2), 
                                                    (1.25, 2.25), (1, 2.25)]),
                                                1.25, 1.3), tags=("diaper",), 
                           lcmId=5)]
        return DiaperState(robot, caregiver, child, objects)
Code example #40
def compH_dtq(qray,dray,ddep,constants):
    # set variables
    Rpr, wRd, qYaw, nYaw = constants
    pr = geom.YPRfromR(Rpr)[1:] # pitch and roll
    dRq = np.dot(tp(wRd),geom.RfromYPR(qYaw,pr[0],pr[1]))
    xd, yq = dray, qray
    yd = tp(np.dot(dRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yd,xd)
    n = nYaw * np.pi/180 # homography normal bearing
    # compute homography parameters based off guessed yaw
    t = geom.normalrows(np.cross(tn[0,:],tn[1,:])) # homography translation
    m = geom.vecnorm(tn)/(geom.vecnorm(np.cross(yd,t))*geom.vecnorm(xw[:,[0,2]]))
    f = np.arctan2(xw[:,0],xw[:,2])
    k = np.mean( m / np.cos(n-f) )
    valid = np.std( m/(k*np.cos(n-f)) ) < 0.1
    fe = np.mod(n-np.mean(f),2*np.pi)
    if np.abs(fe) < np.pi/2: n = np.mod(n+np.pi,2*np.pi)
    if np.mean(np.inner(xd-yd,t)) < 0: t = -t
    # compute plane depth
    nd = -np.dot(tp(wRd),[np.sin(n),0,np.cos(n)])
    dep = ddep*np.inner(dray,nd)
    pd = np.mean(dep)
    valid = valid and np.std(dep/pd) < 0.1
    # set parameters and refine
    prm = np.append(np.abs(k)*np.dot(wRd,t),[qYaw,pd])
    if valid: prm = lsqH_dtq(prm,qray,dray,ddep,constants)
    valid = valid and geom.vecnorm(prm[:3]) < 5
    return prm, valid
Code example #41
File: ssa.py  Project: jibbals/EOF
 def __init__(self, ts, M=None):
     self.data=ts
     
     if M is None: 
         M=int(len(ts)/4.0)
     self.M = M
     N=len(ts)
     self.N = N
     
     # create M dimensional phase spaces
     X = np.zeros([M,N-M+1])
     X_norm = np.zeros([M,N-M+1])
     
     for i in range(M):
         X[i,:] = ts[i:(N-M+1+i)]
         X_norm[i,:] = X[i,:] - np.mean(X[i,:])
     self.trajectory = X
     
     # AUTO COVARIANCE MATRIX
     self.R = mm(X_norm, tp(X_norm)) / (N-M+1)
     self.cov = np.cov(X)
     
     # eigen stuff, Principal components
     self.evals, self.evecs = npla.eig(self.R)
     self.PC = mm(tp(self.evecs), X_norm)
     
     # scale this to unbias it, convolution end points are based on fewer additions
     RC=np.zeros([M,N])
     for col in range(M):
         # use convolution of the col-th PC with the col-th eigenvector
         # to get the reconstructed component, then unbias each time step
         RCconv = np.convolve(self.PC[col,:],self.evecs[:,col])[:N]
         for t in range(N):
             if t < M-1:
                 RC[col,t] = RCconv[t]/float(t+1)
             elif t < N-M+1:
                 RC[col,t] = RCconv[t]/float(M)
             else:
                 RC[col,t] = RCconv[t]/float(N-t)
     self.RC=RC
     assert (np.sum(np.abs(np.sum(RC,axis=0) - ts)) < 0.001).all(), "Reconstruction failed"
Code example #42
def compH_tn(qray,dray,ddep,constants):
    # set variables
    wRq, wRd, qYaw, nYaw = constants
    dRq = np.dot(tp(wRd),wRq)
    xd, yq = dray, qray
    yd = tp(np.dot(dRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yd,xd)
    # compute homography parameters
    t = geom.normalrows(np.cross(tn[0,:],tn[1,:])) # homography translation
    m = geom.vecnorm(tn)/(geom.vecnorm(np.cross(yd,t))*geom.vecnorm(xw[:,[0,2]]))
    f = np.arctan2(xw[:,0],xw[:,2])
    errf = lambda prm,argm,argf: prm[0]-argm/np.cos(prm[1]-argf)
    kn_init = np.array([1.2*np.mean(m),np.mean(f)])
    k, n = tuple( opt.leastsq(errf,kn_init,args=(m,f),warning=False)[0] )
    valid = np.std( m/(k*np.cos(n-f)) ) < 0.1
    fe = np.mod(n-np.mean(f),2*np.pi)
    if np.abs(fe) < np.pi/2: n = np.mod(n+np.pi,2*np.pi)
    if np.mean(np.inner(xd-yd,t)) < 0: t = -t
    # set parameters and refine
    prm = np.append(k*np.dot(wRd,t),180/np.pi*n)
    valid = valid and geom.vecnorm(prm[:3]) < 5
    return prm, valid
Code example #43
def compH_t(qray,dray,ddep,constants):
    # set variables
    wRq, wRd, qYaw, nYaw = constants
    dRq = np.dot(tp(wRd),wRq)
    xd, yq = dray, qray
    yd = tp(np.dot(dRq,tp(yq)))
    xw = tp(np.dot(wRd,tp(xd)))
    tn = np.cross(yd,xd)
    n = nYaw * np.pi/180 # homography normal bearing
    # compute homography parameters
    t = geom.normalrows(np.cross(tn[0,:],tn[1,:])) # homography translation
    m = geom.vecnorm(tn)/(geom.vecnorm(np.cross(yd,t))*geom.vecnorm(xw[:,[0,2]]))
    f = np.arctan2(xw[:,0],xw[:,2])
    errf = lambda prm,argm,argf,argn: prm[0]-argm/np.cos(argn-argf)
    k_init = np.mean( m / np.cos(n-f) )
    k = tuple( opt.leastsq(errf,k_init,args=(m,f,n),warning=False)[0] )
#    k = np.mean( m / np.cos(n-f) )
    valid = np.std( m/(k*np.cos(n-f)) ) < 0.1
    if np.mean(np.inner(xd-yd,t)) < 0: t = -t
    # set parameters and refine
    prm = np.abs(k)*np.dot(wRd,t)
    valid = valid and geom.vecnorm(prm[:3]) < 5
    return prm, valid
Code example #44
def scalefrom3d(matches, tray, wRq):

    # extract inliers
    imask = matches['imask']
    qray = matches['qray'][imask,:]
    w3d = matches['w3d'][imask,:]
    weights = matches['weight']

    # compute direction of 3d point from query image
    wray = tp(np.dot(wRq,tp(qray)))

    # set up Aq=k equation representing intersection of 2 lines
    numi = np.sum(imask)
    A = [ np.array([[ wray[i,2], -wray[i,0] ], [ tray[2], -tray[0] ]]) for i in range(numi) ]
    k = [ np.array( [ wray[i,2]*w3d[i,0]-wray[i,0]*w3d[i,2] , 0 ] ) for i in range(numi) ]

    # solve for intersection of 2 lines to get query location
    t_int = [ alg.solve(A[i],k[i]) for i in range(numi) ]
    
    # compute the corresponding scale factors attached to y
    idx = int(tray[2]>tray[0])
    scales = [ t_int[i][idx]/tray[2*idx] for i in range(numi) ]

    # find best collection of scale factors
    affprm = [ 2.5 , 0 ] # ransac loop chooses all scale factors within affprm[0]+s*affprm[1] meters of chosen scale factor s
    bconf, bnum, bmask = 0, 0, np.bool_(np.zeros(len(scales)))
    for i in range(len(scales)):
        s = scales[i]
        mask = np.abs(s-scales) < affprm[0] + affprm[1] * np.abs(s)
        if np.sum(weights[mask]) > bconf : bconf, bnum, bmask = np.sum(weights[mask]), np.sum(mask), mask
    tdist = np.mean(np.compress(bmask,scales))
    t = tdist*tray

    print '%.0f / %.0f matches used to compute scale.' % (bnum,numi)
    
    return t, tdist
Code example #45
def homerrf_tqn(prm,qray,dray,ddep,pr,wRd):
    # set variables
    dRq = np.dot(tp(wRd),geom.RfromYPR(prm[3],pr[0],pr[1]))
    td = np.dot(tp(wRd),prm[:3])
    nd = -np.dot(tp(wRd),[np.sin(prm[4]*np.pi/180),0,np.cos(prm[4]*np.pi/180)])
    H = np.dot(tp(dRq),np.eye(3,3)-np.outer(td,nd))
    # Compute homography error
    Hd = tp(np.dot(H,tp(dray)))
    err = np.append( qray[:,0]/qray[:,2]-Hd[:,0]/Hd[:,2] , qray[:,1]/qray[:,2]-Hd[:,1]/Hd[:,2] )
    return err
Code example #46
File: eof.py  Project: jibbals/EOF
 def __init__(self, data):
     # data array, 2 dimensional with rows as time series
     # self.rawdata=data
     # normalised somehow
     self.data = data
     
     # Covariance array
     self.R=np.cov(data)
     
     # eigen-values, vectors ( already normalised )
     evals,evecs = npla.eig(self.R) 
     self.evals, self.evecs = evals,evecs
     
     # Principal Components
     self.PC=np.matmul(tp(evecs),data) 
     assert (np.abs(data-np.matmul(evecs,self.PC)) < 0.001).all() , "something is bad"
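
The same EOF construction can be checked outside the class. A minimal sketch, assuming tp = numpy.transpose, npla = numpy.linalg, and rows of data as time series; the round trip works because the eigenvector matrix of the (symmetric) covariance is orthogonal:

import numpy as np
from numpy import transpose as tp
import numpy.linalg as npla

data = np.random.rand(4, 200)                    # 4 series, 200 time steps
R = np.cov(data)                                  # 4 x 4 covariance matrix
evals, evecs = npla.eig(R)
PC = np.matmul(tp(evecs), data)                   # principal components
print(np.allclose(data, np.matmul(evecs, PC)))    # True up to round-off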
Code example #47
def homerrf_dtqn(prm,qray,dray,ddep,pr,wRd):
    # set variables
    dRq = np.dot(tp(wRd),geom.RfromYPR(prm[3],pr[0],pr[1]))
    td = np.dot(tp(wRd),prm[:3])
    nd = -np.dot(tp(wRd),[np.sin(prm[4]*np.pi/180),0,np.cos(prm[4]*np.pi/180)])
    pd = prm[5]
    H = np.dot(tp(dRq),np.eye(3,3)-np.outer(td,nd))
    # Compute homography error
    Hd = tp(np.dot(H,tp(dray)))
    err = np.concatenate( [ qray[:,0]/qray[:,2]-Hd[:,0]/Hd[:,2] ,
                            qray[:,1]/qray[:,2]-Hd[:,1]/Hd[:,2] ,
                            alpha() * ( pd - ddep*np.inner(dray,nd) ) / pd ] )
    return err
Code example #48
def errH_dt(prm,qray,dray,ddep,constants):
    wRq, wRd, qYaw, nYaw = constants
    dRq = np.dot(tp(wRd),wRq)
    return np.sqrt(np.sum(np.reshape(homerrf_dt(prm,qray,dray,ddep,dRq,wRd,nYaw),[3,-1])**2,0))
Code example #49
def estimate_pose(C, Q, dbmatch, gtStatus=None):

    # settings
    param = C.pose_param
    runflag = param['runflag']
    np.seterr(all='ignore')
    Q.datafile = os.path.join(C.pose_param['resultsdir'],'data_'+Q.name+'.txt')
    open(Q.datafile,'w').close()

    #####-----    PRINT RUN DETAILS    -----#####
    run_info = os.path.join(param['resultsdir'],param['run_info'])
    open(run_info,'w').close()
    with open(run_info,'a') as ri:
        if runflag == 11:   print >>ri, 'Method: Yaw, planes from VPs. Scale computed with homography.'
        elif runflag == 10: print >>ri, 'Method: Yaw, planes from VPs. Scale computed after homography.'
        if param['cheat']:  print >>ri, 'Ground truth yaw and plane used (cheating).'
        print >>ri, 'Inlier base error threshold: %.3f' % param['inlier_error']
        print >>ri, 'Base iteration scale: %d' % param['ransac_iter']
    #####-----    PRINT RUN DETAILS    -----#####

    # get high res db image and sift paths
    dbinfo = os.path.join(C.hiresdir, dbmatch + '.info')
    dbimg = os.path.join(C.hiresdir,dbmatch+'.jpg')
    dbsift = os.path.join(C.hiresdir,dbmatch+'sift.txt')
    dbsource = render_tags.EarthmineImageInfo(dbimg, dbinfo)

    # Set Kd, wRd, and db position
    wx,wy = dbsource.image.size
    fov = dbsource.fov
    Kd = geom.cameramat(wx, wy, fov)
    Kdinv = alg.inv(Kd)
    y,p,r = dbsource.yaw, dbsource.pitch, dbsource.roll
    wRd = geom.RfromYPR(y,p,r) # db camera orientation (camera to world)
    olat,olon,oalt = dbsource.lat,dbsource.lon,dbsource.alt # database location

    # get high res query information
    qname = Q.name
    qimg = os.path.join(C.querydir,'hires',qname+'.jpg')
    qsift = os.path.join(C.querydir,'hires',qname+'sift.txt')
    qsource = render_tags.QueryImageInfo(Q.datasource)
    glat,glon = qsource.lat,qsource.lon
    gzx = geom.lltom(olat,olon,glat,glon)
    timename = qname[-12:-10]+qname[-9:-7]+qname[-6:-4]#+qname[-3:]

    # Set Kq
    wx,wy = qsource.image.size
    fov = qsource.fov
    Kq = geom.cameramat(wx, wy, fov)
    Kqinv = alg.inv(Kq)
    cyaw,p,r = qsource.yaw, qsource.pitch, qsource.roll # cyaw - cell phone yaw

    # get high res sift rematch
    matches = highresSift(C, Q, dbmatch)
    with open(Q.datafile,'a') as df:
        print >>df, 'Number of matches | number of queries | ratio: %.0f | %.0f | %.2f' % (matches['nmat'], matches['numq'], float(matches['nmat'])/matches['numq'])
        print >>df, ''
        
    # Get estimated ground truth query location and normal direction
    tlat, tlon, tnorm, tyaw = getGTpose(C, Q)
    qzx = geom.lltom(olat,olon,tlat,tlon)

    # get query yaw and plane yaw from vanishing point analysis
    yawforvp = tyaw if param['cheat'] else np.nan
    vyaw, vnorms = vp_analysis.getQNyaws(C, Q, qimg, dbimg, qsource, yawforvp) 

    # get dominant planes
    dplanes, psizes, planeprms = find_dbplanes(C, Q, dbmatch, Kdinv, wRd)
    
    # match vanishing point planes to database planes
    pyaws, planes, pconfs = combine_planes(runflag,vnorms,dplanes,psizes,planeprms)

    print 'VP and DB Planes: ' + str(np.int_(pyaws)) + ', ' + str(planes)

    with open(Q.datafile,'a') as df:
#        print >>df, 'Planes detected with vanishing points:'
        for i in range(len(pconfs)):
            perr = np.round(np.mod(pyaws[i]-tnorm,360))
            perr = perr if perr<180 else 360-perr
            print >>df, 'Plane Yaw | DB plane | Confidence | Error : %3.0f | %d | %.2f | %.0f' % (pyaws[i],0 if planes[i]<0 else planes[i],pconfs[i],perr)
        yerr = np.round(np.mod(vyaw-tyaw,360))
        yerr = yerr if yerr<180 else 360-yerr
        print >>df, 'VP Yaw | Confidence | Error : %3.0f | %.2f | %.0f' % (vyaw,np.nan,yerr)
        print >>df, 'Cell yaw | True yaw | Plane : %3.0f | %3.0f  | %3.0f' % (cyaw,tyaw,tnorm)
        print >>df, ''

    # Set yaw value to be used
    if runflag >= 10: # vanishing point methods
        if np.isnan(vyaw): yaw, yawerr = cyaw, 0
        else: yaw, yawerr = vyaw, 0
    else: yaw, yawerr = cyaw, 0 # set cell phone yaw to use, plane normal
    wRq = geom.RfromYPR(yaw,p,r) # camera orientation (camera to world)

    ### --- THIS IS FOR CHEATING --- ###
    if param['cheat']:
        if not np.isnan(tnorm):
            pyaws, planes, pconfs = np.append(pyaws,tnorm), np.append(planes,-1), np.append(pconfs,1)
        yaw, yawerr = tyaw, 0
        wRq = geom.RfromYPR(yaw,p,r) # camera orientation (camera to world)
    ### --- THIS IS FOR CHEATING --- ###

    # print pre-homography data to file
    vyaw_err = np.round(np.round(np.mod(tyaw-vyaw,360))) if not np.isnan(vyaw) else np.nan
    vyaw_err = vyaw_err if vyaw_err<180 else 360-vyaw_err
    dbears = np.mod( 180/np.pi*np.arctan2(planeprms[:,0],planeprms[:,2]) , 360 )
    print 'Computed / ground truth cell phone yaw: %.0f / %.0f' % (vyaw,tyaw)
    with open(os.path.join(param['resultsdir'],param['extras_file']),'a') as extras_file:
        print >>extras_file, '\t'.join([timename, '%.0f' % tnorm, '%.0f' % np.round(tyaw), '%.0f' % cyaw, '%.0f' % vyaw, '%.4f'%np.nan, str(vyaw_err)])
        print >>extras_file, '\t'.join([ '%.4f' % 0 for vnorm in vnorms ])
        print >>extras_file, '\t'.join([ '%.0f'   % vnorm  for vnorm  in vnorms  ])
        print >>extras_file, '\t'.join([ '%.0f'   % plane  for plane  in planes  ])
        print >>extras_file, '\t'.join([ '%.0f'   % dplane for dplane in dplanes ])
        print >>extras_file, '\t'.join([ '%.0f'   % dbear  for dbear  in dbears  ])
        print >>extras_file, '\t'.join([ '%.3f' % dnerr  for dnerr  in planeprms[:,4] ])

    # Fill out match information
    nmat = matches['nmat']
    matches['qray'] = geom.normalrows(tp(np.dot(Kqinv,np.append(tp(matches['q2d']),[np.ones(nmat)],0))))
    matches['dray'] = geom.normalrows(tp(np.dot(Kdinv,np.append(tp(matches['d2d']),[np.ones(nmat)],0))))
    matches = match_info(C, Q, matches, dbmatch, wRd)
    matches_start = matches.copy()

    # Solve for query pose using constrained image geometry
    init_matches = initMatches(matches.copy())
    matches['numi'], matches['hconf'] = 0, 0
    runflag, ntry, planechose = param['runflag'], 0, 0
    parameters = ( wRq, wRd, yaw, np.nan, runflag, param['inlier_error'], param['ransac_iter'], 10, True )
    if param['ransac_iter'] == 0:
        matches = init_matches
        matches['numi'], matches['hconf'] = 0, 0
        pose = np.zeros(6)
        pose[3:5] = np.nan
    elif runflag < 10:
        matches, pose = solveGeom(init_matches,parameters,yawerr)
    else:
        ntry = 1
        viter = -np.ones(3)
        parameters = ( wRq, wRd, yaw, np.nan, runflag, param['inlier_error'], param['ransac_iter'], 15, True )
        matches, pose, planechose = solveNorm(C,Q,dbmatch,pyaws,planes,init_matches,parameters,yawerr)
        viter[0] = matches['viter']
        if matches['numi'] == 0 or matches['hconf'] == 0:
            ntry = 2
            parameters = ( wRq, wRd, yaw, np.nan, runflag, 3*param['inlier_error'], param['ransac_iter'], 10, True )
            matches, pose, planechose = solveNorm(C,Q,dbmatch,pyaws,planes,init_matches,parameters,yawerr)
            viter[1] = matches['viter']
        if matches['numi'] == 0 or matches['hconf'] == 0:
            ntry, planechose = 3, 0
            parameters = ( wRq, wRd, yaw, np.nan, 7, 3*param['inlier_error'], param['ransac_iter'], 10, True )
            matches, pose = solveYaw(init_matches,parameters,yawerr)
            viter[2] = matches['viter']
        if matches['numi'] == 0 or matches['hconf'] == 0:
            ntry, planechose = 4, 0

    # save matches to disk
    matches_file = os.path.join(param['resultsdir'],'matches_'+qname+'.pkl')
    matches_out = open(matches_file,'wb')
    pickle.dump(matches,matches_out)
    matches_out.close()
    
    # extract pose parameters
    comp_runflag = matches['runflag']
    tray = pose[:3]
    comp_yaw = pose[3]
    comp_pyaw = pose[4] if runflag>=0 else np.nan
    scale = pose[5] if runflag>=0 else np.nan

    # Get scaled translation for query location
    if np.isnan(scale):
        wRq_pr = geom.YPRfromR(wRq)[1:]
        comp_wRq = geom.RfromYPR(comp_yaw, wRq_pr[0], wRq_pr[1])
        qloc = scalefrom3d(matches, tray, comp_wRq)[0]
    else: # scale calculated in RANSAC loop
        qloc = scale*tray

    # temporarily get yaw error
    qyaw_error = np.round(abs(np.mod(tyaw-comp_yaw,360)))
    qyaw_error = qyaw_error if qyaw_error<180 else abs(qyaw_error-360)

    # compute location errors wrt estimated query locations
    loc_err = ( (qloc[0]-qzx[1])**2 + (qloc[2]-qzx[0])**2 )**0.5
    gps_err = ( (gzx[1] -qzx[1])**2 + (gzx[0] -qzx[0])**2 )**0.5

    # compute the angle difference between T and ground truth translation
    tyaw_error = np.round(abs( 180/np.pi * np.arccos( np.abs(qloc[0]*qzx[1]+qloc[2]*qzx[0]) / (alg.norm([qloc[0],qloc[2]])*alg.norm(qzx)) ) ))

    # compute the plane normal angle error
    nyaw_error = np.nan if np.isnan(comp_pyaw) or np.isnan(tnorm) else np.mod(np.round(abs(comp_pyaw-tnorm)),180)
    nyaw_error = nyaw_error if nyaw_error<90 else abs(nyaw_error-180)

    # write pose estimation results to file
    yaw_err = np.nan
    pose_file = os.path.join(param['resultsdir'],param['pose_file'])
    with open(pose_file,'a') as pf:
        print >>pf, '\t'.join([qname, str(loc_err), str(gps_err), \
            str(tyaw_error), str(qyaw_error), str(nyaw_error), str(matches['numi']), \
            str(matches['numq']), str(matches['nmat']), str(matches['hconf']), \
            str(qloc[0]), str(qloc[2]), str(yaw_err), str(runflag)])

    # print post-homography data to file
    with open(Q.datafile,'a') as df:
        print >>df, ''
        print >>df, '------------------'
        print >>df, ''
        if ntry==1:   print >>df, 'Homography solution using low error threshold with restrictions.'
        elif ntry==2: print >>df, 'Homography solution using high error threshold with restrictions.'
        else:         print >>df, 'Solution not found. Setting T=0.'
        if planechose==0: print >>df, 'Solution formed with unset plane normal.'
        else: print >>df, 'Solution formed with plane normal %d.' % planechose
        print >>df, 'VP yaw | Computed yaw | Actual Yaw | Error : %3.0f | %3.0f | %3.0f | %3.0f' % (vyaw,comp_yaw,tyaw,qyaw_error)
        print >>df, 'Computed Normal | Actual Normal | Error : %3.0f | %3.0f | %3.0f' % (comp_pyaw,tnorm,nyaw_error)
        print >>df, 'Translation   (x|y|z): %.1f | %.1f | %.1f' % (qloc[0],qloc[1],qloc[2])
        print >>df, 'True position (x|-|z): %.1f |  -  | %.1f' % (qzx[1],qzx[0])
        print >>df, 'Angle error | Location error: %.0f | %.1f' % (tyaw_error,loc_err)
        print >>df, 'Number of Inliers | Total matches | Ratio: %d | %d | %.2f' % (matches['numi'],matches['nmat'],np.nan if matches['nmat']==0 else float(matches['numi'])/matches['nmat'])
        print >>df, 'Reprojection error | Homography confidence: %.3f | %.1f' % (matches['rperr'],matches['hconf'])
        print >>df, 'Valid Homographies | Iterations | Ratio: %d | %d | %.3f' % (matches['viter'],matches['niter'],np.nan if matches['niter']==0 else float(matches['viter'])/matches['niter'])
        print >>df, ''
        print >>df, '------------------'
        print >>df, ''
        booleans = [ loc_err<5, loc_err<10, not(5<np.mod(vyaw-tyaw,360)<355), \
                     not(10<np.mod(vyaw-tyaw,360)<350), \
                     not(5<np.mod(comp_yaw-tyaw,360)<355), ntry==1, ntry!=3, \
                     planechose!=0, matches['nmat']!=matches_start['nmat'], \
                     0 if planechose==0 else pconfs[planechose-1]>0, comp_yaw-vyaw ]
        print >>df, '|'.join(['%.0f' % (b) for b in booleans])

    # draw matches
    close = int(loc_err<5) + int(loc_err<10)
    yawclose = int(not(5<np.mod(vyaw-tyaw,360)<355)) + int(not(10<np.mod(vyaw-tyaw,360)<350))
    imgpath = os.path.join( param['resultsdir'] , qname + ';locerr=%.2f' % (loc_err) + ';locPerf_' + str(close) \
        + ';yawPerf_' + str(yawclose) + ';nplanes_' + str(len(pyaws)) + ';plane_' + str(planes[planechose]) + ';try_' + str(ntry) \
        + ';tAng=%.0f' % (tyaw_error) + ';qAng=%.0f' % (qyaw_error) + ';nAng=%.0f' % (nyaw_error) + ';' + dbmatch + '.jpg')
    draw_matches(matches, qimg, dbimg, imgpath)
#    imgpath = os.path.join( param['resultsdir'] , 'homography;' + qname + ';' + dbmatch + '.jpg')
#    draw_hom(matches, pose, wRq, wRd, Kq, Kd, qimg, dbimg, imgpath)
    if C.QUERY == 'oakland1': C.pose_param['draw_tags'] = False
    if C.pose_param['draw_tags']: draw_tags(C, Q, matches, pose, dbmatch, olat, olon, Kd, Kq)

    print 'Computed yaw / ground truth yaw        : %.0f / %.0f' % (comp_yaw,tyaw)
    if runflag < 10: print 'Computed normal bearing / ground truth : %.0f / %.0f' % (comp_pyaw,tnorm)
    print 'Computed query location relative to db     : %.1f, %.1f, %.1f' % tuple(qloc)
    print 'Ground truth query location relative to db : %.1f,  - , %.1f' % (qzx[1],qzx[0])

    input = (wRq, wRd)

    return qloc, loc_err, matches, input
Code example #50
def VPQfromQuery(C, Q, qimg, qsource, vps, vnorms, vpconfs, vp_threshold, tyaw):

    # get query vanishing points
    qname = os.path.basename(qimg)
    qpath = os.path.join(C.querydir, 'hires', 'lsd', qname[:-4] + '.lsd')
    Kq, wRq = viewparam(qsource,tyaw)
    qmid, qleq, qlen = LfromLSD(qpath, qimg, Kq)
    qvps, conf, qcent, seedlens = VPfromSeeds(qmid, qleq, qlen, wRq, vp_threshold)
    nqvps = len(conf)

    #####  combine candidate vanishing points and vp from db   #####
    #####  into an estimate of the true query yaw orientation  #####

    # map vanishing points to world frame
    qvps = tp(np.dot(wRq,tp(qvps)))
    
    # align vanishing points based on normal and compute normals
    qnorms = geom.normalrows(np.cross(qvps,[0,1,0]))
    for i in range(len(conf)):
        if np.dot(tp(wRq),qnorms[i,:])[2] > 0:
            qnorms[i,:] *= -1
            qvps[i,:] *= -1

    # find optimal alignment of vanishing points
    cyaw = geom.YPRfromR(wRq)[0] # cell phone yaw
    byaw, bconf, bvps, bnorms, bvpconfs, nvps = np.nan, 0, vps, vnorms, vpconfs, len(vpconfs)

#    print '------------------------'
#    print vpconfs
#    print np.mod( vnorms , 360)
#    print conf
#    print np.mod( 180/np.pi * np.arctan2(qnorms[:,0],qnorms[:,2]) , 360 )
#    print '------------------------'
    qnormyaws = 180/np.pi * np.arctan2(qnorms[:,0],qnorms[:,2])
    for i in range(len(vpconfs)):
        for j in range(len(conf)):
            # compute relative yaw change
            vnormyaw = vnorms[i] #180/np.pi * np.arctan2(vnorms[i,0],vnorms[i,2])
            qnormyaw = qnormyaws[j]
            dyaw = vnormyaw - qnormyaw
            dyaw = dyaw if dyaw<180 else dyaw-360
            if abs(dyaw) > 50: continue # skip if the yaw change is too great
            # apply relative yaw change
            dR = geom.RfromYPR(dyaw,0,0)
            dvps, dnorms = tp(np.dot(dR,tp(qvps))), tp(np.dot(dR,tp(qnorms)))
            # get list of matching vanishing points
            dbidx, qidx, weights = np.zeros(0,np.int), np.zeros(0,np.int), np.zeros(0)
            # Gather list of aligned vanishing points
            for k in range(len(vpconfs)):
                vpalign = np.inner(dvps,vps[k,:])
                alignidx = np.argmax(vpalign)
                if vpalign[alignidx] < np.cos(np.pi/180*2*vp_threshold): continue
                dbidx, qidx = np.append(dbidx,k), np.append(qidx,alignidx)
                weights = np.append(weights,conf[alignidx]*vpconfs[k])
            # Optimize for the yaw change
            yawconf = np.sum(weights)
            if yawconf <= bconf: continue
            dyaws = np.mod(vnorms[dbidx]-qnormyaws[qidx],360)
            if dyaws[0] < 90: dyaws[dyaws>270] = dyaws[dyaws>270]-360
            elif dyaws[0] > 270: dyaws[dyaws<90] = dyaws[dyaws<90]+360
            dyaw = np.sum(weights*dyaws) / yawconf
            byaw, bconf, bvpconfs, nvps = np.mod(cyaw+dyaw,360), yawconf, np.ones(len(weights)), len(weights)
            bnorms = np.mod( qnormyaws[qidx] + dyaw , 360 )

    return byaw, bconf, bvps, bnorms, bvpconfs, nqvps
Code example #51
def alignVPcost(dyaw,vpmat1,vpmat2,weights):
    dR = geom.RfromYPR(dyaw[0],0,0)
    matchdot = np.sum( vpmat1*tp(np.dot(dR,tp(vpmat2))) , 1 )
    yaw_weight = 1 # np.cos(np.pi/180*dyaw[0])
    return -yaw_weight*np.sum(weights*matchdot)
Code example #52
        xdrawcircle(start,'red')
        xdrawline((start,stop),'green',width=3)
        xdrawcircle(stop,'red')

    ### draw homography boxes ###

    # compute box center and corners
    pd = matches['iprm'][-1]
    tw = pose[5]*pose[:3]
    cd, xd = np.array([.4,-.1,1]), geom.normalrows(np.array([.5,-.2,1]))
    cw, xw = np.dot(wRd,cd), np.dot(wRd,xd)
    nw = -np.array([np.sin(pose[4]*np.pi/180),0,np.cos(pose[4]*np.pi/180)])
    cw, xw = pd/np.dot(nw,cw)*cw, pd/np.dot(nw,xw)*xw
    trw, brw, tlw, blw = xw.copy(), xw.copy(), 2*cw-xw, 2*cw-xw
    brw[1], tlw[1] = blw[1], trw[1]
    trq, brq, tlq, blq = np.dot(tp(wRq),trw-tw), np.dot(tp(wRq),brw-tw), np.dot(tp(wRq),tlw-tw), np.dot(tp(wRq),blw-tw)
    trd, brd, tld, bld = np.dot(tp(wRd),trw), np.dot(tp(wRd),brw), np.dot(tp(wRd),tlw), np.dot(tp(wRd),blw)
    # draw query box
    trp, brp, tlp, blp = np.dot(Kq,trq), np.dot(Kq,brq), np.dot(Kq,tlq), np.dot(Kq,blq)
    trp, brp, tlp, blp = (trp/trp[2])[:2], (brp/brp[2])[:2], (tlp/tlp[2])[:2], (blp/blp[2])[:2]
    xdrawline((scale*trp,scale*brp),'green',off=0,width=10)
    xdrawline((scale*brp,scale*blp),'green',off=0,width=10)
    xdrawline((scale*blp,scale*tlp),'green',off=0,width=10)
    xdrawline((scale*tlp,scale*trp),'green',off=0,width=10)
    # draw database box
    trp, brp, tlp, blp = np.dot(Kd,trd), np.dot(Kd,brd), np.dot(Kd,tld), np.dot(Kd,bld)
    trp, brp, tlp, blp = (trp/trp[2])[:2], (brp/brp[2])[:2], (tlp/tlp[2])[:2], (blp/blp[2])[:2]
    xdrawline((trp,brp),'green',off=off,width=10)
    xdrawline((brp,blp),'green',off=off,width=10)
    xdrawline((blp,tlp),'green',off=off,width=10)
    xdrawline((tlp,trp),'green',off=off,width=10)
Code example #53
def VPfromSeeds(midpts, lineqs, lengths, Rot, tol):

    # Generate seeds from rotation matrix
    total_len = np.sum(lengths)
    seed_tol = 2*tol
    yaw, Rpr = geom.YfromR(Rot)
    angles = np.arange(0,180,3)
    nvps = len(angles)
    vps, vlens = np.zeros((nvps,3)), np.zeros(nvps)
    for i in xrange(nvps):
        vps[i,:] = np.dot( tp(Rpr) , [np.sin(np.pi/180*angles[i]),0,np.cos(np.pi/180*angles[i])] )

    # Iterate through each line and assign its weight to top N seeds
    nseeds = 3
    for i in xrange(len(lengths)):
        midpt, lineq, length = midpts[i,:], lineqs[i,:], lengths[i]
        line_bear = np.arctan(lineq[0]/lineq[1])
        vp_bear = np.arctan( (midpt[1]-vps[:,1]/vps[:,2]) / (vps[:,0]/vps[:,2]-midpt[0]) )
        dbear = np.mod(line_bear-vp_bear,np.pi)
        dbear = dbear + (dbear>np.pi/2) * (np.pi-2*dbear)
        vpdist = geom.vecnorm(geom.vecsub(geom.vecdiv(vps,vps[:,2],0),midpt,1))
        mask = vpdist < length/2
        dbear[mask] = np.pi
        minidx = np.argsort(dbear)[:nseeds]
        vlens[minidx] += length/total_len

    # Pick true vanishing points from seeds
#    if True:
#        file = '/media/DATAPART2/ah/pose_runs/tmp.txt'
#        open(file,'w').close()
#        with open(file,'a') as f:
#            for tmp in vlens: print >>f, '%.3f' % tmp
    seedlens = vlens
    neighbors = np.amax([np.roll(vlens,-3),np.roll(vlens,-2),np.roll(vlens,-1), \
                         np.roll(vlens,1),np.roll(vlens,2),np.roll(vlens,3)],0)
    localmax = vlens > neighbors
    random_fraction = float(nseeds) / len(angles)
    lengthmask = vlens > 1.5*random_fraction
    vpmask = np.logical_and(localmax,lengthmask)
    vps, vlens, nvps = vps[vpmask,:], vlens[vpmask], np.sum(vpmask)

    # Guided matching to refine the vanishing points
    vcents = np.zeros((nvps,3))
    maxiter = 10
    for i in xrange(nvps):
        vp, vlen = vps[i,:], vlens[i]
        gmtol, gmit, llen = tol, 0, 0
        while vlen!=llen and gmit<maxiter:
            gmit += 1
            linemask = LfromVP(vp,midpts,lineqs,lengths,gmtol)
            vp = VPfromLines(lineqs[linemask,:])
            vlen, llen = np.sum(lengths[linemask]), vlen
        vcent = np.mean(midpts[linemask,:],0)
        vps[i,:], vlens[i], vcents[i,:] = vp, vlen, vcent
    vlens = vlens / total_len

    # eliminate vanishing points without a significant contribution from lines
    keepmask = vlens > 5.0/90  # "random" line length expected (float division; 5/90 truncates to 0 under Python 2)
    vps, vlens, vcents = vps[keepmask,:], vlens[keepmask], vcents[keepmask,:]

    # flip the sign of each vanishing point so that the normal given by cross(vp, down) faces toward the camera
    for i in range(len(vlens)):
        vpnorm = np.cross( vps[i,:] , np.dot(tp(Rpr),[0,1,0]) )
        if vpnorm[2] > 0:
            vps[i,:] *= -1
    
    return vps, vlens, vcents, seedlens
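
A hedged wiring sketch of how VPfromSeeds is fed in the surrounding examples; lsd_path, img_path, Kcal, wRcam and the 5-degree tolerance are illustrative placeholders, and tp is assumed to be numpy.transpose as elsewhere in these snippets:

import numpy as np
from numpy import transpose as tp  # assumed alias, matching its use in these snippets

# line segments in the image frame, then candidate vanishing points from the seeds
midpts, lineqs, lengths = LfromLSD(lsd_path, img_path, Kcal)
vps, vlens, vcents, seedlens = VPfromSeeds(midpts, lineqs, lengths, wRcam, 5.0)
world_vps = tp(np.dot(wRcam, tp(vps)))  # rotate camera-frame vps into the world frame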
Code Example #54
def VPNfromDatabase(C, Q, dimg, vp_threshold):

    main_bias, off_bias = 1, 0
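    # with off_bias = 0 only the retrieved view is processed (the branch below); a nonzero
    # off_bias would also pull lines from the two neighboring views handled in the else-branch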

    if off_bias == 0:

        dname = os.path.basename(dimg)
        himg, dinfo, dpath = os.path.join(C.hiresdir, dname[:-4] + '.jpg'), \
                             os.path.join(C.hiresdir, dname[:-4] + '.info'), \
                             os.path.join(C.hiresdir, dname[:-4] + '.lsd')
        dsource = render_tags.EarthmineImageInfo(himg,dinfo)
        Kd, wRd = viewparam(dsource,np.nan)
        dmid, deqs, dlen = LfromLSD(dpath,himg,Kd)
        dvps, dcon, dcent, dseeds = VPfromSeeds(dmid, deqs, dlen, wRd, vp_threshold)
        vps, conf, cent = tp(np.dot(wRd,tp(dvps))), dcon, dcent
        nvps = len(conf)
        if nvps == 0: return np.zeros((0,3)), np.zeros((0,3)), np.zeros(0), np.zeros(0), 0 # return early if no vanishing points (vp count last, matching the final return)
    
    else:

        # get 3 database images
        dname = os.path.basename(dimg)
        view = int(dname[-6:-4])
        if view < 6: # right side of street
            limg, linfo, lpath = os.path.join(C.hiresdir, dname[:-6] + '02.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '02.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '02.lsd')
            cimg, cinfo, cpath = os.path.join(C.hiresdir, dname[:-6] + '03.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '03.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '03.lsd')
            rimg, rinfo, rpath = os.path.join(C.hiresdir, dname[:-6] + '04.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '04.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '04.lsd')
        else: # left side of street
            limg, linfo, lpath = os.path.join(C.hiresdir, dname[:-6] + '08.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '08.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '08.lsd')
            cimg, cinfo, cpath = os.path.join(C.hiresdir, dname[:-6] + '09.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '09.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '09.lsd')
            rimg, rinfo, rpath = os.path.join(C.hiresdir, dname[:-6] + '10.jpg'), \
                                 os.path.join(C.hiresdir, dname[:-6] + '10.info'), \
                                 os.path.join(C.hiresdir, 'lsd', dname[:-6] + '10.lsd')

        # build the image info objects for all three views (needed for both street sides)
        lsource = render_tags.EarthmineImageInfo(limg, linfo)
        csource = render_tags.EarthmineImageInfo(cimg, cinfo)
        rsource = render_tags.EarthmineImageInfo(rimg, rinfo)

        # extract view parameters
        Kl, wRl = viewparam(lsource,np.nan)
        Kc, wRc = viewparam(csource,np.nan)
        Kr, wRr = viewparam(rsource,np.nan)
        
        # get lines for each database image; image frame equations and segment lengths
        lmid, leqs, llen = LfromLSD(lpath, limg, Kl)
        cmid, ceqs, clen = LfromLSD(cpath, cimg, Kc)
        rmid, reqs, rlen = LfromLSD(rpath, rimg, Kr)

        # get candidate vanishing points from lines
        lvps, lcon, lcent, lseeds = VPfromSeeds(lmid, leqs, llen, wRl, vp_threshold)
        cvps, ccon, ccent, cseeds = VPfromSeeds(cmid, ceqs, clen, wRc, vp_threshold)
        rvps, rcon, rcent, rseeds = VPfromSeeds(rmid, reqs, rlen, wRr, vp_threshold)

        #####  combine candidate vanishing points into an estimate of the   #####
        #####  building faces' horizontal vanishing points and normals      #####

        # weight the matched view's confidences by main_bias, the off views' by off_bias, and zero the off views' centers
        if    view==2 or view==8  : lcon, ccon, rcon, ccent, rcent, ndvps, seedlens = main_bias*lcon, off_bias*ccon, off_bias*rcon, 0*ccent, 0*rcent, len(lvps), lseeds
        elif  view==3 or view==9  : lcon, ccon, rcon, lcent, rcent, ndvps, seedlens = off_bias*lcon, main_bias*ccon, off_bias*rcon, 0*lcent, 0*rcent, len(cvps), cseeds
        elif  view==4 or view==10 : lcon, ccon, rcon, lcent, ccent, ndvps, seedlens = off_bias*lcon, off_bias*ccon, main_bias*rcon, 0*lcent, 0*ccent, len(rvps), rseeds

        # map the vanishing points to the world frame (EDN - east/down/north) and combine all vps
        lvps, cvps, rvps = tp(np.dot(wRl,tp(lvps))), tp(np.dot(wRc,tp(cvps))), tp(np.dot(wRr,tp(rvps)))
        vps, conf, cent = np.concatenate( (lvps,cvps,rvps) , 0 ), np.concatenate((lcon,ccon,rcon)), np.concatenate( (lcent,ccent,rcent) , 0 )
        nvps = len(conf)
        if nvps == 0: return np.zeros((0,3)), np.zeros((0,3)), np.zeros(0), np.zeros(0), 0 # return early if no vanishing points (vp count last, matching the final return)

    # get normals and remove vanishing points indicating more than a ~18 degree incline
    normals = np.cross(vps,[0,1,0])
    mask = geom.vecnorm(normals) > 0.95
    vps, cent, normals, conf = vps[mask,:], cent[mask,:], geom.normalrows(normals[mask,:]), conf[mask]
    nvps = len(conf)

    # sort vanishing points by confidence
    sort = np.argsort(conf)
    vps, cent, conf = vps[sort[::-1],:], cent[sort[::-1],:], conf[sort[::-1]]

    # combine all vanishing points
    minconf = 0.2  # average 20% of line length in each image OR 50% of line length in retrieved image
    bvps, bcenters, bnorms, bconfs = np.zeros((0,3)), np.zeros((0,3)), np.zeros(0), np.zeros(0)
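    # greedy clustering: repeatedly take the most confident remaining vp, group every vp within
    # vp_threshold degrees of it, and accept the group only if its combined confidence clears
    # minconf; the group's vp is the confidence-weighted principal direction of its members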
    while len(conf)!=0:
        vp = vps[0,:]
        mask = np.inner(vps,vp) > np.cos(vp_threshold*np.pi/180)
        c = np.sum(conf[mask])/(2*off_bias+main_bias)
        if c > minconf:
            vp = geom.largestSingVector(geom.vecmul(vps[mask,:],conf[mask]))
            if np.inner(vps[0,:],vp) < 0: vp = -vp
            normal = np.cross(vp,[0,1,0])
            nyaw = np.mod( 180/np.pi * np.arctan2(normal[0],normal[2]) , 360 )
            bvps = np.concatenate( (bvps,[vp]) , 0 )
            bnorms, bconfs = np.append(bnorms,nyaw), np.append(bconfs,c)
            centmask = np.logical_and(mask,cent[:,2]!=0)
            center = np.mean(cent[centmask,:],0)
            bcenters = np.concatenate( (bcenters,[center]) , 0 )
            keep = np.logical_not(mask)
            vps, conf, cent = vps[keep,:], conf[keep], cent[keep,:]
        else:
            vps, conf, cent = np.delete(vps,0,0), np.delete(conf,0), np.delete(cent,0,0)

    # sort best vanishing points by confidence
    if len(bconfs) == 0: return bvps, bcenters, bnorms, bconfs, nvps
    sort = np.argsort(bconfs)
    bvps, bcenters, bnorms, bconfs = bvps[sort[::-1],:], bcenters[sort[::-1],:], bnorms[sort[::-1]], bconfs[sort[::-1]]

    return bvps, bcenters, bnorms, bconfs, nvps
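
A hedged sketch of consuming the return values; the call arguments other than the function's own parameter names are illustrative, and it assumes (as the code above shows) that bnorms holds plane-normal yaws in degrees sorted by descending confidence:

bvps, bcenters, bnorms, bconfs, nvps = VPNfromDatabase(C, Q, dimg, 5.0)
if len(bconfs) > 0:
    dominant_normal_yaw = bnorms[0]  # best estimate of the dominant facade orientation in degrees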
Code Example #55
File: helper.py Project: famagusta/sca_python
def compute_pos_corr_matrix(X, N):
    '''computes positional correlation matrix'''
    pos_mat_prod = dot(tp(X), X)/N
    pos_avg_prod = dot(tp(matrix(mean(X, 0))), matrix(mean(X, 0)))
    pos_corr = npabs(pos_mat_prod - pos_avg_prod)
    return pos_corr
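
The snippet above relies on names imported elsewhere in helper.py. A self-contained usage sketch, assuming these lines sit in the same module as the function and that those names are the usual numpy functions; the data matrix here is random, purely for illustration:

import numpy as np
from numpy import dot, matrix, mean, transpose as tp, abs as npabs  # assumed helper.py imports

N, M = 100, 20                      # e.g. 100 aligned sequences, 20 positions
X = np.random.rand(N, M)            # illustrative stand-in for a numeric alignment matrix
C = compute_pos_corr_matrix(X, N)   # M x M matrix of absolute positional covariances
print(C.shape)                      # (20, 20)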