Example #1
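    # Run facial-landmark detection on img_file and return the landmark
    # coordinates reshaped into a (1, 30) test sample, or None if no face was found.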
    def set_test_data(self, img_file):
        self.lm = landmark.Landmark(img_file)
        self.lm.get_face_landmarks()
        if self.lm.coordinate is None:
            return None
        testItem = np.array(self.lm.coordinate).reshape((1, 30))
        return testItem
Example #2
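# Look up `file` with the chosen fingerprint engine and print the live result
# alongside the canonical fingerprint ID stored in the database for that file.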
def lookup(engine, file):
    if engine == "echoprint":
        fp = echoprint.Echoprint()
        model = echoprint.EchoprintModel
    elif engine == "chromaprint":
        fp = chromaprint.Chromaprint()
        model = chromaprint.ChromaprintModel
    elif engine == "landmark":
        fp = landmark.Landmark()
        model = landmark.LandmarkModel

    full_path = os.path.abspath(file)
    cur = db.session.query(db.FPFile).filter(db.FPFile.path == full_path)
    if cur.count() > 0:
        thefile = cur.one()
    else:
        raise Exception("Can't find the file in the database. Is it ingested?")
    cur = db.session.query(model).filter(model.file_id == thefile.id)
    if cur.count() > 0:
        thefp = cur.one()
    else:
        raise Exception(
            "Can't find the canonical FP for this file. Is it ingested?")

    res = fp.lookup(file)
    print "result for %s:" % file
    print res
    print "database reference:"
    print thefp.trid
Example #3
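# For every landmark position in LANDMARK_KEYWORDS, collect the keys of news
# items whose title or content matches its keywords and persist them as a
# Landmark record in the database.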
def perform_create_landmark():

    logging.info("Start to create landmark database")

    all_news = NewsModel.all()
    for latlng in landmark.LANDMARK_KEYWORDS:
        counter = 0
        keywords = landmark.LANDMARK_KEYWORDS[latlng]
        related_news_keys = []
        for news in all_news:
            result = is_keyword_in_news(keywords, news.title, news.content)
            if result == True:
                related_news_keys.append(str(news.key()))
                counter += 1

        ret = str(related_news_keys)

        lm = landmark.Landmark(latlng, related_news_keys)
        lm.writetodb()
        logging.info("Create database for " + str(latlng) +
                     " completed, it has " + str(counter) + " news")

    logging.info("All database for landmark are completed")

    return "Write to db complete"
Example #4
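    # Build the training set: for each tagged image, extract 30 facial-landmark
    # coordinates and pickle the (features, labels) arrays to ./model/traningData.pkl.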
    def set_traning_data(self, img_path, txt_path):
        if op.isfile("./model/traningData.pkl"):
            print("already exist training file")
            return
        print 'make training data...'
        pf = preprocessing.Preprocessing()
        pf.set_img_path(img_path)
        pf.set_txt_path(txt_path)
        tag_list = pf.get_tag_list()

        trX = []
        trY = []

        print 'find coordinate...'
        count = 0
        total = len(tag_list)
        for temp in tag_list:
            lm = landmark.Landmark(img_path + '/' + temp[0] + '.jpg')
            lm.get_face_landmarks()
            if lm.coordinate is None:
                continue
            trX.append(lm.coordinate.reshape((1, 30))[0])
            trY.append(FTYPE[temp[1]])
            count += 1
            sys.stdout.write('\r%d%%' % int(float(count) / float(total) * 100))
            sys.stdout.flush()

        print '\nTotal', count, 'data,', total - count, 'data missing'

        trainingItem = np.array(trX), np.array(trY)
        pickle.dump(trainingItem, open("./model/traningData.pkl", "wb"))
        print 'create traningData.pkl file'
Example #5
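# Fingerprint each file with the selected engine, ingest the fingerprint data,
# and record the resulting fingerprint ID in the database, creating the FPFile
# row if it does not exist yet.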
def main(engine, files):
    if engine == "echoprint":
        fp = echoprint.Echoprint()
        model = echoprint.EchoprintModel
    elif engine == "chromaprint":
        fp = chromaprint.Chromaprint()
        model = chromaprint.ChromaprintModel
    elif engine == "landmark":
        fp = landmark.Landmark()
        model = landmark.LandmarkModel

    for f in files:
        (fpid, data) = fp.fingerprint(f)
        fp.ingest_many([data])

        full_path = os.path.abspath(f)

        cur = db.session.query(db.FPFile).filter(db.FPFile.path == full_path)
        if cur.count() > 0:
            thefile = cur.one()
        else:
            thefile = db.FPFile(full_path)
            db.session.add(thefile)
            db.session.commit()

        thefp = model(thefile, fpid)
        db.session.add(thefp)
        db.session.commit()

        print "New fingerprint: %s (from %s)" % (fpid, f)
def add_landmark_arc_handler(req):
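    """Create req.number landmarks evenly spaced on an arc of radius req.radius
    between thetamin*pi and thetamax*pi, and add them to the shared
    incoming_landmarks set under its lock."""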
    global incoming_landmarks, incoming_landmarks_lock
    num = req.number
    rd = req.radius
    th1 = req.thetamin
    th2 = req.thetamax
    lmks = set()
    for th in np.linspace(np.pi * th1, np.pi * th2, num):
        ori = np.array([np.cos(th), np.sin(th)])
        pos = rd * ori
        lmk = lm.Landmark(pos=pos, ori=ori)
        lmks.add(lmk)
    incoming_landmarks_lock.acquire()
    if len(lmks) > 0:
        incoming_landmarks |= lmks
    incoming_landmarks_lock.release()
    return csv.AddLandmarkArcResponse()
Example #7
def add_landmark_arc_handler(req):
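    """Same arc construction as the handler above, but the landmarks are added
    to the global `landmarks` set and draw_landmarks_proxy is invoked to redraw
    the full set."""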
    global landmarks
    global lock
    num = req.number
    rd = req.radius
    th1 = req.thetamin
    th2 = req.thetamax
    lmks = set()
    for th in np.linspace(np.pi * th1, np.pi * th2, num):
        ori = np.array([np.cos(th), np.sin(th)])
        pos = rd * ori
        lmk = lm.Landmark(pos=pos, ori=ori)
        lmks.add(lmk)
    lock.acquire()
    landmarks |= lmks
    lock.release()
    msg = csv.DrawLandmarksRequest(
        name=None, landmarks=[lmk.to_msg() for lmk in landmarks])
    draw_landmarks_proxy(msg)
    return csv.AddLandmarkArcResponse()
Example #8
            arrow = plt.arrow(x,
                              y,
                              vec[0],
                              vec[1],
                              head_width=scale * 0.03,
                              head_length=scale * 0.03,
                              alpha=alpha,
                              facecolor=color,
                              edgecolor='#121f1f',
                              linewidth=2)
        return point, arrow


if __name__ == '__main__':
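    # Quick self-test: build a default landmark and sensor, print the perception,
    # coverage, and gradient values, then draw both on a 2-D plot.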
    import landmark as lm
    lmk = lm.Landmark()
    srr = Sensor()
    print srr.perception(lmk)
    print srr.coverage([lmk])
    print srr.per_pos_grad(lmk)
    print srr.cov_pos_grad([lmk])
    print srr.per_ori_grad(lmk)
    print srr.cov_ori_grad([lmk])
    plt.figure()
    plt.xlim(-5, 5)
    plt.ylim(-5, 5)
    lmk.draw()
    srr.draw()
    plt.grid()
    plt.show()
Example #9
import math

import numpy as np

import world
import ideal_robot
import agent
import landmark
import map

if __name__ == "__main__":
    world = world.World(time_span=10.0, time_interval=0.1, debug=False)

    strait_agent = agent.Agent(0.2, 0.0)
    circle_agent = agent.Agent(0.2, 10.0 / 180 * math.pi)

    robot1 = ideal_robot.IdealRobot(np.array([2, 3, math.pi / 6]).T,
                                    agent=strait_agent)
    robot2 = ideal_robot.IdealRobot(np.array([-2, -1, math.pi / 5 * 6]).T,
                                    agent=circle_agent,
                                    color="red")
    robot3 = ideal_robot.IdealRobot(np.array([0, 0, 0]).T, color="blue")

    world.append(robot1)
    world.append(robot2)
    world.append(robot3)

    m = map.Map()
    m.append_landmark(landmark.Landmark(2, -2))
    m.append_landmark(landmark.Landmark(-1, -3))
    m.append_landmark(landmark.Landmark(3, 3))
    world.append(m)

    world.draw()
Example #10
                ps = np.array([x, y, z_func(x, y)])
                Rs = Rs_func(x, y)
                zmat[j][i] = self.coverage(landmarks=landmarks, pos=ps, ori=Rs)
        print(zmat)
        cs = plt.contour(xvec, yvec, zmat, 20)
        plt.xlabel('$p_x$')
        plt.ylabel('$p_y$')
        #plt.axis('equal')
        plt.colorbar(cs)
        plt.grid()
        plt.savefig(filename)


if __name__ == '__main__':
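    # 3-D self-test: place a landmark at (1, 0, 0), print the sensor's perception,
    # coverage, and gradient values, then draw the landmark and sensor in 3-D axes.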
    import landmark as lm
    lmk = lm.Landmark(pos=np.array([1, 0, 0]))
    srr = Sensor()
    print srr.perception(lmk)
    print srr.coverage([lmk])
    print srr.per_pos_grad(lmk)
    print srr.cov_pos_grad([lmk])
    print srr.per_ori_grad(lmk)
    print srr.cov_ori_grad([lmk])
    plt.figure()
    ax = plt.gca(projection='3d')
    ax.set_xlim3d(-3, 3)
    ax.set_ylim3d(-3, 3)
    ax.set_zlim3d(-3, 3)
    lmk.draw()
    srr.draw()
    plt.grid()
Example #11
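# Lay NUM_LANDMARKS landmarks along a helix of radius RADIUS and height HEIGHT,
# orienting each one back toward the z-axis, and assign them all to the first
# named agent's initial set.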
for idx in range(NUM_LANDMARKS):
    th = 2.0 * np.pi * float(idx) / float(NUM_LANDMARKS) * float(
        NUM_TURNS) + np.pi / 2
    x = RADIUS * np.cos(th)
    y = RADIUS * np.sin(th)
    z = HEIGHT * float(idx) / float(NUM_LANDMARKS) + HEIGHT / 2.0
    pos = np.array([x, y, z])
    ori = np.eye(3)
    ori[0, 0] = -np.cos(th)
    ori[1, 0] = -np.sin(th)
    ori[0, 1] = np.sin(th)
    ori[1, 1] = -np.cos(th)
    rp.logwarn(ori)
    #ori[0:2,1] = [-np.sin(th), np.cos(th)]
    lmk = lm.Landmark(pos=pos, ori=ori)
    INIT_LANDMARKS.add(lmk)
    INIT_ASSIGNMENT[NAMES[0]].add(lmk)

if __name__ == '__main__':
    plt.figure()
    for lmk in INIT_LANDMARKS:
        lmk.draw()
    sns = sn.Sensor()
    sns.contour_plot(INIT_LANDMARKS, xlim=XLIM, ylim=YLIM)
    plt.xlim(XLIM)
    plt.ylim(YLIM)
    plt.show()

# NUM_LANDMARKS_SQRT = 25
# XLIM = (-3,3)
Example #12
def fast_slam_known_correspondence_turtlebot(v_c, w_c, ranges, bearings,
                                             correspondences, particles, dt, Q,
                                             alpha):
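    # FastSLAM with known correspondences: propagate each particle with
    # noise-perturbed controls, then for every measurement either initialize a
    # new landmark EKF or update the existing one, accumulating the particle
    # weight before normalization and low-variance resampling.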

    # scaling exponent, fitted experimentally to the number of range measurements
    exponent = -0.0556 * len(ranges) + 8.5556

    scale = 10**exponent

    counter = 0

    for i in range(len(particles)):
        v_c_cov = alpha[0] * v_c * v_c + alpha[1] * w_c * w_c
        w_c_cov = alpha[2] * v_c * v_c + alpha[3] * w_c * w_c
        x = [[particles[i].x], [particles[i].y], [particles[i].theta]]
        # print "x before: ", particles[i].x
        # print "y before: ", particles[i].y
        # print "theta before: ", particles[i].theta
        x_next = dynamics.propogate_next_state(
            x, v_c + stat_filter.noise(v_c_cov),
            w_c + stat_filter.noise(w_c_cov), dt)
        particles[i].x = copy.deepcopy(x_next[0][0])
        particles[i].y = copy.deepcopy(x_next[1][0])
        particles[i].theta = copy.deepcopy(dynamics.wrap_angle(x_next[2][0]))

        # print i
        # print "x_p0: ", particles[0].x
        # print "y_p0: ", particles[0].y
        # print "theta_p0: ", particles[0].theta
        # print "x: ", particles[i].x
        # print "y: ", particles[i].y
        # print "theta: ", particles[i].theta

        # pdb.set_trace()
        particles[i].weight = 1.0

        for k in range(len(ranges)):

            j = correspondences[k]

            if particles[i].seen.count(j) < 1:
                particles[i].seen.append(j)

                # print "particle len: ", len(particles)
                # print "range len: ", len(ranges)
                # counter += 1
                # print counter
                x = particles[i].x + ranges[k] * np.cos(bearings[k] +
                                                        particles[i].theta)
                y = particles[i].y + ranges[k] * np.sin(bearings[k] +
                                                        particles[i].theta)

                ldm = landmark.Landmark(j, x, y)
                particles[i].landmarks.append(ldm)

                H = calc_H(ldm, particles[i])[0]
                H_inv = np.linalg.inv(H)
                ldm.sigma = np.dot(np.dot(H_inv, Q), H_inv.T)
                # print "sigma: ", ldm.sigma
                # print "H: ", H
                ldm.weight = scale * 1.0 / len(particles)
                particles[i].weight *= ldm.weight
                # particles[i].weight = 1.0/len(particles)

            else:
                idx = particles[i].seen.index(j)

                ldm = particles[i].landmarks[idx]

                z_hat, H = calc_z_hat_and_H(ldm, particles[i])

                S = np.dot(np.dot(H, ldm.sigma), H.T) + Q
                K = np.dot(np.dot(ldm.sigma, H.T), np.linalg.inv(S))

                z = np.array([[ranges[k]], [bearings[k]]])

                res = np.array(z - z_hat)

                res[1][0] = dynamics.wrap_angle(res[1][0])

                mu = np.array([[ldm.x], [ldm.y]]) + np.dot(K, res)

                ldm.x = mu[0][0]
                ldm.y = mu[1][0]

                ldm.sigma = np.dot(np.eye(2) - np.dot(K, H), ldm.sigma)

                particles[i].landmarks[idx] = ldm

                # particles[i].weight = np.linalg.det(2*np.pi*S)*np.exp(-.5*np.dot(np.dot(res.T,np.linalg.inv(S)),res))[0][0]
                ldm.weight = scale * (np.linalg.det(2 * np.pi * S) * np.exp(
                    -.5 * np.dot(np.dot(res.T, np.linalg.inv(S)), res))[0][0])
                particles[i].weight *= ldm.weight

                # print "Weights: ", particles[i].weight, " For: ", i

        print "Weights: ", particles[i].weight, " For: ", i

    particles = fast_slam_particle.normalize_weights(particles)
    new_particles = stat_filter.low_variance_sampler(particles)

    return new_particles
Example #13
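# ROS setup for a mobile landmark follower node (gains, saturation limits, and a
# cmd_vel publisher), followed by code that seeds a square grid of landmarks and
# assigns them all to the first named agent.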
import coverage_planar_planner.msg as cms
import coverage_planar_planner.srv as csv
import std_msgs.msg as sms

import numpy as np
import threading as thd
import json
import os
import random as rdm
import rospy as rp

import landmark as lm
import sensor as sn
import footprints as fp
import utilities as uts

lock = thd.Lock()
landmark = lm.Landmark()
sensor = sn.Sensor(fp=fp.EggFootprint())

rp.init_node('mobile_landmark_follower_node')

XLIM = [float(elem) for elem in rp.get_param('xlim', "-0.4 2.4").split()]
YLIM = [float(elem) for elem in rp.get_param('ylim', "-2.0 1.4").split()]
SCALE = (XLIM[1] - XLIM[0] + YLIM[1] - YLIM[0]) * 0.5

KP = rp.get_param('position_gain', 0.3 * SCALE)
KN = rp.get_param('orientation_gain', 1.0)
SP = rp.get_param('velocity_saturation', 0.05 * SCALE)
SN = rp.get_param('angular_velocity_saturation', 0.3)

vel_pub = rp.Publisher('cmd_vel', cms.Velocity, queue_size=10)
#lmks_pub = rp.Publisher('landmarks', cms.LandmarkArray, queue_size=10)
import rospy as rp

NUM_LANDMARKS_SQRT = rp.get_param('num_landmarks_sqrt', 20)
XLIM = [float(elem) for elem in rp.get_param('xlim', "-0.4 2.4").split()]
YLIM = [float(elem) for elem in rp.get_param('ylim', "-2.0 1.4").split()]

rp.logwarn(XLIM)
rp.logwarn(YLIM)

NAMES = rp.get_param('/names').split()

LANDMARKS = dict()
for name in NAMES:
    LANDMARKS[name] = set()

rdm.seed(89)

for index in range(NUM_LANDMARKS_SQRT**2):
    x = float(index % NUM_LANDMARKS_SQRT) * (
        XLIM[1] - XLIM[0]) / NUM_LANDMARKS_SQRT + XLIM[0]
    y = float(index // NUM_LANDMARKS_SQRT) * (
        YLIM[1] - YLIM[0]) / NUM_LANDMARKS_SQRT + YLIM[0]
    pos = (x, y)
    ori = (1, 0)
    lmk = lm.Landmark(pos, ori)
    #landmarks[rdm.choice(landmarks.keys())].add(lmk)
    LANDMARKS[NAMES[0]].add(lmk)

if __name__ == '__main__':
    print LANDMARKS