Example #1
def estimate_next_pos(measurement, OTHER=None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""
    # TODO
#     if not OTHER:
#         OTHER = robot()
#     distance = distance_between(measurement, OTHER.sense())
#     x, y = measurement
#     dy = y - OTHER.y
#     dx = x - OTHER.x
#     last_heading = OTHER.heading
#     beta = atan2(dy, dx)
#     OTHER.heading = beta
#     turning = beta - last_heading
#     OTHER.move(turning, distance)   
#     xy_estimate = OTHER.sense()
#     OTHER = robot(measurement[0], measurement[1], beta)
#     return xy_estimate, OTHER
        
    if not OTHER:
        OTHER = robot()        
    dy = measurement[1] - OTHER.y
    dx = measurement[0] - OTHER.x
    beta = atan2(dy, dx)
    distance = distance_between(measurement, OTHER.sense())
    turning = beta - OTHER.heading
    heading = beta + turning
    xp = measurement[0] + distance * cos(heading)
    yp = measurement[1] + distance * sin(heading)
    OTHER = robot(measurement[0], measurement[1], beta)
    xy_estimate = (xp, yp)
    return xy_estimate, OTHER
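All of the Traxbot estimators in this collection share one calling convention: they take a noisy (x, y) measurement plus an opaque OTHER state and return the predicted next position together with the updated state. Below is a minimal driver sketch, assuming the robot class used throughout these examples (with .sense(), .move_in_circle(), .x and .y) and supplying the Euclidean distance_between helper the snippets rely on; the track function itself is illustrative and not part of any grading code.

from math import sqrt

def distance_between(point1, point2):
    """Euclidean distance between two (x, y) points."""
    x1, y1 = point1
    x2, y2 = point2
    return sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)

def track(target, estimator, steps=10):
    """Feed noisy measurements of `target` to `estimator` and print the error
    of each one-step-ahead prediction."""
    OTHER = None
    for _ in range(steps):
        measurement = target.sense()           # noisy (x, y) at time t
        prediction, OTHER = estimator(measurement, OTHER)
        target.move_in_circle()                # true state advances to t + 1
        print(distance_between(prediction, (target.x, target.y)))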
Example #2
    def __init_nodes__(self):
        self.ecm_hw = robot('ECM')
        self.psm1_hw = robot('PSM1')
        self.psm2_hw = robot('PSM2')
            
        #rospy.init_node('autocamera_node')
        
        self.logerror("start", debug=True)
        
        # Publishers to the simulation
        self.ecm_pub = rospy.Publisher('/dvrk_ecm/joint_states_robot', JointState, queue_size=10)
        self.psm1_pub = rospy.Publisher('/dvrk_psm1/joint_states_robot', JointState, queue_size=10)
        self.psm2_pub = rospy.Publisher('/dvrk_psm2/joint_states_robot', JointState, queue_size=10)
        
        # Get the joint angles from the simulation
        rospy.Subscriber('/dvrk_ecm/joint_states', JointState, self.add_ecm_jnt)
        rospy.Subscriber('/fakecam_node/camera_info', CameraInfo, self.get_cam_info)
        
        try:
            self.sub_psm1_sim.unregister()
            self.sub_psm2_sim.unregister()
            self.sub_psm1_hw.unregister()
            self.sub_psm2_hw.unregister()
        except Exception:
            pass
        if self.__AUTOCAMERA_MODE__ == self.MODE.hardware :
            # Get the joint angles from the hardware and move the simulation from hardware
            self.sub_psm1_hw = rospy.Subscriber('/dvrk/PSM1/state_joint_current', JointState, self.add_psm1_jnt)
            self.sub_psm2_hw = rospy.Subscriber('/dvrk/PSM2/state_joint_current', JointState, self.add_psm2_jnt)
            
            
        elif self.__AUTOCAMERA_MODE__ == self.MODE.simulation:
            # Get the joint angles from the simulation
            self.sub_psm1_sim = rospy.Subscriber('/dvrk_psm1/joint_states', JointState, self.add_psm1_jnt)
            self.sub_psm2_sim = rospy.Subscriber('/dvrk_psm2/joint_states', JointState, self.add_psm2_jnt)
            
            # If hardware is connected, subscribe to it and set the psm joint angles in the simulation from the hardware
            self.sub_psm1_hw = rospy.Subscriber('/dvrk/PSM1/state_joint_current', JointState, self.add_psm1_jnt_from_hw)
            self.sub_psm2_hw = rospy.Subscriber('/dvrk/PSM2/state_joint_current', JointState, self.add_psm2_jnt_from_hw)
            
            
        # Get the joint angles from MTM hardware
        ##rospy.Subscriber('/dvrk/MTML/position_joint_current', JointState, self.mtml_cb)
    #     rospy.Subscriber('/dvrk/MTMR/position_joint_current', JointState, add_psm1_jnt)
        
        # Detect whether or not the camera clutch is being pressed
##         rospy.Subscriber('/dvrk/footpedals/camera', Bool, self.camera_clutch_cb)
        
        # Move the hardware from the simulation
    #     rospy.Subscriber('/dvrk_psm1/joint_states', JointState, self.move_psm1)
    #     rospy.Subscriber('/dvrk_psm2/joint_states', JointState, self.move_psm2)
        
        if self.__DEBUG_GRAPHICS__ == True:
            # Subscribe to fakecam images
            rospy.Subscriber('/fakecam_node/fake_image_left', Image, self.left_image_cb)
            rospy.Subscriber('/fakecam_node/fake_image_right', Image, self.right_image_cb)
         
        # Publish images
        self.image_left_pub = rospy.Publisher('autocamera_image_left', Image, queue_size=10)
        self.image_right_pub = rospy.Publisher('autocamera_image_right', Image, queue_size=10)
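The node above is mostly publish/subscribe wiring between simulation and hardware topics. For readers unfamiliar with the pattern, here is a stripped-down, self-contained relay using the same rospy calls; the node name and the single topic pair are illustrative only.

import rospy
from sensor_msgs.msg import JointState

def relay():
    rospy.init_node('joint_state_relay')
    pub = rospy.Publisher('/dvrk_psm1/joint_states_robot', JointState, queue_size=10)

    def forward(msg):
        # republish the simulation joint state unchanged
        pub.publish(msg)

    rospy.Subscriber('/dvrk_psm1/joint_states', JointState, forward)
    rospy.spin()

if __name__ == '__main__':
    relay()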
Example #3
def next_move(hunter_position,
              hunter_heading,
              target_measurement,
              max_distance,
              OTHER=None):
    # This function will be called after each time the target moves.

    # The OTHER variable is a place for you to store any historical information about
    # the progress of the hunt (or maybe some localization information). Your return format
    # must be as follows in order to be graded properly.
    if not OTHER:
        OTHER = robot()
        OTHER.distance_history = []
    distance_history = OTHER.distance_history

    beta = get_heading(OTHER.sense(), target_measurement)
    distance = distance_between(target_measurement, OTHER.sense())
    distance_history.append(distance)
    mean_distance = sum(distance_history) * 1.0 / len(distance_history)
    turning = beta - OTHER.heading
    heading = beta + turning
    xp = target_measurement[0] + mean_distance * cos(heading)
    yp = target_measurement[1] + mean_distance * sin(heading)
    OTHER = robot(target_measurement[0], target_measurement[1], beta)
    OTHER.distance_history = distance_history
    xy_estimate = (xp, yp)

    heading_to_target = get_heading(hunter_position, xy_estimate)
    turning = heading_to_target - hunter_heading
    distance = distance_between(hunter_position, xy_estimate)
    if distance_between(hunter_position, xy_estimate) >= max_distance:
        distance = max_distance
        turning += pi / 6
    return turning, distance, OTHER
Example #4
def estimate_next_pos(measurement, OTHER=None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""
    alpha1 = None
    if (OTHER == None):
        r = robot(measurement[0], measurement[1],
                  random.random() * 90,
                  random.random() * 90, 1)

    elif (OTHER[1] == None):

        old_x, old_y = OTHER[0]
        x, y = measurement
        distance = distance_between(measurement, OTHER[0])
        alpha1 = atan2((y - old_y), (x - old_x))
        r = robot(measurement[0], measurement[1], alpha1,
                  random.random() * 90, distance)

    else:
        alpha1 = OTHER[1]
        old_x, old_y = OTHER[0]
        x, y = measurement
        distance = distance_between(measurement, OTHER[0])
        alpha2 = atan2((y - old_y), (x - old_x))
        turn = abs(alpha2 - alpha1)
        r = robot(measurement[0], measurement[1], alpha2, turn, distance)
        #print(r.sense())

    r.move_in_circle()
    x, y = r.sense()
    xy_estimate = [x, y]
    OTHER = [measurement, alpha1]
    # You must return xy_estimate (x, y), and OTHER (even if it is None)
    # in this order for grading purposes.
    return xy_estimate, OTHER
Example #5
def target_position_pred(target_measurement, OTHER=None):
    if OTHER is None:
        target_xy_pred = target_measurement
        number_measurement = 0
        heading = 0
        total_t = 0
        total_d = 0
        pred_robot = robot(0, 0, 0, 0, 0)
    else:
        prev_number_measurement = OTHER[0]
        prev_measurement = OTHER[1]
        prev_heading = OTHER[2]
        total_t = OTHER[3]
        total_d = OTHER[4]

        number_measurement = prev_number_measurement + 1
        d = distance_between(target_measurement, prev_measurement)
        total_d += d
        average_d = total_d / number_measurement

        heading = atan2(target_measurement[1] - prev_measurement[1],
                        target_measurement[0] - prev_measurement[0])
        t = heading - prev_heading
        total_t += angle_trunc(t)
        average_t = total_t / number_measurement

        pred_robot = robot(target_measurement[0], target_measurement[1],
                           heading, average_t, average_d)
        pred_robot.move_in_circle()
        target_xy_pred = pred_robot.sense()
    OTHER = [
        number_measurement, target_measurement, heading, total_t, total_d,
        target_xy_pred
    ]
    return pred_robot, OTHER
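angle_trunc is used here (and in several later examples) but never shown. A sketch of the usual helper, assumed here, which normalizes an angle into the range [-pi, pi):

from math import pi

def angle_trunc(a):
    """Normalize an angle to the range [-pi, pi)."""
    while a < 0.0:
        a += 2 * pi
    return ((a + pi) % (2 * pi)) - pi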
Example #6
def next_move(hunter_position, hunter_heading, target_measurement, max_distance, OTHER = None):
    if not OTHER:
        OTHER = robot()        
        OTHER.distance_history = []
    distance_history = OTHER.distance_history
    
    beta = get_heading(OTHER.sense(), target_measurement)
    distance = distance_between(target_measurement, OTHER.sense())
    distance_history.append(distance)
    mean_distance = sum(distance_history) * 1.0 / len(distance_history)
    turning = beta - OTHER.heading
    heading = beta + turning
    xp = target_measurement[0] + mean_distance * cos(heading)
    yp = target_measurement[1] + mean_distance * sin(heading)
    OTHER = robot(target_measurement[0], target_measurement[1], beta)
    OTHER.distance_history = distance_history
    xy_estimate = (xp, yp)
    
    heading_to_target = get_heading(hunter_position, xy_estimate)
    turning = heading_to_target - hunter_heading
    distance = distance_between(hunter_position, xy_estimate)
    if distance >= max_distance:
        distance = max_distance
        turning += pi / 6
        
    return turning, distance, OTHER
Example #7
 def __init__(self, factor = 4, fname="../../calibration_data/gauze_pts.p", simulate=False):
     self.psm1 = robot("PSM1")
     self.psm2 = robot("PSM2")
     self.pts = self.interpolation(self.load_robot_points(fname), factor)
     self.factor = factor
     self.nextpos = rospy.Publisher("/cutting/next_position_cartesian", Pose)
     self.simulate = simulate
     self.traj = []
Example #8
def test_init():
    global r1, r2
    r1 = robot('PSM1')
    r2 = robot('PSM2')

    r1.home()
    r2.home()

    return r1, r2
Example #9
    def __init__(self, ut=1 / 50):
        robot1 = robot(diam=30, m=0.1, fatr=1, ut=ut)
        robot2 = robot(fatr=1, ut=ut, x=50, y=50)

        self.robots = [robot1, robot2]
        self.width = 1000
        self.height = 600
        self.display = None
        self.angle = 0
        self.elasticity = 0.6
        self.key = ''
Example #10
 def __init__(self,
              factor=4,
              fname="../../calibration_data/gauze_pts.p",
              simulate=False):
     self.psm1 = robot("PSM1")
     self.psm2 = robot("PSM2")
     self.pts = self.interpolation(self.load_robot_points(fname), factor)
     self.factor = factor
     self.nextpos = rospy.Publisher("/cutting/next_position_cartesian",
                                    Pose)
     self.simulate = simulate
     self.traj = []
Example #11
def start_listening(exit,interval=.01):
    pos1, pos2 = None, None
    grip1, grip2 = None, None
    directory = E.get()
    if not os.path.exists(directory):
        os.makedirs(directory)
        os.makedirs(directory + "/left_endoscope")
        os.makedirs(directory + "/right_endoscope")
    open(directory + "/psm1.p", "w+").close()
    open(directory + "/psm2.p", "w+").close()

    psm1 = robot("PSM1")
    psm2 = robot("PSM2")

    time.sleep(1)

    count = 0

    while not exit.is_set():
        f = open(directory + "/psm1.p", "a")
        f2 = open(directory + "/psm2.p", "a")
        t = rospy.get_rostime()
        t = t.secs + t.nsecs/1e9
        pose1 = psm1.get_current_cartesian_position()
        pose2 = psm2.get_current_cartesian_position()
        pos1 = pose1.position
        pos2 = pose2.position
        rot1 = [pose1.tb_angles.yaw_deg, pose1.tb_angles.pitch_deg, pose1.tb_angles.roll_deg]
        rot2 = [pose2.tb_angles.yaw_deg, pose2.tb_angles.pitch_deg, pose2.tb_angles.roll_deg]
        joint1 = psm1.get_current_joint_position()
        joint2 = psm2.get_current_joint_position()
        masterpose1 = psm1.get_master_position_cartesian_current()
        masterpose2 = psm2.get_master_position_cartesian_current()
        masterjoint1 = psm1.get_master_joint_current()
        masterjoint2 = psm2.get_master_joint_current()
        grip1 = [joint1[-1] * 180 / np.pi]
        grip2 = [joint2[-1] * 180 / np.pi]
        one = [t] + list(pos1) + rot1 + list(grip1) + list(joint1) + list(masterpose1) + list(masterjoint1)
        two = [t] + list(pos2) + rot2 + list(grip2) + list(joint2) + list(masterpose2) + list(masterjoint2)
        pickle.dump(one, f)
        pickle.dump(two, f2)

        f.close()
        f2.close()

        count += 1

        time.sleep(interval)
Example #12
def initializeParticles(OTHER, measurement):
    particles = []
    avgTurn = 0
    avgDistance = 0

    distances = []
    headings = []
    turns = []
    for i in range(1, len(OTHER)):
        p1 = (OTHER[i][0], OTHER[i][1])
        p2 = (OTHER[i - 1][0], OTHER[i - 1][1])
        distances.append(distance_between(p1, p2))
        heading = atan2(p1[1] - p2[1], p1[0] - p2[0])
        headings.append(heading)

        if i > 1:
            turns.append(headings[i - 1] - headings[i - 2])

    avgDistance = sum(distances) / len(distances)
    avgTurn = sum(turns) / len(turns)
    print(avgTurn, avgDistance)
    predictedHeading = angle_trunc(headings[-1] + avgTurn)

    for i in range(500):
        x = random.uniform(measurement[0] - 2, measurement[0] + 2)
        y = random.uniform(measurement[1] - 2, measurement[1] + 2)
        heading = headings[-1]
        turning = avgTurn
        distance = avgDistance
        new_robot = robot(x, y, heading, turning, distance)
        new_robot.set_noise(0, 0, measurement_noise)
        particles.append(new_robot)

    return {'particles': particles, 'first': True}
Example #13
def genera_filtro(num_particulas, balizas, real, centro=[2, 2], radio=2):
    # Create the list of particle robots
    robots = []
    for i in range(num_particulas):
        # For each particle, draw a random position inside a circle of the given
        # radius around the centre; the robot's orientation is also drawn at random
        x_random_value = random.random()
        if (x_random_value > 0.5):
            x_random = centro[0] + random.random() * radio
        else:
            x_random = centro[0] - random.random() * radio
        y_random_value = random.random()
        if (y_random_value > 0.5):
            y_random = centro[1] + random.random() * radio
        else:
            y_random = centro[1] - random.random() * radio

        orientation_random = random.random() * 2 * pi
        # Create the robot and compute its associated weight.
        new_robot = robot()
        new_robot.set(x_random, y_random, orientation_random)
        new_robot.set_noise(.01, .01, .01)
        new_robot.measurement_prob(real.sense(balizas), balizas)
        # Add the robot to the set.
        robots.append(new_robot)
    return robots
Example #14
def main():
	position_check = ""
	while (position_check != "yes"):
		if position_check == 'q':	# Check if the user wants to exit the program
			print "I guess we don't feel like it right now :/"
			return
		#measurements =   [[-1,3.75,3.75],[-1,3.75,3.75],
		#				  [-1,3.75,3.75],[-1,3.75,3.75]] 
		#motions = [0,0,0,0]
		[motions, measurements] = wander_loop(50)
		pos = particle_filter(motions, measurements, 20000, [45,75,pi])
	
		print pos		
		position_check = raw_input("Is this Correct? (answer MUST be 'yes' for correct):")
		#pos = [35,35,pi]
	
	r = robot()
	theta = pos[2]
	if (theta >= 7*pi/4 or theta < pi/4):
		theta = 0
	elif (theta >= pi/4 and theta < 3*pi/4):
		theta = pi/2
	elif (theta >= 3*pi/4 and theta <5*pi/4):
		theta = pi
	else:
		theta = 3*pi/2
	r.set(pos[0], pos[1], theta)
	goal = r.goal01
	policy = r.dynamic_programming(goal)
	r.execute_path(policy, goal)
Example #15
def run(params, printflag = False):
    myrobot = robot()
    myrobot.set(0.0, 1.0, 0.0)
    speed = 1.0
    err = 0.0
    int_crosstrack_error = 0.0
    N = 100
    # myrobot.set_noise(0.1, 0.0)
    myrobot.set_steering_drift(10.0 / 180.0 * pi) # 10 degree steering error


    crosstrack_error = myrobot.y


    for i in range(N * 2):

        diff_crosstrack_error = myrobot.y - crosstrack_error
        crosstrack_error = myrobot.y
        int_crosstrack_error += crosstrack_error

        steer = - params[0] * crosstrack_error  \
            - params[1] * diff_crosstrack_error \
            - int_crosstrack_error * params[2]
        myrobot = myrobot.move(steer, speed)
        if i >= N:
            err += (crosstrack_error ** 2)
        if printflag:
            print myrobot, steer
    return err / float(N)
Example #16
def moveRobotDesiredPose(robotName,filename):
	r = robot(robotName)
	gripper1_Affines = playfileReader(filename)
	
	for goalPose in range(len(gripper1_Affines)):
		print gripper1_Affines[goalPose]
		r.move_cartesian_frame(gripper1_Affines[goalPose],True)
Example #17
def polygon(robotName):
    r = robot(robotName)

    input_sides = raw_input('How many sides : ')
    sides = int(input_sides)
    input_length = raw_input('What is the length : ')
    length = float(input_length)

    #get the interior degree of the polygon, in terms of radians
    interior_degrees = 360 / sides
    interior_radians = math.radians(interior_degrees)

    #find the distance of one point from the center
    interior_div2 = interior_radians / 2
    distance_from_center = length / math.sin(interior_div2)

    #calcuate where the next position should be, based on the previous position
    current_angle = 0
    while (current_angle <= (2 * math.pi)):
        #we made this program, based on a unit circle
        x_position = distance_from_center * math.cos(current_angle)
        y_position = distance_from_center * math.sin(current_angle)
        vec_1 = Vector(x_position, y_position, -0.15)
        r.move_cartesian_translation(vec_1, True)
        current_angle += interior_radians
Example #18
def initializeData(measurement):
    particles = []
    averageHeading = 0
    averageTurning = 0
    averageDistance = 0
    first = True
    for i in range(500):
        x = random.uniform(measurement[0] - 2, measurement[0] + 2)
        y = random.uniform(measurement[1] - 2, measurement[1] + 2)
        heading = 0.5
        turning = 2 * pi / 34.0
        distance = 1.5
        new_robot = robot(x, y, heading, turning, distance)
        new_robot.set_noise(0, 0, measurement_noise)
        particles.append(new_robot)

    data = {
        "particles": particles,
        "averageHeading": averageHeading,
        "averageTurning": averageTurning,
        "averageDistance": averageDistance,
        "first": first
    }

    return data
Example #19
def estimate_next_pos(measurement, OTHER=None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""

    if not OTHER:
        OTHER = []

    OTHER.append(measurement)

    if len(OTHER) >= 3:
        p0 = OTHER[-3]
        p1 = OTHER[-2]
        p2 = OTHER[-1]
        heading1 = get_heading(p0, p1)
        heading2 = get_heading(p1, p2)
        turning = heading2 - heading1
        distance = distance_between(p1, p2)

        # prediction
        r = robot(p2[0], p2[1], heading2, turning, distance)
        r.move_in_circle()
        xy_estimate = [r.x, r.y]
    else:
        xy_estimate = measurement

    return xy_estimate, OTHER
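get_heading is referenced by this estimator and by the hunter examples but never defined in the snippets. The conventional definition, assumed here, is the atan2 bearing from one point toward another:

from math import atan2

def get_heading(from_position, to_position):
    """Bearing, in radians, of to_position as seen from from_position."""
    from_x, from_y = from_position
    to_x, to_y = to_position
    return atan2(to_y - from_y, to_x - from_x)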
Example #20
def init_sphere():
    population_info={}
    for people in range(population):
        robot_sim=robot(init_pos=init_pos,nMass=nMass,edge=edge_length)
        mass=robot_sim.cube()
        population_info.update({people:{'sphere':robot_sim.genSphere(mass),'fitness':0}})
    return population_info
Example #21
def estimate_next_pos(measurement, OTHER = None):
    """Estimate the next (x, y) position of the bot
    based on (x, y) measurements."""
    if OTHER == None:
        OTHER = {'xy': measurement} # for tracking the previous (x, y)
        xy_estimate = (0.0,0.0)
    else:
        #calculate the heading of current bot
        distance = distance_between(OTHER['xy'],measurement) #distance between the 2 points
        heading = acos((measurement[0]-OTHER['xy'][0])/distance)
        if (measurement[1]-OTHER['xy'][1])<0:
            heading = -1*heading

        #print measurement, OTHER['xy'], heading*180/pi
        # Finding the turning direction
        if 'heading' not in OTHER.keys():
            turning = None
            xy_estimate = (0., 0.)
        else:
            turning = heading - OTHER['heading']

            dummy = robot(measurement[0], measurement[1], heading, turning, distance)
            dummy.set_noise(0., 0., 0.)
            dummy.move_in_circle()
            xy_estimate = (dummy.x, dummy.y)
        OTHER['heading'] = heading
        OTHER['xy'] = measurement
    # You must return xy_estimate (x, y), and OTHER (even if it is None)
    # in this order for grading purposes.
    return xy_estimate, OTHER
Example #22
def getters(robotName):
    """Here is where we initialize the robot, in this demo we called the robot r, and show how to use each methods.

    :param robotName: the name of the robot used """
    r = robot(robotName)

    #get_desired_cartesian_position() will return the desired positon
    #of the robot in terms of cartesian coordinates
    print 'get_desired_cartesian_position() : \n', r.get_desired_cartesian_position(
    )

    #get_current_cartesian_position() will return the current positon
    #of the robot in terms of cartesian coordinates
    print 'get_current_cartesian_position() : \n', r.get_current_cartesian_position(
    )

    #get_joint_number() will return the number of joints on the
    #specific arm used
    print 'get_joint_number() : ', r.get_joint_number()

    #get_desired_joint_position() will return the desired positon
    #of the robot in terms of joint coordinates
    print 'get_desired_joint_position() : \n', r.get_desired_joint_position()

    #get_current_joint_position() will return the current positon
    #of the robot in terms of joint coordinate
    print 'get_current_joint_position() : \n', r.get_current_joint_position()
Example #23
def estimate_next_pos(measurement, OTHER = None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""

    # You must return xy_estimate (x, y), and OTHER (even if it is None) 
    # in this order for grading purposes.
    if not OTHER:
        from collections import deque
        OTHER = {}
        # OTHER["robot"] = robot(measurement[0], measurement[0])
        OTHER["heading"] = deque([], 2)
        OTHER["distance"] = deque([], 2)

    OTHER["distance"].append(measurement)
    distance = distance_between(OTHER["distance"][-1], OTHER["distance"][0])

    heading = atan2(OTHER["distance"][-1][1] - OTHER["distance"][0][1], OTHER["distance"][-1][0] - OTHER["distance"][0][0])
    OTHER["heading"].append(heading)

    turning = OTHER["heading"][-1] - OTHER["heading"][0]

    r = robot(measurement[0], measurement[1], heading=heading)
    r.move(turning, distance)
    xy_estimate = (r.x, r.y)
    
    
    return xy_estimate, OTHER
Example #24
 def __init__(self,n):
     self.robots=[]
     self.popsize=n
     for i in range(self.popsize):
         self.robots.append(robot())
     self.matingpool=[]
     self.succes=False
Example #25
def getters(robotName):
    """Here is where we initialize the robot, in this demo we called the robot r, and show how to use each methods.

    :param robotName: the name of the robot used """
    r = robot(robotName)

    #get_desired_cartesian_position() will return the desired positon
    #of the robot in terms of cartesian coordinates
    print 'get_desired_cartesian_position() : \n', r.get_desired_cartesian_position()

    #get_current_cartesian_position() will return the current positon
    #of the robot in terms of cartesian coordinates
    print 'get_current_cartesian_position() : \n', r.get_current_cartesian_position()

    #get_joint_number() will return the number of joints on the
    #specific arm used
    print 'get_joint_number() : ', r.get_joint_number()

    #get_desired_joint_position() will return the desired positon
    #of the robot in terms of joint coordinates
    print 'get_desired_joint_position() : \n', r.get_desired_joint_position()

    #get_current_joint_position() will return the current positon
    #of the robot in terms of joint coordinate
    print 'get_current_joint_position() : \n', r.get_current_joint_position()
Example #26
def polygon(robotName):
    r = robot(robotName)

    input_sides = raw_input('How many sides : ')
    sides = int(input_sides)
    input_length = raw_input('What is the length : ')
    length = float(input_length)

    #get the interior degree of the polygon, in terms of radians
    interior_degrees = 360/sides
    interior_radians = math.radians(interior_degrees)
    
    #find the distance of one point from the center
    interior_div2 = interior_radians/2
    distance_from_center = length/math.sin(interior_div2)
    
    #calcuate where the next position should be, based on the previous position
    current_angle = 0
    while(current_angle <= (2*math.pi)):
        #we made this program, based on a unit circle
        x_position = distance_from_center*math.cos(current_angle)
        y_position = distance_from_center*math.sin(current_angle)
        vec_1 = Vector(x_position,y_position, -0.15)
        r.move_cartesian_translation(vec_1,True)
        current_angle+=interior_radians
Example #27
def pickup2(robotname):
    r = robot(robotname)
    r.move_cartesian([0.0, 0.0, -0.05])

    r.move_joint_list([0.0, 0.0, 0.15, 0.0, 0.0, 0.0, 0.0],
                      [0, 1, 2, 3, 4, 5, 6])

    r.open_gripper()
    time.sleep(1)
    r.delta_move_joint_list([math.pi / 4], [0])
    r.delta_move_joint_list([0.05], [2])
    r.close_gripper()
    time.sleep(1)
    r.delta_move_joint_list([-0.05], [2])
    r.delta_move_joint_list([-math.pi / 4], [0])
    r.open_gripper()
    time.sleep(1)

    r.delta_move_joint_list([-math.pi / 4], [0])
    r.delta_move_joint_list([0.05], [2])
    r.close_gripper()
    time.sleep(1)
    r.delta_move_joint_list([-0.05], [2])
    r.delta_move_joint_list([math.pi / 4], [0])
    r.open_gripper()
Example #28
def estimate_next_pos(measurement, OTHER = None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""
    if not OTHER: # this is the first measurement
        OTHER = []

    OTHER.append(measurement)   

    if len(OTHER) <= 2: #insufficient # of coordinates
        xy_estimate = measurement
    else:
        #Determine 'distance' and 'turning' between coordinates
        coordinate0 = OTHER[0]
        coordinate1 = OTHER[1]
        coordinate2 = OTHER[2]
        heading01 = calculateHeading(coordinate0, coordinate1)
        heading12 = calculateHeading(coordinate1, coordinate2)
        turning = heading12 - heading01
        distance = distance_between(coordinate1, coordinate2)
        
        #Assume 'distance' and 'turning' is the same for every pair of coordinates

        #Create new robot at last measurement location
        r = robot(measurement[0], measurement[1], heading12, turning, distance)
        r.set_noise(test_target.turning_noise, test_target.distance_noise, test_target.measurement_noise)
        r.move_in_circle()
        xy_estimate = (r.x, r.y)

    # You must return xy_estimate (x, y), and OTHER (even if it is None) 
    # in this order for grading purposes.
    return xy_estimate, OTHER 
Example #29
def plotting_cartesian(robotName):
    """Here is where we initialize the robot, in this demo we called the robot r, and we move plot the current position in cartesian space.

    :param robotName: the name of the robot used """
    global ro
    global ax
    global fig
    ro = robot(robotName)

    #here we are declaring what type of graph we are using
    #in this case we will be using a 3D graph
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    #here is where we set the title of the graph
    ax.set_title("Cartesian Position over time")

    #here is where we set the limits for the axis of the graph
    ax.set_xlim3d(0, 10)
    ax.set_ylim3d(0, 10)
    ax.set_zlim3d(0, 10)

    #here is where we set the x,y,z labels for the graph
    ax.set_xlabel('x - axis')
    ax.set_ylabel('y - axis')
    ax.set_zlabel('z - axis')

    #here is where we start the timer
    timer = fig.canvas.new_timer(interval=0)
    timer.add_callback(update_data, ())
    timer.start()

    plt.show(False)
Example #30
def testing(robotName):
	r = robot(robotName)
	for i in range (0, 10):
		if i % 2 == 0:
			sign = 1.0
		else:
			sign = -1.0
		r.delta_move_cartesian([sign*(0.1),0.0,0.0])
		r.delta_move_cartesian([sign*(-0.1),0.0,0.0])

	for i in range (0, 10):
		if i % 2 == 0:
			sign = 1.0
		else:
			sign = -1.0
		r.delta_move_cartesian([0.0,sign*(0.1),0.0])
		r.delta_move_cartesian([0.0,sign*(-0.1),0.0])

	for i in range (0, 10):
		if i % 2 == 0:
			sign = 1.0
		else:
			sign = -1.0
		r.delta_move_cartesian([0.0,0.0,sign*(0.1)])
		r.delta_move_cartesian([0.0,0.0,sign*(-0.1)])
Example #31
def plotting_cartesian(robotName):
    """Here is where we initialize the robot, in this demo we called the robot r, and we move plot the current position in cartesian space.

    :param robotName: the name of the robot used """
    global ro
    global ax
    global fig
    ro = robot(robotName)

    #here we are declaring what type of graph we are using
    #in this case we will be using a 3D graph
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    #here is where we set the title of the graph
    ax.set_title("Cartesian Position over time")

    #here is where we set the limits for the axis of the graph
    ax.set_xlim3d(0,10)
    ax.set_ylim3d(0,10)
    ax.set_zlim3d(0,10)

    #here is where we set the x,y,z labels for the graph
    ax.set_xlabel('x - axis')
    ax.set_ylabel('y - axis')
    ax.set_zlabel('z - axis')

    #here is where we start the timer
    timer = fig.canvas.new_timer(interval=0)
    timer.add_callback(update_data, ())
    timer.start()

    plt.show(False)
Example #32
def particle_filter(motions, measurements, N=50):
    p = []
    for i in range(N):
        r = robot()
        r.set_noise(0.0, 0.0, 0.0)
        p.append(r)
    for t in range(len(motions)):
        p2 = []
        for i in range(N):
            p2.append(p[i].move(motions[t]))
        p = p2
        
        # measurement update
        w = []
        for i in range(N):
            w.append(p[i].measurement_prob(measurements[t]))
        
        # resampling
        p3 = []
        index = int(random.random() * N)
        beta = 0.0
        mw = max(w)
        for i in range(N):
            beta += random.random() * 2.0 * mw
            while beta > w[index]:
                beta -= w[index]
                index = (index + 1) % N
            p3.append(p[index])
        p = p3
    
    return get_position(p)
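The resampling loop above (and the near-identical ones in later examples) is the so-called resampling wheel. Pulled out as a standalone helper for clarity; this is a sketch of the same algorithm, not a drop-in replacement for the inline versions:

import random

def resample(particles, weights):
    """Resampling wheel: draw len(particles) samples with probability
    proportional to the given weights."""
    n = len(particles)
    resampled = []
    index = int(random.random() * n)
    beta = 0.0
    max_weight = max(weights)
    for _ in range(n):
        beta += random.random() * 2.0 * max_weight
        while beta > weights[index]:
            beta -= weights[index]
            index = (index + 1) % n
        resampled.append(particles[index])
    return resampled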
Example #33
def estimate_next_pos(measurement, OTHER = None):

    xy_estimate = measurement

    if OTHER is None:
        distances = []
        angles = []
        coords = []
        turnAngle = []
    else:
        distances, angles, coords, turnAngle = OTHER

        if len(coords) == 1:
            hypotenuse1 = distance_between(coords[0], measurement)
            distances.append(hypotenuse1)

        elif len(coords) >= 2:
            point1 = coords[len(coords) - 2]
            point2 = coords[len(coords) - 1]
            point3 = measurement

            turnAngle.append(calculateRotationDirection(point1[0], point1[1], point2[0], point2[1], point3[0], point3[1]))
            rotationSign = getRotationSign(turnAngle)

            y1Delta = point2[1] - point1[1]
            hypotenuse1 = distance_between(point1, point2)

            # try:
            #     headingAngleAvg1 = asin(y1Delta / avgDT)
            # except:
            #     #print "avgDT", avgDT
            headingAngleAvg1 = asin(y1Delta / hypotenuse1)

            y2Delta = point3[1] - point2[1]
            x2Delta = point3[0] - point2[0]
            hypotenuse2 = distance_between(point2, point3)

            # try:
            #     headingAngleAvg2 = asin(y2Delta / avgDT)
            # except:
            #     #print "avgDT", avgDT
            headingAngleAvg2 = asin(y2Delta / hypotenuse2)
            headingAngle2 = atan2(y2Delta, x2Delta)
            predictedTurnAngleAvg = headingAngleAvg2 - headingAngleAvg1

            angles.append(abs(predictedTurnAngleAvg))
            distances.append(hypotenuse2)

            avgDT = sum(distances)/len(distances)
            avgAngle = sum(angles)/len(angles)

            newR = robot(point3[0], point3[1], headingAngle2, rotationSign * avgAngle, avgDT)
            newR.move_in_circle()
            xy_estimate = newR.x, newR.y

    coords.append(measurement)
    OTHER = (distances, angles, coords, turnAngle)

    return xy_estimate, OTHER
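calculateRotationDirection and getRotationSign are not shown in this snippet. A plausible implementation, assumed here for illustration only, uses the sign of the 2-D cross product of the two consecutive displacement vectors and a majority vote over the recorded history:

def calculateRotationDirection(x1, y1, x2, y2, x3, y3):
    """Positive for a counter-clockwise turn from (p1->p2) to (p2->p3),
    negative for clockwise (2-D cross product)."""
    return (x2 - x1) * (y3 - y2) - (y2 - y1) * (x3 - x2)

def getRotationSign(turn_angles):
    """Majority vote over the history: +1 counter-clockwise, -1 clockwise."""
    votes = sum(1 if t > 0 else -1 for t in turn_angles)
    return 1 if votes >= 0 else -1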
Example #34
def particle_filter(motions, measurements, N, plist, noise_factor,
                    measurement_thetha, std_thetha, step):
    p_result = []
    #print('')
    #print(plist, 'printing whats coming in')
    # motion update (prediction)
    p = []
    for i in range(N):
        r = robot(plist[i][0], plist[i][1], plist[i][2])
        r.set_noise(0, 0, noise_factor * motions[1])
        p.append(r)

    if step > 1:
        p2 = []
        #print('print p before move', p)

        for i in range(N):

            p[i].move(motions[0], motions[1])

        #print(p, 'P after move', motions)

    # measurement update
        w = []

        for i in range(N):

            w.append(
                measurement_prob(p[i].sense(), measurements,
                                 noise_factor * motions[1], measurement_thetha,
                                 std_thetha, p[i].heading))
        #print(w, 'weights')

    # resampling
        p3 = []
        h3 = []
        index = int(random.random() * N)
        beta = 0.0
        mw = max(w)
        for i in range(N):
            beta += random.random() * 2.0 * mw
            while beta > w[index]:
                beta -= w[index]
                index = (index + 1) % N
            p3.append(p[index])

        p = p3

        p_result = []

        for i in range(len(p)):

            p_result.append([p[i].x, p[i].y, p[i].heading])
        #print('final p', p_result)
        #print('')

        return p_result
    else:
        return plist
Example #35
 def init_robots(self):
     for i in range(self.params['nBots']):
         self.robots[i] = (robot(self,
                                 i,
                                 self.base.position,
                                 radius=self.params["r_rad"],
                                 dT=self.dT,
                                 init_position=self.base.position))
Example #36
def effort_test(robotName):
    r=robot(robotName)
 
    while(r.get_desired_joint_effort()[2] < 1):
        print r.get_desired_joint_effort()[2]
        r.delta_move_cartesian_translation([0.0,0.0,-0.001])
        time.sleep(.3)
    print r.get_desired_joint_effort()[2]
Example #37
def next_move(hunter_position,
              hunter_heading,
              target_measurement,
              max_distance,
              OTHER=None):
    # This function will be called after each time the target moves.

    if not OTHER:  # first time
        target_measurements = [target_measurement]
        P = matrix(
            [[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 1000., 0.],
             [0., 0., 0., 1000.]]
        )  # initial uncertainty: 0 for positions x and y, 1000 for the two velocities
        prev_heading = 0.
        OTHER = [target_measurements, P, prev_heading]
    else:
        target_measurements, P, prev_heading = OTHER

    #setup initial state
    initial_xy = target_measurement
    # initial state (location and velocity)
    x = matrix([[initial_xy[0]], [initial_xy[1]], [0.], [0.]])

    #Run kalman filter
    target_measurements_input = target_measurements[
        -15:]  #optimize to address "13 seconds CPU" error on Udacity
    x, P = kalman_filter.filter(x, P, target_measurements_input)
    filter_xy = (x.value[0][0], x.value[1][0])

    #Post-process
    prev_target_measurement = target_measurements[-1]
    #distance
    distance = distance_between(prev_target_measurement, filter_xy)
    #heading/turning
    heading = get_heading(prev_target_measurement, filter_xy)
    turning = heading - prev_heading

    r = robot(filter_xy[0], filter_xy[1], heading, turning, distance)
    r.set_noise(0, 0, 0)
    r.move_in_circle()
    target_estimate = (r.x, r.y)

    #Hunting time
    distance_hunter_target = distance_between(hunter_position, target_estimate)
    heading_hunter_target = get_heading(hunter_position, target_estimate)
    turning_hunter_target = heading_hunter_target - hunter_heading

    #Update OTHER for next call
    OTHER[0].append(filter_xy)
    OTHER[1] = P
    OTHER[2] = heading

    # The OTHER variable is a place for you to store any historical information
    # about
    # the progress of the hunt (or maybe some localization information).  Your
    # return format
    # must be as follows in order to be graded properly.
    return turning_hunter_target, distance_hunter_target, OTHER
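kalman_filter.filter() above is imported from elsewhere and not shown. Here is a numpy-based sketch of the constant-velocity Kalman filter it is assumed to implement (state [x, y, vx, vy], position-only measurements; matrices mirror the usual setup, process noise omitted for brevity):

import numpy as np

def kf_filter(x, P, measurements, dt=1.0, measurement_noise=0.05):
    """x: 4x1 state [x, y, vx, vy]; P: 4x4 covariance; measurements: list of (x, y)."""
    F = np.array([[1., 0., dt, 0.],    # constant-velocity state transition
                  [0., 1., 0., dt],
                  [0., 0., 1., 0.],
                  [0., 0., 0., 1.]])
    H = np.array([[1., 0., 0., 0.],    # we only observe position
                  [0., 1., 0., 0.]])
    R = np.eye(2) * measurement_noise  # measurement covariance
    for z in measurements:
        # measurement update
        y = np.reshape(z, (2, 1)) - H @ x
        S = H @ P @ H.T + R
        K = P @ H.T @ np.linalg.inv(S)
        x = x + K @ y
        P = (np.eye(4) - K @ H) @ P
        # prediction
        x = F @ x
        P = F @ P @ F.T
    return x, P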
Example #38
def estimate_next_pos(measurement, OTHER=None):

    xy_estimate = measurement

    if OTHER is None:
        distances = []
        angles = []
        coords = []
    else:
        distances, angles, coords = OTHER

        if len(coords) == 1:
            hypotenuse1 = distance_between(coords[0], measurement)
            distances.append(hypotenuse1)

        elif len(coords) >= 2:

            point1 = coords[len(coords) - 2]
            point2 = coords[len(coords) - 1]
            point3 = measurement

            y1Delta = point2[1] - point1[1]
            hypotenuse1 = distance_between(point1, point2)
            headingAngleAvg1 = asin(y1Delta / hypotenuse1)

            y2Delta = point3[1] - point2[1]
            x2Delta = point3[0] - point2[0]
            hypotenuse2 = distance_between(point2, point3)
            headingAngle2 = atan2(y2Delta, x2Delta)
            headingAngleAvg2 = asin(y2Delta / hypotenuse2)

            predictedTurnAngleAvg = headingAngleAvg2 - headingAngleAvg1
            angles.append(predictedTurnAngleAvg)
            distances.append(hypotenuse2)

            avgDT = sum(distances) / len(distances)
            avgAngle = sum(angles) / len(angles)

            # create particles only after approximate turning and distance are known
            if len(particles) == 0:
                createParticles(measurement[0], measurement[1], avgAngle,
                                avgDT)

            Z = senseToLandmarks(measurement[0], measurement[1])
            xy_estimate = particle_filter(Z, avgAngle, avgDT)

            #print "avgAngle:", avgAngle
            newR = robot(xy_estimate[0], xy_estimate[1], headingAngle2,
                         avgAngle, avgDT)
            newR.move_in_circle()
            xy_estimate = newR.x, newR.y

        #print "headingAngle1", headingAngle1

    coords.append(measurement)
    OTHER = (distances, angles, coords)

    return xy_estimate, OTHER
Example #39
def target_position_pred(target_measurement, OTHER=None):
    if OTHER is None:
        target_xy_pred = target_measurement
        number_measurement = 0
        heading = 0
        t_list = [0]
        d_list = [0]
        pred_robot = robot(0, 0, 0, 0, 0)
    else:
        prev_number_measurement = OTHER[0]
        prev_measurement = OTHER[1]
        prev_heading = OTHER[2]
        t_list = OTHER[3]
        d_list = OTHER[4]

        number_measurement = prev_number_measurement + 1
        d = distance_between(target_measurement, prev_measurement)
        d_list.append(d)
        total_d = sum(d_list)
        average_d = total_d / number_measurement

        heading = atan2(target_measurement[1] - prev_measurement[1],
                        target_measurement[0] - prev_measurement[0])
        t = heading - prev_heading
        t = angle_trunc(t)
        t_list.append(t)
        total_t = sum(t_list)
        average_t = total_t / number_measurement

        tmp_d = tmp_t = 0
        for i in range(number_measurement):
            tmp_d += pow(d_list[i] - average_d, 2)
            tmp_t += pow(t_list[i] - average_t, 2) 
        delta_d = tmp_d / number_measurement
        delta_t = tmp_t / number_measurement

        print delta_d, average_d, delta_t, average_t

        pred_robot = robot(
            target_measurement[0], target_measurement[1], heading, average_t, average_d)
        pred_robot.move_in_circle()
        target_xy_pred = pred_robot.sense()
    OTHER = [number_measurement, target_measurement,
             heading, t_list, d_list, target_xy_pred]
    return pred_robot, OTHER
Example #40
def estimate_next_pos(measurement, OTHER=None):
    """Estimate the next (x, y) position of the wandering Traxbot
    based on noisy (x, y) measurements."""

    # You must return xy_estimate (x, y), and OTHER (even if it is None)
    # in this order for grading purposes.
    P_LENGTH = 500
    if not OTHER:
        OTHER = {}
        OTHER["p_list"] = []
        for i in range(P_LENGTH):
            r = robot(random.gauss(measurement[0], measurement_noise * 10),
                      random.gauss(measurement[1], measurement_noise * 10),
                      heading=(random.random() - 0.5) * 2 * pi,
                      turning=(random.random() - 0.5) * pi,
                      distance=random.gauss(1.5, 2))
            OTHER["p_list"].append(r)

    # prediction
    for p in OTHER["p_list"]:
        p.turning += random.gauss(0, pi / 20)
        p.distance += random.gauss(0, 0.01)
        p.move(p.turning, p.distance)

    # measure
    w = []
    for p in OTHER["p_list"]:
        w.append(
            measure_prob((p.x, p.y), (measurement[0], measurement[1]),
                         sqrt(measurement_noise)))

    # resampling
    p3 = []
    index = int(random.random() * P_LENGTH)
    beta = 0.0
    mw = max(w)

    for i in range(P_LENGTH):
        beta += random.random() * 2.0 * mw
        while beta > w[index]:
            beta -= w[index]
            index = (index + 1) % P_LENGTH
        p = deepcopy(OTHER["p_list"][index])
        p3.append(p)
    OTHER["p_list"] = p3

    # prediction
    p_list = deepcopy(OTHER["p_list"])
    for p in p_list:
        p.move(p.turning, p.distance)

    # estimation
    xy_estimate = (sum([x.x for x in p_list]) / P_LENGTH,
                   sum([x.y for x in p_list]) / P_LENGTH)
    # xy_estimate = (sum([x.x for x in OTHER["p_list"]]) / P_LENGTH,
    #                sum([x.y for x in OTHER["p_list"]]) / P_LENGTH)
    return xy_estimate, OTHER
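measure_prob is referenced above but not defined. A common choice, assumed here, is an isotropic Gaussian likelihood of the measurement given the particle position (the normalization constant is irrelevant for resampling but kept for completeness):

from math import exp, pi

def measure_prob(particle_xy, measurement_xy, sigma):
    dx = particle_xy[0] - measurement_xy[0]
    dy = particle_xy[1] - measurement_xy[1]
    return exp(-(dx * dx + dy * dy) / (2.0 * sigma ** 2)) / (2.0 * pi * sigma ** 2)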
Example #41
    def ubicarRobot(self):
        while True:
            x = random.randint(0, self.n - 1)
            y = random.randint(0, self.m - 1)

            if self.habitacion[x][y].es_vacia():
                self.robot = robot(x, y)
                self.habitacion[x][y].annadir("R")
                break
Example #42
    def __init__(self):
        threading.Thread.__init__(self)
        self.q = Queue.Queue()
        self.lastSendTime = time.time()

        self.dash = robot("DC:86:F3:1F:C5:E9", False)
        self.connectStatus = DashCommandSender.DISCONNECTED

        self.isDisco = False
Example #43
def run(param):
    myrobot = robot()
    myrobot.set(0.0, 1.0, 0.0)
    speed = 1.0 # motion distance is equal to speed (we assume time = 1)
    N = 100

    for i in range(N):
        steering = - param * myrobot.y
        myrobot = myrobot.move(steering, speed)
        print myrobot, steering
Example #44
def joint_move(robotName):
    """Here is where we initialize the robot, in this demo we called the robot r, and we move the robot in joint space accordingly.

    :param robotName: the name of the robot used """
    r = robot(robotName)
    # look at size
    print 'After, move to position [0,0,0.56,0,0,0,0]:'
    r.move_joint_list([0.0, 0.0, 0.0060, 0.0, 0.0, 0.0, 0.0], interpolate = False)
    print r.get_desired_joint_position()
Example #45
    def pcopy(self, particles):
        #Helper function to copy robot class object
        copied = []
        for i in range(len(particles)):
            particle = particles[i]
            r = robot(particle.x, particle.y, particle.heading, particle.turning, particle.distance)
            r.set_noise(particle.turning_noise, particle.distance_noise, particle.measurement_noise)
            copied.append(r)

        return copied
Example #46
def cartesian_move(robotName,read_playfile):
	r = robot(robotName)

	rospy.init_node('playfile_cartspace_test')

	infile = open(read_playfile,'r')
	infile.read()


	#print infile
	infile.close()
Example #47
def moveRobotDesiredPose(robotName,filename):
	r = robot(robotName)
	gripper1_Affines = playfileReader(filename)
	r.move_cartesian_translation([0.0,0.0,-0.06])
	for goalPose in range(len(gripper1_Affines)):
		print goalPose
		print gripper1_Affines[goalPose]
		if (goalPose == 0):
			r.move_cartesian_frame(gripper1_Affines[goalPose],True)
		else:
			r.delta_move_cartesian_frame(gripper1_Affines[goalPose],True)
Example #48
def Interpolate(robotname):
    R = robot(robotname)
    start_frame = [0.0447258528045, 0.0512065150661, -0.131469281192, -2.5396399475337668, -0.2903344173204437, 0.18226904308086755]      # Starting position in [X Y Z Roll Pitch Yaw]
    end_frame = [-0.0447258528045, 0.0512065150661, -0.131469281192, -2.5396399475337668, -0.2903344173204437, 0.18226904308086755]	  # Ending position in [X Y Z Roll Pitch Yaw]

    start_Vect = np.array([start_frame[0],start_frame[1],start_frame[2]])
    end_Vect = np.array([end_frame[0],end_frame[1],end_frame[2]])
    print "Norm: ", np.linalg.norm(end_Vect - start_Vect)				# Computes the norm of the vector between the start and end points

    R.move_cartersian_frame_linear_interpolation(start_frame, 0.01, False)              # (abs_frame, speed, cubic interpolation = False)
    time.sleep(1)
    R.move_cartersian_frame_linear_interpolation(end_frame, 0.01, False)               	# (abs_frame, speed, cubic interpolation = False)
Example #49
def run(param1, param2):
    myrobot = robot()
    myrobot.set(0.0, 1.0, 0.0)
    speed = 1.0 # motion distance is equal to speed (we assume time = 1)
    N = 100

    prev_y = myrobot.y
    for i in range(N):
        steer = - param1 * myrobot.y - param2 * (myrobot.y - prev_y)
        prev_y = myrobot.y
        myrobot = myrobot.move(steer, speed)
        print myrobot, steer
Example #50
def next_move(hunter_position, hunter_heading, target_measurement, max_distance, OTHER = None):		
	counter = 0
	turning = 0
	distance = 0
	lastPosition = [0, 0]
	particles = []
	
	N = 100
	if OTHER != None:
		print "Measurement = " + str(target_measurement)
		counter, lastPosition, particles = OTHER
		particles.sort()
		for i in range(len(particles)):
			print "Particle " + str(i) + " at " + str(particles[i].x) + ", " + str(particles[i].y) + " distance "  + str(distance_between(target_measurement, particles[i].sense()))
			
		return

	else:

		for i in range(50):
			rx = random.gauss(target_measurement[0], measurement_noise)
			ry = random.gauss(target_measurement[1], measurement_noise)
			rheading = random.gauss(0,2*pi)
			
			tempRobot = robot(rx, ry, rheading)
			#tempRobot.set_noise(0.0, 0.0, measurement_noise)
			particles.append(tempRobot)
		
		max_w = 0
		max_index = -1
		for j in range(len(particles)):
			dx = abs(particles[j].x - target_measurement[0])
			dy = abs(particles[j].y - target_measurement[1])
			w = 1/dx * ( 1/dy)
			print "Particle " + str(j) + " at " + str(particles[j].x) + ", " + str(particles[j].y) + " distance "  + str(distance_between(target_measurement, particles[j].sense()))  + " weight = " + str(w)
			if w > max_w:
				max_w = w
				max_index = j
			

		
		print "Measurement = " + str(target_measurement)

			
		print "Max weight is " + str(max_w) + " distance is " + str(distance_between(target_measurement, particles[max_index].sense())) + " index  = " + str(max_index)
		return
			
	counter += 1
	
	OTHER = [counter, target_measurement, particles]
	print "Counter " + str(counter)
	return turning, distance, OTHER
Example #51
def square(robotName):
    r = robot(robotName)
    
    r.move_cartesian_translation([0.0,0.0,-0.15])
    #the length of the square
    input_length = raw_input('Enter the length of the square : ')
    length = float(input_length)

    #move in cartesian space
    r.move_cartesian_translation([0.0,length, -0.15])
    r.move_cartesian_translation([length, length, -0.15])
    r.move_cartesian([length, 0.0, -0.15])
    r.move_cartesian([0.0, 0.0, -0.15])
Example #52
def estimate_next_pos(measurement, OTHER = None):
	mu = 0
	sigma = 0
	counter = 0
	lastPosition = [0, 0]
	tmu = 0
	tsig = 0
	lheading = 0
	heading = 0
	lastheading = 0

	if OTHER == None:
		sigma = 10000
		tsig = 10000
		       
	if OTHER != None:
		mu, sigma, lastheading, tmu, tsig, counter, lastPosition = OTHER
	 
	counter += 1
	db = distance_between(lastPosition, measurement)
	
	dy = measurement[1] - lastPosition[1]
	dx = measurement[0] - lastPosition[0]
	thisheading = calculateHeading(dx, dy)
	
	turning = (thisheading - lastheading)  
	if turning < 0:
		turning = lastheading - thisheading
	
	
	if getQuadrant(lastheading) == 4 and getQuadrant(thisheading) == 1:
		turning = thisheading + ( pi*2 - abs(lastheading))
	
	turning = turning % (2 * pi)
	
	

	mu, sigma = update(mu, sigma, db, measurement_noise)
	tmu, tsig = update(tmu, tsig, turning, measurement_noise)
	
	tRobot = robot(measurement[0], measurement[1], heading)
	#tRobot.set_noise(0.0, 0.0, measurement_noise)
	tRobot.move(tmu, mu)
	xy_estimate = tRobot.sense()
	
	OTHER = [mu, sigma, thisheading, tmu, tsig, counter, measurement ]
	

	return xy_estimate, OTHER
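update() is not shown in this snippet; it matches the standard 1-D Gaussian measurement update from the same course material, reproduced here as an assumed helper:

def update(mean1, var1, mean2, var2):
    """Combine two 1-D Gaussians (e.g. a prior and a measurement)."""
    new_mean = (var2 * mean1 + var1 * mean2) / (var1 + var2)
    new_var = 1.0 / (1.0 / var1 + 1.0 / var2)
    return new_mean, new_var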
Example #53
def add_chess(i):
	global owner
	if owner == 'player':
		if check_click(i, allclickchess) == 'yes':
			playaudio('undo.wav')
			return 'no'
		set_xml('toptext', 'text', '电脑(白)下子')
		playerchess.append(i)
		allclickchess.append(i)
		colour = 'black'
		owner = 'computer'

	set_xml('btn_' + i, 'background', 'file://' + IMAGEPATH + 'game_chess_' + colour + '.bmp')
	set_list('玩家下子:' + i)
	playaudio('stone.wav')
	# check whether this move wins the game
	if Check_Win(playerchess, i).check() == 'yes':
		iswin('player')
	if check_draw(allclickchess) == 'yes':
		isdraw()

	robot(i)
Example #54
def run(param1, param2, param3):
    myrobot = robot()
    myrobot.set(0.0, 1.0, 0.0)
    speed = 1.0 # motion distance is equal to speed (we assume time = 1)
    N = 100
    prev_y = myrobot.y
    sum_crosstrack_errors = 0

    myrobot.set_steering_drift(10.0 / 180.0 * pi) # 10 degree bias, this will be added in by the move function, you do not need to add it below!

    for i in range(N):
        # steering = -tau_p * CTE - tau_d * diff_CTE - tau_i * int_CTE
        steering = - param1 * myrobot.y - param2 * (myrobot.y - prev_y) - param3 * sum_crosstrack_errors
        sum_crosstrack_errors += myrobot.y
        prev_y = myrobot.y
        myrobot = myrobot.move(steering, speed)
        print myrobot
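These PID runs are normally invoked with small hand-tuned gains; a typical call, with illustrative values only:

# proportional, differential and integral gains for the three-parameter run();
# the two-parameter variant above takes only the first two.
run(0.2, 3.0, 0.004)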
Example #55
    def init_particles(self,N, distance, turning, t_noise, m_noise, d_noise):
        #Initialize particles randomly within the box's dimension

        particles = []

        for i in range(N):
            x_init = random.random() * self.maxX
            y_init = random.random() * self.maxY
            h_init = random.random() * 2*pi
            d_init = random.gauss(distance, d_noise)
            t_init = angle_trunc(random.gauss(turning, t_noise))
            r = robot(x=x_init, y=y_init, heading=h_init, turning=t_init,distance=d_init)
            r.set_noise(t_noise, d_noise, m_noise)
            particles.append(r)

        #print "particles made"
        return particles
Example #56
def pickup(robotname):
    r = robot(robotname)
    r.move_cartesian([0.0,0.0,-0.05])

    i = 0
    while i < 3:
        r.move_cartesian([-0.025+(i*.025),0.0,-0.05])
        r.delta_move_cartesian([0.0,0.05,0.0])
        r.open_gripper()
        r.delta_move_cartesian([0.0,0.0,-0.01])
        r.close_gripper()
        time.sleep(3)
        r.delta_move_cartesian([0.0,0.0,0.01])        
        r.move_cartesian([0.0,0.0,-0.05])
        r.open_gripper()
        i+=1
    r.move_cartesian([0.0,0.0,-0.05])
Example #57
def estimate_next_pos(measurement, OTHER = None):
    N = 100
    if not OTHER:
        x, y = measurement
        particles = []
        for i in range(N):
            r = robot(random.gauss(x, 1), random.gauss(y, 1), random.random() * 2.0 * pi) 
            r.set_noise(0.1, 0.1, 0.25)
            particles.append(r)
        OTHER = [measurement, 0.0, particles]
        xy_estimate = measurement        
    else:
        prev_x, prev_angle, particles = OTHER

        # measurement update
        w = [measurement_prob(p, measurement) for p in particles]

        # resampling
        new_p = []
        index = int(random.random() * N)
        beta = 0.0
        mw = max(w)
        for i in range(N):
            beta += random.random() * 2.0 * mw
            while beta > w[index]:
                beta -= w[index]
                index = (index + 1) % N
            new_p.append(deepcopy(particles[index]))
        estimate = get_position(new_p)

        x, y = estimate 
        diffX = x - prev_x[0]
        diffY = y - prev_x[1]
        angle = atan2(diffY, diffX)
        turn_rate = angle_trunc(angle - prev_angle)
        step = distance(x, y, prev_x[0], prev_x[1])

        # motion update (prediction)
        for p in new_p:
            p.move(turn_rate, step)
            p.measurement_noise -= 0.10
            p.measurement_noise = max(0.15, p.measurement_noise)

        xy_estimate = get_position(new_p)
        OTHER = [estimate, angle, new_p]
    return xy_estimate, OTHER
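get_position is used by this particle filter (and by Example #32) but not defined in the snippets; a simple assumption is the mean particle position:

def get_position(particles):
    """Average (x, y) of a particle set."""
    x = sum(p.x for p in particles) / float(len(particles))
    y = sum(p.y for p in particles) / float(len(particles))
    return (x, y)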