Example no. 1
    def location_report(self, tarloc, taralti):
        # get target and vehicle locations
        self.targetLoc = PositionVector()
        self.vehicleLoc = PositionVector()
        self.vehicleLoc.set_from_location(drn.get_location())

        full_loc = Location(float(tarloc[0]), float(tarloc[1]), float(taralti), is_relative=True)
        self.targetLoc.set_from_location(full_loc)
        loc = "Vehicle location: X: {} Y: {} Z: {}".format(
            self.vehicleLoc.x, self.vehicleLoc.y, self.vehicleLoc.z)
        alog.log.info(loc)

        target_loc = "Target location: X: {} Y: {} Z: {}".format(
            self.targetLoc.x, self.targetLoc.y, self.targetLoc.z)
        alog.log.info(target_loc)

        dist = self.vehicleLoc.get_distance_xyz(self.vehicleLoc, self.targetLoc)
        reld = "Relative distance: " + format(dist)
        alog.log.info(reld)
        return dist
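A minimal usage sketch for location_report, assuming the enclosing class is the VModule shown in Example no. 27 and that drn, alog and PositionVector are already set up by the surrounding module; the coordinates and threshold are illustrative only:

# Hypothetical call: distance (in the PositionVector frame) to a target at 30 m relative altitude.
vm = VModule()
dist = vm.location_report(["33.775497", "-84.396860"], 30)
if dist < 10:
    alog.log.info("Within 10 m of target")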
Example no. 2
    def check_home(self):
        if self.home_initialised:
            return True

        if self.vehicle.gps_0.fix_type < 2:
            print "Waiting for GPS .......", self.vehicle.gps_0.fix_type
            return False

        if self.vehicle.location.global_relative_frame is None:
            print 'Waiting for vehicle position ...'
            return False

        if self.vehicle.location.global_relative_frame.lat is None or self.vehicle.location.global_relative_frame.lon is None or self.vehicle.location.global_relative_frame.alt is None:
            print 'Waiting for vehicle position ...'
            return False

        if self.mission_cmds is None:
            self.fetch_mission()
            return False

        if self.vehicle.home_location is None:
            print 'Waiting for home location ...'
            self.fetch_mission()
            return False

        PositionVector.set_home_location(
            LocationGlobal(self.vehicle.home_location.lat,
                           self.vehicle.home_location.lon, 0))
        self.home_initialised = True
        print 'Home location acquired'

        return self.home_initialised
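check_home is meant to be polled until it returns True; a minimal driver loop under that assumption (strategy here is a hypothetical object exposing the method):

import time

# Keep polling until the home location has been acquired.
while not strategy.check_home():
    time.sleep(1)
print 'Home location initialised, continuing'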
Example no. 3
    def draw_fake_balloon(self, frame, veh_pos, balloon_pos, vehicle_roll,
                          vehicle_pitch, vehicle_yaw):
        # calculate bearing to balloon
        bearing_to_balloon = PositionVector.get_bearing(veh_pos, balloon_pos)
        yaw_to_balloon = balloon_utils.wrap_PI(bearing_to_balloon -
                                               vehicle_yaw)

        # calculate earth frame pitch angle from vehicle to balloon
        pitch_to_balloon = vehicle_pitch + PositionVector.get_elevation(
            veh_pos, balloon_pos)

        #print "Fake Balloon Bearing:%f Pitch:%f Dist:%f" % (math.degrees(bearing_to_balloon), math.degrees(pitch_to_balloon), dist_to_balloon_xy)

        # calculate pixel position of balloon
        balloon_x = balloon_video.angle_to_pixels_x(
            yaw_to_balloon) + balloon_video.img_center_x
        balloon_y = balloon_video.angle_to_pixels_y(
            pitch_to_balloon) + balloon_video.img_center_y

        # calculate size of balloon in pixels from distance and size
        dist_to_balloon_xyz = PositionVector.get_distance_xyz(
            veh_pos, balloon_pos)
        balloon_radius = balloon_utils.get_pixels_from_distance(
            dist_to_balloon_xyz, balloon_finder.balloon_radius_expected)

        # store balloon radius
        self.last_balloon_radius = balloon_radius

        # draw balloon
        cv2.circle(frame, (balloon_x, balloon_y), balloon_radius,
                   self.fake_balloon_colour_bgr_scalar, -1)
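The angle_to_pixels_x/y helpers are not shown in this listing; a common linear approximation, given purely as an assumption rather than the balloon_video implementation, scales the angle by pixels-per-radian derived from the field of view:

import math

# Assumed mapping: horizontal pixel offset for a given angle, 640 px wide image, 70 deg HFOV.
def angle_to_pixels_x(angle_rad, img_width=640, hfov_deg=70.0):
    return int(angle_rad * (img_width / math.radians(hfov_deg)))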
Example no. 4
    def __init__(self):
        self.targetLocation = PositionVector()
        self.vehicleLocation = PositionVector()

        self.backgroundColor = (74, 88, 109)

        #load target
        filename = sc_config.config.get_string(
            'simulator', 'target_location',
            '/home/dannuge/SmartCamera/target.jpg')
        target_size = sc_config.config.get_float('algorithm', 'outer_ring',
                                                 1.0)
        self.load_target(filename, target_size)

        #define camera
        self.camera_width = sc_config.config.get_integer(
            'camera', 'width', 640)
        self.camera_height = sc_config.config.get_integer(
            'camera', 'height', 640)
        self.camera_vfov = sc_config.config.get_float('camera', 'vertical-fov',
                                                      72.42)
        self.camera_hfov = sc_config.config.get_float('camera',
                                                      'horizontal-fov', 72.42)
        self.camera_fov = math.sqrt(self.camera_vfov**2 + self.camera_hfov**2)
        self.camera_frameRate = 30
	def inside_landing_area(self):
		return 1
		'''
		vehPos = PositionVector.get_from_location(veh_control.get_location())
		landPos = PositionVector.get_from_location(veh_control.get_landing())
		'''
		vehPos = PositionVector.get_from_location(Location(0,0,10))
		landPos = PositionVector.get_from_location(Location(0,0,0))
		'''
Example no. 6
    def check_home(self):

        # return immediately if home has already been initialised
        if self.home_initialised:
            return True

        # check for home no more than once every two seconds
        if (time.time() - self.last_home_check > 2):

            # update that we have performed a status check
            self.last_home_check = time.time()

            # check if we have a vehicle
            if self.vehicle is None:
                self.vehicle = self.get_vehicle_with_dronekit()
                return

            # download the vehicle waypoints (and home) if we don't have them already
            if self.mission_cmds is None:
                self.fetch_mission()
                return False

            # ensure the vehicle's position is known
            if self.vehicle.location.global_relative_frame is None:
                print "waiting for vehicle position.."
                return False
            if self.vehicle.location.global_relative_frame.lat is None or self.vehicle.location.global_relative_frame.lon is None or self.vehicle.location.global_relative_frame.alt is None:
                print "waiting for vehicle position.."
                return False

            # get home location from mission command list
            if self.vehicle.home_location is None:
                print "waiting for home location.."
                self.fetch_mission()
                return False

            # sanity check home position
            if self.vehicle.home_location.lat == 0 and self.vehicle.home_location.lon == 0:
                print "home position all zero, re-fetching.."
                self.fetch_mission()
                return False

            # set home position
            PositionVector.set_home_location(
                LocationGlobal(self.vehicle.home_location.lat,
                               self.vehicle.home_location.lon, 0))
            self.home_initialised = True
            print "got home location"

            # To-Do: if we wish to have the same home position as the flight controller
            # we must download the home waypoint again whenever the vehicle is armed

        # return whether home has been initialised or not
        return self.home_initialised
    def check_home(self):

        # return immediately if home has already been initialised
        if self.home_initialised:
            return True

        # check for home no more than once every two seconds
        if (time.time() - self.last_home_check > 2):

            # update that we have performed a status check
            self.last_home_check = time.time()

            # check if we have a vehicle
            if self.vehicle is None:
                self.vehicle = self.get_vehicle_with_dronekit()
                return

            # download the vehicle waypoints (and home) if we don't have them already
            if self.mission_cmds is None:
                self.fetch_mission()
                return False

            # ensure the vehicle's position is known
            if self.vehicle.location.global_relative_frame is None:
                print "waiting for vehicle position.."
                return False
            if self.vehicle.location.global_relative_frame.lat is None or self.vehicle.location.global_relative_frame.lon is None or self.vehicle.location.global_relative_frame.alt is None:
                print "waiting for vehicle position.."
                return False

            # get home location from mission command list
            if self.vehicle.home_location is None:
                print "waiting for home location.."
                self.fetch_mission()
                return False

            # sanity check home position
            if self.vehicle.home_location.lat == 0 and self.vehicle.home_location.lon == 0:
                print "home position all zero, re-fetching.."
                self.fetch_mission()
                return False

            # set home position
            PositionVector.set_home_location(LocationGlobal(self.vehicle.home_location.lat,self.vehicle.home_location.lon,0))
            self.home_initialised = True
            print "got home location"

            # To-Do: if we wish to have the same home position as the flight controller
            # we must download the home waypoint again whenever the vehicle is armed

        # return whether home has been initialised or not
        return self.home_initialised
    def check_home(self):

        # return immediately if home has already been initialised
        if self.home_initialised:
            return True

        # check for home no more than once every two seconds
        if (time.time() - self.last_home_check > 2):

            # update that we have performed a status check
            self.last_home_check = time.time()

            # check if we have a vehicle
            if self.vehicle is None:
                self.vehicle = self.api.get_vehicles()[0]
                return

            # ensure the vehicle's position is known
            if self.vehicle.location is None:
                return False
            if self.vehicle.location.lat is None or self.vehicle.location.lon is None or self.vehicle.location.alt is None:
                return False

            # download the vehicle waypoints if we don't have them already
            if self.mission_cmds is None:
                self.fetch_mission()
                return False

            # get the home lat and lon
            home_lat = self.mission_cmds[0].x
            home_lon = self.mission_cmds[0].y

            # sanity check the home position
            if home_lat is None or home_lon is None:
                return False

            # sanity check again and set home position
            if home_lat != 0 and home_lon != 0:
                PositionVector.set_home_location(
                    Location(home_lat, home_lon, 0))
                self.home_initialised = True
            else:
                self.mission_cmds = None

            # To-Do: if we wish to have the same home position as the flight controller
            # we must download the home waypoint again whenever the vehicle is armed

        # return whether home has been initialised or not
        return self.home_initialised
    def check_home(self):

        # return immediately if home has already been initialised
        if self.home_initialised:
            return True

        # check for home no more than once every two seconds
        if (time.time() - self.last_home_check > 2):

            # update that we have performed a status check
            self.last_home_check = time.time()

            # check if we have a vehicle
            if self.vehicle is None:
                self.vehicle = self.api.get_vehicles()[0]
                return

            # ensure the vehicle's position is known
            if self.vehicle.location is None:
                return False
            if self.vehicle.location.lat is None or self.vehicle.location.lon is None or self.vehicle.location.alt is None:
                return False

            # download the vehicle waypoints if we don't have them already
            if self.mission_cmds is None:
                self.fetch_mission()
                return False

            # get the home lat and lon
            home_lat = self.mission_cmds[0].x
            home_lon = self.mission_cmds[0].y

            # sanity check the home position
            if home_lat is None or home_lon is None:
                return False

            # sanity check again and set home position
            if home_lat != 0 and home_lon != 0:
                PositionVector.set_home_location(Location(home_lat,home_lon,0))
                self.home_initialised = True
            else:
                self.mission_cmds = None

            # To-Do: if we wish to have the same home position as the flight controller
            # we must download the home waypoint again whenever the vehicle is armed

        # return whether home has been initialised or not
        return self.home_initialised
Example no. 10
    def analisis_image(self):
        # record time
        now = time.time()
        #PositionVectors = PositionVector()
        # capture vehicle position
        self.vehicle_pos = PositionVector.get_from_location(
            self.vehicle.location.global_relative_frame)

        # capture vehicle attitude in buffer
        self.att_hist.update()

        # get delayed attitude from buffer
        veh_att_delayed = self.att_hist.get_attitude(now - self.attitude_delay)

        # get new image from camera
        f = self.get_frame()
        #create an image processor
        if self.mission_on_guide_mode == 4:
            self.fire_found, xpos, ypos, size = fire_finder.filter(f)
            #result = detector.analyze_frame(f)
            self.attitude = self.get_attitude()
            if self.fire_found == True:
                self.b = 0
                sc_logger.text(sc_logger.GENERAL, "Target terdeteksi!")
                print xpos, ypos, size
                if self.vehicle.mode.name == "GUIDED":
                    sc_logger.text(sc_logger.GENERAL,
                                   "Target terdeteksi, mendekati target!")  # "Target detected, approaching target!"
                self.move_to_target_fire(xpos, ypos, size, self.attitude,
                                         self.vehicle_pos)
                self.relay1(1)  # indicator light on
                #self.servo(0)  # test for camera servo
            elif self.vehicle.mode.name == "GUIDED":
                self.relay1(0)  # indicator light off
                #self.servo(1)  # test for camera servo
                self.b += 1
                if self.b > 25:  #15
                    self.b = 0
                    #print "Api tidak terdeteksi, ubah mode ke AUTO"
                    #self.lanjut_cmd += 1
                    sc_logger.text(
                        sc_logger.GENERAL,
                        "Target tidak terdeteksi, ubah mode ke AUTO")
                    self.a = False
                    self.waktu1 = 0
                    #self.c = True
                    self.controlling_vehicle = False
                    self.vehicle.mode = VehicleMode("AUTO")
                    self.vehicle.flush()
            #rend_Image = detector.add_target_highlights(f, result[3])
            sc_logger.image(sc_logger.GUI, f)

            if self.writer is not None:
                # save image for debugging later
                self.writer.write(f)

            # increment frames analysed for stats
            self.num_frames_analysed += 1
            if self.stats_start_time == 0:
                self.stats_start_time = time.time()
 def get_frame(self):
     if self.use_simulator:
         veh_pos = PositionVector.get_from_location(self.vehicle.location)
         frame = balloon_sim.get_simulated_frame(veh_pos, self.vehicle.attitude.roll, self.vehicle.attitude.pitch, self.vehicle.attitude.yaw)
     else:
         frame = balloon_video.get_image()
     return frame
Example no. 12
 def get_frame(self):
     if self.use_simulator:
         veh_pos = PositionVector.get_from_location(self.vehicle.location.global_relative_frame)
         frame = balloon_sim.get_simulated_frame(veh_pos, self.vehicle.attitude.roll, self.vehicle.attitude.pitch, self.vehicle.attitude.yaw)
     else:
         frame = balloon_video.get_image()
     return frame
Example no. 13
 def project_position(self, origin, pitch, yaw, distance):
     cos_pitch = math.cos(pitch)
     dx = distance * math.cos(yaw) * cos_pitch
     dy = distance * math.sin(yaw) * cos_pitch
     dz = distance * math.sin(pitch)
     ret = PositionVector(origin.x + dx, origin.y + dy, origin.z + dz)
     return ret
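A quick numeric check of project_position, assuming x/y/z are metres and pitch/yaw are radians; tracker stands in for whatever object owns the method:

import math

# Project 10 m from (0, 0, 10) at 45 deg yaw and level pitch (illustrative values).
origin = PositionVector(0, 0, 10)
proj = tracker.project_position(origin, 0.0, math.radians(45), 10.0)
# Expect roughly x = y = 7.07 and z unchanged, since sin(0) = 0.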
Example no. 14
 def put_location(self, timestamp, location, vel):
     loc = PositionVector().get_from_location(location)  # convert to meters
     self.location_buffer[self.lb_index] = (timestamp, loc, vel)
     if (self.lb_index == self.size - 1):
         self.lb_index = 0
     else:
         self.lb_index += 1
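put_location writes into a fixed-size circular buffer; a standalone sketch of the same wrap-around indexing, with hypothetical names and without the PositionVector conversion:

# Minimal ring buffer: once full, the oldest entry is overwritten.
class RingBuffer(object):
    def __init__(self, size):
        self.size = size
        self.buf = [None] * size
        self.idx = 0

    def put(self, item):
        self.buf[self.idx] = item
        self.idx = (self.idx + 1) % self.size  # same effect as the if/else above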
 def get_simulated_frame(self, veh_pos, vehicle_roll, vehicle_pitch, vehicle_yaw):
     # get balloon position
     balloon_pos = PositionVector.get_from_location(self.fake_balloon_location)
     # get background
     sim_frame = self.get_background(vehicle_roll, vehicle_pitch)
     # draw balloon on background
     self.draw_fake_balloon(sim_frame, veh_pos, balloon_pos, vehicle_roll, vehicle_pitch, vehicle_yaw)
     return sim_frame
    def main(self):

        # set home to tridge's home field (absolute alt = 270)
        PositionVector.set_home_location(Location(-35.362938,149.165085,0))

        # calculate balloon position
        fake_balloon_pos = PositionVector.get_from_location(self.fake_balloon_location)

        # vehicle attitude and position
        veh_pos = PositionVector(0,0,fake_balloon_pos.z) # at home location
        veh_roll = math.radians(0)     # no roll (0 deg)
        veh_pitch = math.radians(0)     # pitched back at 0 deg
        veh_yaw = PositionVector.get_bearing(veh_pos,fake_balloon_pos)  # facing towards fake balloon

        # display positions from home
        print "Vehicle %s" % veh_pos
        print "Balloon %s" % fake_balloon_pos

        # generate simulated frame of balloon 10m north, 2m above vehicle
        img = self.get_simulated_frame(veh_pos, veh_roll, veh_pitch, veh_yaw)

        while(True):
            # move vehicle towards balloon
            veh_pos = veh_pos + (fake_balloon_pos - veh_pos) * 0.01

            # regenerate frame
            img = self.get_simulated_frame(veh_pos, veh_roll, veh_pitch, veh_yaw)

            # look for balloon in image using blob detector        
            found_in_image, xpos, ypos, size = balloon_finder.analyse_frame(img)

            # display estimated vs actual distance
            dist_actual = PositionVector.get_distance_xyz(veh_pos, fake_balloon_pos)
            dist_est = balloon_utils.get_distance_from_pixels(size, balloon_finder.balloon_radius_expected)
            print "Dist Est:%f  Act:%f   Size Est:%f  Act:%f" % (dist_est, dist_actual, size, self.last_balloon_radius) 

            # show image
            cv2.imshow("fake balloon", img)

            # wait for keypress 
            k = cv2.waitKey(5) & 0xFF
            if k == 27:
                break

        # destroy windows
        cv2.destroyAllWindows()
Example no. 17
   def run(self):
       
       while True:
            if self.vehicle.mode.name != "GUIDED": #if we are no longer in guided mode, stop receiving messages
              logging.info("follower vehicle aborted GUIDED mode")
              self.recv_thread.stop()
              break
           try:
              (message,address) = self.msg_queue.get()
              self.haversine_distance((message.lat,message.lon),(self.vehicle.location.global_relative_frame.lat,self.vehicle.location.global_relative_frame.lon))
              if not self.called:
                  #set home location to home location of master vehicle
                  
                  PositionVector.set_home_location(LocationGlobal(message.home_location.lat,message.home_location.lon,message.home_location.alt))
                  logging.debug("master home location is...")
                  logging.debug(message.home_location)
                  self.called = True
              logging.debug("message received %s %s %s" %(message.lat, message.lon,message.alt))
               
              #altitude = 5 #in metres
              separation = self.distance #distance between drones
             
              original_pos = PositionVector.get_from_location(message)
              logging.debug("position for leader vehicle -> %s heading -> %s" %(original_pos,message.heading))
              logging.debug("leader vehicle groundspeed is %.3f" %message.groundspeed)
              logging.debug("leader vehicle lidar altitude is %.4f m"%message.rangefinder)
              new_pos = selection_coding(original_pos,message.heading,separation)
              
              new_pos = PositionVector(new_pos[0],new_pos[1],new_pos[2])
              logging.debug("position for follower vehicle is %s" %(new_pos))
              
              
              lat = new_pos.get_location().lat
              lon = new_pos.get_location().lon
              alt = new_pos.get_location().alt - message.home_location.alt
              dest = LocationGlobalRelative(lat,lon,alt)
              logging.debug("gps locationglobal data for follower vehicle to procced to is %s" %new_pos.get_location())
              logging.debug("gps locationglobalrelative data for follower vehicle to procced to is %s" %dest)

              self.vehicle.simple_goto(dest)
              set_yaw(self.vehicle,message.heading)
              logging.debug("vehicle groundspeed is %.3f" %self.vehicle.groundspeed)
              logging.debug("vehicle heading is %.3f" %self.vehicle.heading)
              logging.debug("vehicle lidar altitude is %.4f m "%self.vehicle.rangefinder.distance)
              actualpath = PositionVector.get_from_location(self.vehicle.location.global_frame)
              logging.debug("vehicle actual path in X direction is %.5f m" %actualpath.x)
              logging.debug("vehicle actual path in Y direction is %.5f m" %actualpath.y)

              time.sleep(0.1)
             
              logging.debug("**************")
              self.count+=1
              self.msg_queue.task_done()
           except Queue.Empty:
              print ("all messages processed")
Example no. 18
def earthframe_rad_to_relative_copter(ef_angle_to_target,
                                      location,
                                      alt_above_terrain=None):

    veh_pos = PositionVector().get_from_location(location)

    #get current altitude (constrained to no lower than 50cm)
    if alt_above_terrain is None:
        alt_above_terrain = location.alt
    alt = max(alt_above_terrain, 0.5)

    #convert earth-frame angles to earth-frame position offset
    x = alt * math.tan(ef_angle_to_target.x)  #meters
    y = alt * math.tan(ef_angle_to_target.y)
    z = 0  #not used
    target_pos = PositionVector(x, y, z)

    return target_pos
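The core of the conversion above is the projection x = alt * tan(angle); a one-line worked example with illustrative numbers:

import math

# At 10 m above terrain, an earth-frame angle of 0.1 rad maps to roughly a 1.0 m horizontal offset.
offset_m = 10.0 * math.tan(0.1)  # ~1.003 m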
Example no. 19
	def inside_landing_area(self):

		vehPos = PositionVector.get_from_location(veh_control.get_location())
		landPos = PositionVector.get_from_location(veh_control.get_landing())
		'''
		vehPos = PositionVector.get_from_location(Location(0,0,10))
		landPos = PositionVector.get_from_location(Location(0,0,0))
		'''
		if(PositionVector.get_distance_xy(vehPos,landPos) < self.landing_area_radius):
			#below area
			if(vehPos.z < self.landing_area_min_alt):
				return -1
			#in area
			else:
				return 1
		#outside area
		else:
			return 0
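inside_landing_area encodes three cases: -1 below the landing area, 1 inside it, 0 outside. A hedged sketch of how a caller might branch on the result (lander is a hypothetical object exposing the method):

# Interpret the three-way result of inside_landing_area().
state = lander.inside_landing_area()
if state == 1:
    pass   # inside the landing cylinder: continue the precision descent
elif state == -1:
    pass   # below the minimum altitude: treat as landed / stop tracking
else:
    pass   # outside the area: navigate back toward the landing point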
Example no. 20
    def inside_landing_area(self):

        vehPos = PositionVector.get_from_location(self.vehicle.location.global_relative_frame)
        landPos = PositionVector.get_from_location(PositionVector.get_home_location())
        '''
        vehPos = PositionVector.get_from_location(Location(0,0,10))
        landPos = PositionVector.get_from_location(Location(0,0,0))
        '''
        if(PositionVector.get_distance_xy(vehPos,landPos) < self.landing_area_radius):
            #below area
            if(vehPos.z < self.landing_area_min_alt):
                print "VehPos.Z: %f " % (vehPos.z)
                return 1
            #in area
            else:
                return 1
        #outside area
        else:
            return 0
Example no. 21
    def inside_landing_area(self):

        vehPos = PositionVector.get_from_location(veh_control.get_location())
        landPos = PositionVector.get_from_location(veh_control.get_landing())
        '''
		vehPos = PositionVector.get_from_location(Location(0,0,10))
		landPos = PositionVector.get_from_location(Location(0,0,0))
		'''
        if (PositionVector.get_distance_xy(vehPos, landPos) <
                self.landing_area_radius):
            #below area
            if (vehPos.z < self.landing_area_min_alt):
                return -1
            #in area
            else:
                return 1
        #outside area
        else:
            return 0
    def analyze_image(self):

        # record time
        now = time.time()

        # capture vehicle position
        self.vehicle_pos = PositionVector.get_from_location(
            self.vehicle.location.global_relative_frame)

        # capture vehicle attitude in buffer
        self.att_hist.update()

        # get delayed attitude from buffer
        veh_att_delayed = self.att_hist.get_attitude(now - self.attitude_delay)

        # get new image from camera
        f = self.get_frame()

        # look for balloon in image using blob detector
        self.balloon_found, xpos, ypos, size = balloon_finder.analyse_frame(f)

        # add artificial horizon
        balloon_finder.add_artificial_horizon(f, veh_att_delayed.roll,
                                              veh_att_delayed.pitch)

        if self.balloon_found:
            # record time balloon was found
            self.last_spotted_time = now

            # convert x, y position to pitch and yaw direction (in radians)
            self.balloon_pitch, self.balloon_heading = balloon_finder.pixels_to_direction(
                xpos, ypos, veh_att_delayed.roll, veh_att_delayed.pitch,
                veh_att_delayed.yaw)
            self.balloon_pitch = math.radians(self.balloon_pitch)
            self.balloon_pitch_top = self.balloon_pitch + balloon_video.pixels_to_angle_y(
                size)  # add balloon radius so we aim for top of balloon
            self.balloon_heading = math.radians(self.balloon_heading)

            # get distance
            self.balloon_distance = get_distance_from_pixels(
                size, balloon_finder.balloon_radius_expected)

            # updated estimated balloon position
            self.balloon_pos = balloon_finder.project_position(
                self.vehicle_pos, self.balloon_pitch, self.balloon_heading,
                self.balloon_distance)

        # save image for debugging later
        if self.writer is not None:
            self.writer.write(f)

        # increment frames analysed for stats
        self.num_frames_analysed += 1
        if self.stats_start_time == 0:
            self.stats_start_time = time.time()
Example no. 23
	def __init__(self):
		self.targetLocation = PositionVector()
		self.vehicleLocation = PositionVector()

		self.backgroundColor = (74,88,109)


		#load target
		filename = sc_config.config.get_string('simulator', 'target_location', '/home/dannuge/SmartCamera/target.jpg')
		target_size = sc_config.config.get_float('algorithm', 'outer_ring', 1.0)
		self.load_target(filename,target_size)


		#define camera
		self.camera_width = sc_config.config.get_integer('camera', 'width', 640)
		self.camera_height = sc_config.config.get_integer('camera', 'height', 640)
		self.camera_vfov = sc_config.config.get_float('camera', 'vertical-fov',72.42 )
		self.camera_hfov = sc_config.config.get_float('camera', 'horizontal-fov', 72.42)
		self.camera_fov = math.sqrt(self.camera_vfov**2 + self.camera_hfov**2)
		self.camera_frameRate = 30
    def draw_fake_balloon(self, frame, veh_pos, balloon_pos, vehicle_roll, vehicle_pitch, vehicle_yaw):
        # calculate bearing to balloon
        bearing_to_balloon = PositionVector.get_bearing(veh_pos, balloon_pos)
        yaw_to_balloon = balloon_utils.wrap_PI(bearing_to_balloon-vehicle_yaw)

        # calculate earth frame pitch angle from vehicle to balloon
        pitch_to_balloon = vehicle_pitch + PositionVector.get_elevation(veh_pos, balloon_pos)

        #print "Fake Balloon Bearing:%f Pitch:%f Dist:%f" % (math.degrees(bearing_to_balloon), math.degrees(pitch_to_balloon), dist_to_balloon_xy)

        # calculate pixel position of balloon
        balloon_x = balloon_video.angle_to_pixels_x(yaw_to_balloon) + balloon_video.img_center_x
        balloon_y = balloon_video.angle_to_pixels_y(pitch_to_balloon) + balloon_video.img_center_y

        # calculate size of balloon in pixels from distance and size
        dist_to_balloon_xyz = PositionVector.get_distance_xyz(veh_pos, balloon_pos)
        balloon_radius = balloon_utils.get_pixels_from_distance(dist_to_balloon_xyz, balloon_finder.balloon_radius_expected)

        # store balloon radius
        self.last_balloon_radius = balloon_radius

        # draw balloon
        cv2.circle(frame,(balloon_x,balloon_y), balloon_radius, self.fake_balloon_colour_bgr_scalar, -1)
    def analyze_image(self):

        # record time
        now = time.time()

        # capture vehicle position
        self.vehicle_pos = PositionVector.get_from_location(self.vehicle.location.global_relative_frame)

        # capture vehicle attitude in buffer
        self.att_hist.update()

        # get delayed attitude from buffer
        veh_att_delayed = self.att_hist.get_attitude(now - self.attitude_delay)

        # get new image from camera
        f = self.get_frame()

        # look for balloon in image using blob detector        
        self.balloon_found, xpos, ypos, size = balloon_finder.analyse_frame(f)

        # add artificial horizon
        balloon_finder.add_artificial_horizon(f, veh_att_delayed.roll, veh_att_delayed.pitch)

        if self.balloon_found:
            # record time balloon was found
            self.last_spotted_time = now
        
            # convert x, y position to pitch and yaw direction (in radians)
            self.balloon_pitch, self.balloon_heading = balloon_finder.pixels_to_direction(xpos, ypos, veh_att_delayed.roll, veh_att_delayed.pitch, veh_att_delayed.yaw)
            self.balloon_pitch = math.radians(self.balloon_pitch)
            self.balloon_pitch_top = self.balloon_pitch + balloon_video.pixels_to_angle_y(size)  # add balloon radius so we aim for top of balloon
            self.balloon_heading = math.radians(self.balloon_heading)

            # get distance
            self.balloon_distance = get_distance_from_pixels(size, balloon_finder.balloon_radius_expected)

            # updated estimated balloon position
            self.balloon_pos = balloon_finder.project_position(self.vehicle_pos, self.balloon_pitch, self.balloon_heading, self.balloon_distance)

        # save image for debugging later
        if self.writer is not None:
            self.writer.write(f)

        # increment frames analysed for stats
        self.num_frames_analysed += 1
        if self.stats_start_time == 0:
            self.stats_start_time = time.time()
Example no. 26
    def main(self):

        # set home to tridge's home field (absolute alt = 270)
        PositionVector.set_home_location(
            LocationGlobal(-35.362938, 149.165085, 0))

        # calculate balloon position
        fake_balloon_pos = PositionVector.get_from_location(
            self.fake_balloon_location)

        # vehicle attitude and position
        veh_pos = PositionVector(0, 0, fake_balloon_pos.z)  # at home location
        veh_roll = math.radians(0)  # no roll (0 deg)
        veh_pitch = math.radians(0)  # pitched back at 0 deg
        veh_yaw = PositionVector.get_bearing(
            veh_pos, fake_balloon_pos)  # facing towards fake balloon

        # display positions from home
        print "Vehicle %s" % veh_pos
        print "Balloon %s" % fake_balloon_pos

        # generate simulated frame of balloon 10m north, 2m above vehicle
        img = self.get_simulated_frame(veh_pos, veh_roll, veh_pitch, veh_yaw)

        while (True):
            # move vehicle towards balloon
            veh_pos = veh_pos + (fake_balloon_pos - veh_pos) * 0.01

            # regenerate frame
            img = self.get_simulated_frame(veh_pos, veh_roll, veh_pitch,
                                           veh_yaw)

            # look for balloon in image using blob detector
            found_in_image, xpos, ypos, size = balloon_finder.analyse_frame(
                img)

            # display estimated vs actual distance
            dist_actual = PositionVector.get_distance_xyz(
                veh_pos, fake_balloon_pos)
            dist_est = balloon_utils.get_distance_from_pixels(
                size, balloon_finder.balloon_radius_expected)
            print "Dist Est:%f  Act:%f   Size Est:%f  Act:%f" % (
                dist_est, dist_actual, size, self.last_balloon_radius)

            # show image
            cv2.imshow("fake balloon", img)

            # wait for keypress
            k = cv2.waitKey(5) & 0xFF
            if k == 27:
                break

        # destroy windows
        cv2.destroyAllWindows()
Example no. 27
class VModule(object):
    def __init__(self):
        # the framerate
        self.steps = 10
        self.triggered = False
        self.targetno = 1

    def show_frame(self, no):
        threshfile = "/home/ardupilot/droneapi-python/example/my_app/stereo/thresh" + str(no) + ".png"
        circfile = "/home/ardupilot/droneapi-python/example/my_app/stereo/circled" + str(no) + ".png"
        thresh = cv2.imread(threshfile)
        circ = cv2.imread(circfile)
        cv2.namedWindow("stereo")
        cv2.startWindowThread()
        cv2.imshow("stereo", np.hstack((thresh, circ)))
        cv2.waitKey(1)
        return

    def location_report(self, tarloc, taralti):
        # get target and vehicle locations
        self.targetLoc = PositionVector()
        self.vehicleLoc = PositionVector()
        self.vehicleLoc.set_from_location(drn.get_location())

        full_loc = Location(float(tarloc[0]), float(tarloc[1]), float(taralti), is_relative=True)
        self.targetLoc.set_from_location(full_loc)
        loc = "Vehicle location: X: {} Y: {} Z: {}".format(
            self.vehicleLoc.x, self.vehicleLoc.y, self.vehicleLoc.z)
        alog.log.info(loc)

        target_loc = "Target location: X: {} Y: {} Z: {}".format(
            self.targetLoc.x, self.targetLoc.y, self.targetLoc.z)
        alog.log.info(target_loc)

        dist = self.vehicleLoc.get_distance_xyz(self.vehicleLoc, self.targetLoc)
        reld = "Relative distance: " + format(dist)
        alog.log.info(reld)
        return dist

        # load_target- load an image to simulate the target.

    def check_target1(self):
        tarloc = [33.775497, -84.396860]  # 33.775497 -84.396860   33.775632 -84.396806
        taralti = 30

        # wait for trigger over here
        reld = self.location_report(tarloc, taralti)
        if not (self.triggered):
            while int(reld) > 10:
                time.sleep(2)
                reld = self.location_report(tarloc, taralti)

        target = cv2.imread("/home/ardupilot/droneapi-python/example/my_app/stereo/1left.png")
        self.load_target(target, 11, 0.0, 0.0)

        # load_target- load an image to simulate the target.

    def check_target2(self):
        tarloc = [33.776888, -84.396367]  # 33.775497 -84.396860   33.776888 -84.396367
        taralti = 50
        self.targetno = 2
        # wait for trigger over here
        reld = self.location_report(tarloc, taralti)
        if not (self.triggered):
            while int(reld) > 10:
                time.sleep(2)
                reld = self.location_report(tarloc, taralti)

        target = cv2.imread("/home/ardupilot/droneapi-python/example/my_app/stereo/2left.png")
        self.load_target(target, 11, 0.0, 0.0)

    def check_target3(self):
        tarloc = [33.777845, -84.396523]  # 33.775497 -84.396860   33.776888 -84.396367	33.777845 -84.396523
        taralti = 70
        self.targetno = 3
        # wait for trigger over here
        reld = self.location_report(tarloc, taralti)
        if not (self.triggered):
            while int(reld) > 10:
                time.sleep(2)
                reld = self.location_report(tarloc, taralti)

        target = cv2.imread("/home/ardupilot/droneapi-python/example/my_app/stereo/3left.png")
        self.load_target(target, 11, 0.0, 0.0)

    def load_target(self, target, zoom, camera_constX, camera_constY):
        frame = CV.get_frame(target, camera_constX, camera_constY, zoom)
        self.load_window(target, frame, zoom, camera_constX, camera_constY)

    def load_window(self, target, frame, zoom, camera_constX, camera_constY):
        self.steps = zoom
        cv2.namedWindow("HUD")
        cv2.startWindowThread()
        while self.steps > 1.0:
            if (int(self.steps * 10) == 91) and (self.targetno == 1):  # ANALYZE THE VIDEO MODULE AND GET RESULTS
                alog.log.debug("Obstacle detected.")
                drn.right_drift(2)
                drn.resume()
                self.show_frame(self.targetno)
                camera_constX = 0.02
                camera_constY = 0.39
            if (int(self.steps * 10) == 91) and (self.targetno == 2):  # ANALYZE THE VIDEO MODULE AND GET RESULTS
                alog.log.debug("Obstacle detected.")
                drn.left_drift(6)
                drn.resume()
                self.show_frame(self.targetno)
                camera_constX = 0.47
                camera_constY = 0.4
            if (int(self.steps * 10) == 91) and (self.targetno == 3):  # ANALYZE THE VIDEO MODULE AND GET RESULTS
                alog.log.debug("Obstacle detected.")
                drn.left_drift(2)
                drn.resume()
                self.show_frame(self.targetno)
                camera_constX = 0.05
                camera_constY = 0.2
            st = "Steps: " + format(self.steps)
            alog.log.debug(st)
            cv2.imshow("HUD", frame)
            cv2.waitKey(1)
            time.sleep(0.1)
            self.steps = self.steps - 0.1
            self.load_target(target, self.steps, camera_constX, camera_constY)

        cv2.waitKey(3)
        cv2.destroyAllWindows()
        cv2.waitKey(1)
        alog.log.info("Exiting vision threads")
Example no. 28
class PrecisionLandSimulator():
    def __init__(self):
        self.targetLocation = PositionVector()
        self.vehicleLocation = PositionVector()

        self.backgroundColor = (74, 88, 109)

        #load target
        filename = sc_config.config.get_string(
            'simulator', 'target_location',
            '/home/dannuge/SmartCamera/target.jpg')
        target_size = sc_config.config.get_float('algorithm', 'outer_ring',
                                                 1.0)
        self.load_target(filename, target_size)

        #define camera
        self.camera_width = sc_config.config.get_integer(
            'camera', 'width', 640)
        self.camera_height = sc_config.config.get_integer(
            'camera', 'height', 640)
        self.camera_vfov = sc_config.config.get_float('camera', 'vertical-fov',
                                                      72.42)
        self.camera_hfov = sc_config.config.get_float('camera',
                                                      'horizontal-fov', 72.42)
        self.camera_fov = math.sqrt(self.camera_vfov**2 + self.camera_hfov**2)
        self.camera_frameRate = 30

    #load_target - load an image to simulate the target. Enter the actual target size in meters (assuming the target is square)
    def load_target(self, filename, actualSize):
        self.target = cv2.imread(filename)
        self.target_width = self.target.shape[1]
        self.target_height = self.target.shape[0]

        self.actualSize = actualSize
        #scaling factor for real dimensions to simulator pixels
        self.pixels_per_meter = (self.target_height +
                                 self.target_width) / (2.0 * actualSize)

    #set_target_location- give the target a gps location
    def set_target_location(self, location):
        self.targetLocation.set_from_location(location)

    #refresh_simulator - update vehicle position info necessary to simulate an image
    def refresh_simulator(self, vehicleLocation, vehicleAttitude):
        #get gps location of vehicle
        self.vehicleLocation.set_from_location(vehicleLocation)

        self.vehicleAttitude = vehicleAttitude

    #main - code used to test the simulator. Must be run from SITL. Control vehicle in guided mode using arrow keys, r, t, q, e
    def main(self):
        veh_control.connect(local_connect())

        self.set_target_location(veh_control.get_location())

        while (veh_control.is_connected()):
            location = veh_control.get_location()
            attitude = veh_control.get_attitude()

            self.refresh_simulator(location, attitude)
            frame = self.get_frame()
            cv2.imshow('frame', frame)

            key = cv2.waitKey(1)
            print key

            if key == 1113938:
                veh_control.set_velocity(2, 0, 0)  #forward
            elif key == 1113940:
                veh_control.set_velocity(-2, 0, 0)  #backward
            elif key == 1113937:
                veh_control.set_velocity(0, -2, 0)  #left
            elif key == 1113939:
                veh_control.set_velocity(0, 2, 0)  #right
            elif (key == 1048690):
                yaw = math.degrees(attitude.yaw)  #yaw left
                veh_control.set_yaw(yaw - 5)
            elif (key == 1048692):
                yaw = math.degrees(attitude.yaw)  #yaw right
                veh_control.set_yaw(yaw + 5)
            elif (key == 1048677):
                veh_control.set_velocity(0, 0, -2)  #down
            elif (key == 1048689):
                veh_control.set_velocity(0, 0, 2)  #up
            else:
                veh_control.set_velocity(0, 0, 0)  #still

    #project_3D_to_2D - project a 3d point onto a 2d plane. Convert from world perspective to camera perspective
    def project_3D_to_2D(self, thetaX, thetaY, thetaZ, aX, aY, aZ, cX, cY, cZ,
                         height, width, fov):
        dX = math.cos(-thetaY) * (math.sin(-thetaZ) *
                                  (cY - aY) + math.cos(-thetaZ) *
                                  (cX - aX)) - math.sin(-thetaY) * (aZ - cZ)
        dY = math.sin(-thetaX) * (
            math.cos(-thetaY) * (aZ - cZ) + math.sin(-thetaY) *
            (math.sin(-thetaZ) * (cY - aY) + math.cos(-thetaZ) *
             (cX - aX))) + math.cos(-thetaX) * (math.cos(-thetaZ) *
                                                (cY - aY) - math.sin(-thetaZ) *
                                                (cX - aX))
        dZ = math.cos(-thetaX) * (
            math.cos(-thetaY) * (aZ - cZ) + math.sin(-thetaY) *
            (math.sin(-thetaZ) * (cY - aY) + math.cos(-thetaZ) *
             (cX - aX))) - math.sin(-thetaX) * (math.cos(-thetaZ) *
                                                (cY - aY) - math.sin(-thetaZ) *
                                                (cX - aX))

        #viewer position
        eX = 0
        eY = 0
        eZ = 1.0 / math.tan(math.radians(fov) / 2.0)

        #2D point
        bX = (dX - eX) * (eZ / dZ)
        bY = (dY - eY) * (eZ / dZ)

        #scaled to resolution
        sX = bX * width
        sY = bY * height

        return (sX, sY)

    #simulate_target - simulate an image given the target position [aX,aY,aZ] (pixels), camera position [cX,cY,cZ] (pixels), and camera orientation
    def simulate_target(self, thetaX, thetaY, thetaZ, aX, aY, aZ, cX, cY, cZ,
                        camera_height, camera_width, fov):
        img_width = self.target_width
        img_height = self.target_height

        #point maps
        corners = np.float32([[-img_width / 2, img_height / 2],
                              [img_width / 2, img_height / 2],
                              [-img_width / 2, -img_height / 2],
                              [img_width / 2, -img_height / 2]])
        newCorners = np.float32([[0, 0], [0, 0], [0, 0], [0, 0]])

        #calculate projection for four corners of image
        for i in range(0, len(corners)):

            #shift to world
            x = corners[i][0] + cX - img_width / 2.0
            y = corners[i][1] + cY - img_height / 2.0

            #calculate perspective and position
            x, y = self.project_3D_to_2D(thetaX, thetaY, thetaZ, aY, aX, aZ, y,
                                         x, cZ, camera_height, camera_width,
                                         fov)

            #shift to camera
            x, y = shift_to_image((x, y), camera_width, camera_height)
            newCorners[i] = x, y

        #project image
        M = cv2.getPerspectiveTransform(corners, newCorners)
        sim = cv2.warpPerspective(self.target,
                                  M, (self.camera_width, self.camera_height),
                                  borderValue=self.backgroundColor)

        return sim

    #get_frame - retrieve a simulated camera image
    def get_frame(self):
        start = current_milli_time()

        #distance between camera and target in meters
        aX, aY, aZ = self.targetLocation.x, self.targetLocation.y, self.targetLocation.z
        cX, cY, cZ = self.vehicleLocation.x, self.vehicleLocation.y, self.vehicleLocation.z

        #camera angle
        thetaX = self.vehicleAttitude.pitch
        thetaY = self.vehicleAttitude.roll
        thetaZ = self.vehicleAttitude.yaw

        #convert distance between camera and target to pixels
        aX = aX * self.pixels_per_meter
        aY = aY * self.pixels_per_meter
        aZ = aZ * self.pixels_per_meter
        cX = cX * self.pixels_per_meter
        cY = cY * self.pixels_per_meter
        cZ = cZ * self.pixels_per_meter

        #render image
        sim = self.simulate_target(thetaX, thetaY, thetaZ, aX, aY, aZ, cX, cY,
                                   cZ, self.camera_height, self.camera_width,
                                   self.camera_fov)

        #simulate framerate
        while (1000 / self.camera_frameRate > current_milli_time() - start):
            pass
        return sim
Example no. 29
vehicle.armed = True
while not vehicle.armed:
    print " Waiting for arming..."
    time.sleep(1)
print " Vehicle is armed: %s" % vehicle.armed

#get Vehicle Home location
while not vehicle.home_location:
    cmds = vehicle.commands
    cmds.download()
    cmds.wait_ready()
    if not vehicle.home_location:
        print " Waiting for home location ..."

#Set origin point to home
PositionVector.set_home_location(vehicle.home_location)

#Read waypoints from mission file
waypoints = mission_to_locations(read_mission("thunderhill.mission"))

#navigate waypoints in manual mode
DIST_THRESH = 4  #distance to waypoint before moving onto next waypoint
P_TURN = 1  #p term of navigation controller
SPEED = 100  #throttle level

wp_cnt = 1
for wp in waypoints:
    #convert global lat/lon to local meters (position vectors)
    wp_posvec = PositionVector.get_from_location(wp)
    veh_posvec = PositionVector.get_from_location(
        vehicle.location.global_relative_frame)
Example no. 30
 def get_location(self, timestamp):
     pnt = self._interpolate(self.location_buffer, timestamp)
     if pnt is not None:
         pos = PositionVector(pnt.x, pnt.y, pnt.z)
         return pos.get_location()
     return None
Example no. 31
while not vehicle.armed:
    print " Waiting for arming..."
    time.sleep(1)
print " Vehicle is armed: %s" % vehicle.armed


#get Vehicle Home location
while not vehicle.home_location:
    cmds = vehicle.commands
    cmds.download()
    cmds.wait_ready()
    if not vehicle.home_location:
        print " Waiting for home location ..."

#Set origin point to home
PositionVector.set_home_location(vehicle.home_location)

#Read waypoints from mission file
waypoints = mission_to_locations(read_mission("../thunderhill.mission"))

#navigate waypoints in manual mode
DIST_THRESH = 4 #distance to waypoint before moving onto next waypoint
P_TURN = 1  #p term of navigation controller
SPEED = 100 #throttle level

wp_cnt = 1
for wp in waypoints:
    #convert global lat/lon to local meters (position vectors)
    wp_posvec = PositionVector.get_from_location(wp)
    veh_posvec = PositionVector.get_from_location(vehicle.location.global_relative_frame)
    dist = PositionVector.get_distance_xy(veh_posvec,wp_posvec)
Example no. 32
#Python Imports
import math
import time
import os

#Dronekit Imports
from dronekit import VehicleMode, Attitude, connect, LocationGlobalRelative
from dronekit_sitl import SITL

#Common Library Imports
from position_vector import PositionVector
import flight_assist

#List of global variables
targetLocation = PositionVector()
vehicleLocation = PositionVector()
vehicleAttitude = 0
backgroundColor = (74,88,109)
filename = (os.path.dirname(os.path.realpath(__file__)))+"/Resources/target.PNG"
target_size = 1.5
camera_width = 640
camera_height = 480
camera_vfov = 60
camera_hfov = 60
camera_fov = math.sqrt(camera_vfov**2 + camera_hfov**2)
camera_frameRate = 30
current_milli_time = lambda: int(round(time.time() * 1000))

def load_target(filename, actualS=1.5):
	global target, target_width, target_height
Example no. 33
 def get_location(self,timestamp):
     pnt = self._interpolate(self.location_buffer,timestamp)
     if pnt is not None:
         pos = PositionVector(pnt.x, pnt.y, pnt.z)
         return pos.get_location()
     return None
parser.add_argument(
    '--connect',
    help=
    "vehicle connection target string. If not specified, SITL is automatically started and used."
)
parser.add_argument(
    '--baudrate',
    type=int,
    help=
    "Vehicle baudrate setting. Specify 57600 when connecting with a 3DR telemetry radio.",
    default=115200)

args = parser.parse_args()

connection_string = args.connect
pos1 = PositionVector(10, 0, 5)
pos2 = PositionVector(10, -10, 5)
dist_tol = 1.0  #used to determine if uav has reached allocated position

if not args.connect:
    print "vehicle path not specified"
    sys.exit(1)  #abort cause of error..

# Connect to the Vehicle.
#   Set `wait_ready=True` to ensure default attributes are populated before `connect()` returns
print "\nConnecting to vehicle on: %s" % connection_string
vehicle = connect(connection_string, wait_ready=True, baud=args.baudrate)
v = Vehicle_params()
param = Params(vehicle, v)
#get home position ...
home_position = v.home_location
Example no. 35
class PrecisionLandSimulator():


	def __init__(self):
		self.targetLocation = PositionVector()
		self.vehicleLocation = PositionVector()

		self.backgroundColor = (74,88,109)


		#load target
		#filename = sc_config.config.get_string('simulator', 'target_location', '/home/odroid/SmartCamera/red.jpg')
		filename = sc_config.config.get_string('simulator', 'target_location', '/home/odroid/SmartCamera/target.jpg')
		target_size = sc_config.config.get_float('algorithm', 'outer_ring', 1.0)
		self.load_target(filename,target_size)


		#define camera
		self.camera_width = sc_config.config.get_integer('camera', 'width', 640)
		self.camera_height = sc_config.config.get_integer('camera', 'height', 640)
		self.camera_vfov = sc_config.config.get_float('camera', 'vertical-fov',72.42 )
		self.camera_hfov = sc_config.config.get_float('camera', 'horizontal-fov', 72.42)
		self.camera_fov = math.sqrt(self.camera_vfov**2 + self.camera_hfov**2)
		self.camera_frameRate = 30

	#load_target - load an image to simulate the target. Enter the actual target size in meters (assuming the target is square)
	def load_target(self,filename, actualSize):
		self.target = cv2.imread(filename)
		self.target_width = self.target.shape[1]
		self.target_height = self.target.shape[0]


		self.actualSize = actualSize
		#scaling factor for real dimensions to simulator pixels
		self.pixels_per_meter = (self.target_height + self.target_width) / (2.0 * actualSize) 


	#set_target_location- give the target a gps location
	def set_target_location(self, location):
		self.targetLocation.set_from_location(location)


	#refresh_simulator - update vehicle position info necessary to simulate an image 
	def refresh_simulator(self, vehicleLocation, vehicleAttitude):
		#get gps location of vehicle
		self.vehicleLocation.set_from_location(vehicleLocation)

		self.vehicleAttitude = vehicleAttitude


	#main - code used to test the simulator. Must be run from SITL. Control vehicle in guided mode using arrow keys, r, t, q, e
	def main(self):
		veh_control.connect(local_connect())

		self.set_target_location(veh_control.get_location())

		while(veh_control.is_connected()):
		    location = veh_control.get_location()
		    attitude = veh_control.get_attitude()
		    
		    self.refresh_simulator(location,attitude)
		    frame = self.get_frame()
		    cv2.imshow('frame',frame)

		    key = cv2.waitKey(1)
		    print key

		    if key ==1113938:
		    	veh_control.set_velocity(2,0,0) #forward
		    elif key == 1113940:
		    	veh_control.set_velocity(-2,0,0) #backward
		    elif key == 1113937:
		    	veh_control.set_velocity(0,-2,0) #left
		    elif key ==1113939:
		    	veh_control.set_velocity(0,2,0) #right
		    elif(key == 1048690):
		    	yaw = math.degrees(attitude.yaw) #yaw left
		    	veh_control.set_yaw(yaw - 5)
		    elif(key == 1048692):
		    	yaw = math.degrees(attitude.yaw) #yaw right
		    	veh_control.set_yaw(yaw + 5)
		    elif(key == 1048677):
		    	veh_control.set_velocity(0,0,-2) #down
		    elif(key == 1048689):
		    	veh_control.set_velocity(0,0,2) #up
		    else:
				veh_control.set_velocity(0,0,0) #still


	#project_3D_to_2D - project a 3d point onto a 2d plane. Convert from world perspective to camera perspective
	def project_3D_to_2D(self,thetaX,thetaY,thetaZ, aX, aY,aZ, cX, cY, cZ, height, width, fov):
		dX = math.cos(-thetaY) * (math.sin(-thetaZ)*(cY-aY) + math.cos(-thetaZ)*(cX-aX)) - math.sin(-thetaY)*(aZ-cZ)
		dY = math.sin(-thetaX) * (math.cos(-thetaY)*(aZ-cZ) + math.sin(-thetaY)*(math.sin(-thetaZ)*(cY-aY) + math.cos(-thetaZ)*(cX-aX))) + math.cos(-thetaX)*(math.cos(-thetaZ)*(cY-aY) - math.sin(-thetaZ) * (cX-aX))
		dZ = math.cos(-thetaX) * (math.cos(-thetaY)*(aZ-cZ) + math.sin(-thetaY)*(math.sin(-thetaZ)*(cY-aY) + math.cos(-thetaZ)*(cX-aX))) - math.sin(-thetaX)*(math.cos(-thetaZ)*(cY-aY) - math.sin(-thetaZ) * (cX-aX))

		#viewer position
		eX = 0
		eY = 0
		eZ = 1.0/math.tan(math.radians(fov)/2.0)

		#2D point
		bX = (dX - eX)*(eZ/dZ)
		bY = (dY - eY)*(eZ/dZ)

		#scaled to resolution
		sX = bX * width
		sY = bY * height

		return (sX,sY)


	#simulate_target - simulate an image given the target position [aX,aY,aZ] (pixels), camera position [cX,cY,cZ] (pixels), and camera orientation
	def simulate_target(self,thetaX,thetaY,thetaZ, aX, aY, aZ, cX, cY, cZ, camera_height, camera_width, fov):
		img_width = self.target_width
		img_height = self.target_height

		#point maps
		corners = np.float32([[-img_width/2,img_height/2],[img_width/2 ,img_height/2],[-img_width/2,-img_height/2],[img_width/2, -img_height/2]])
		newCorners = np.float32([[0,0],[0,0],[0,0],[0,0]])


		#calculate projection for four corners of image
		for i in range(0,len(corners)):

			#shift to world
			x = corners[i][0] + cX - img_width/2.0
			y = corners[i][1] + cY - img_height/2.0


			#calculate perspective and position
			x , y = self.project_3D_to_2D(thetaX,thetaY,thetaZ, aY, aX, aZ, y, x, cZ,camera_height,camera_width,fov) 

			#shift to camera
			x , y = shift_to_image((x,y),camera_width,camera_height)
			newCorners[i] = x,y  


		#project image
		M = cv2.getPerspectiveTransform(corners,newCorners)
		sim = cv2.warpPerspective(self.target,M,(self.camera_width,self.camera_height),borderValue=self.backgroundColor)

		return sim



	#get_frame - retrieve a simulated camera image
	def get_frame(self):
		start = current_milli_time()

		#distance between camera and target in meters
		aX,aY,aZ = self.targetLocation.x, self.targetLocation.y, self.targetLocation.z
		cX,cY,cZ = self.vehicleLocation.x, self.vehicleLocation.y, self.vehicleLocation.z

		#camera angle
		thetaX = self.vehicleAttitude.pitch
		thetaY = self.vehicleAttitude.roll
		thetaZ = self.vehicleAttitude.yaw

		#convert distance between camera and target to pixels
		aX = aX * self.pixels_per_meter
		aY = aY * self.pixels_per_meter
		aZ = aZ * self.pixels_per_meter
		cX = cX * self.pixels_per_meter
		cY = cY * self.pixels_per_meter
		cZ = cZ * self.pixels_per_meter
		

		#render image
		sim = self.simulate_target(thetaX,thetaY,thetaZ, aX, aY, aZ, cX, cY, cZ, self.camera_height, self.camera_width, self.camera_fov)
		
		#simulate framerate
		while(1000/self.camera_frameRate > current_milli_time() - start):
			pass
		return sim