Example No. 1
    def __init__(self, videopath, calibrationfile, bblpath, cam_angle_degrees=0, initial_offset=0):
        # General video stuff
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))


        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(1.6,new_img_dim=(self.width,self.height))

        # Get gyro data
        self.bbe = BlackboxExtractor(bblpath)
        self.gyro_data = self.bbe.get_gyro_data(cam_angle_degrees=cam_angle_degrees)

        # This seems to make the orientation match. Implement auto match later
        self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]
        self.gyro_data[:,2] = -self.gyro_data[:,2]

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        self.integrator = GyroIntegrator(self.gyro_data,initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None

        self.initial_offset = initial_offset
Example No. 2
    def manual_sync_correction(self, d1, d2, smooth=0.8):
        v1 = self.v1
        v2 = self.v2

        transforms1 = self.transforms1
        transforms2 = self.transforms2
        times1 = self.times1
        times2 = self.times2

        print("v1: {}, v2: {}, d1: {}, d2: {}".format(v1, v2, d1, d2))



        err_slope = (d2-d1)/(v2-v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope*v1)#  + 1.5/self.fps
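        # The three lines above fit a linear drift model: d1 and d2 are the
        # gyro-vs-video offsets measured at video times v1 and v2, err_slope is the
        # drift per second of video, correction_slope rescales gyro time to
        # compensate, and gyro_start is the offset extrapolated back to time zero.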

        interval = 1/(correction_slope * self.fps)

        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))



        #plt.plot(times1, transforms1[:,2] * self.fps)
        #plt.plot(times2, transforms2[:,2] * self.fps)
        #plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("z"))
        #plt.plot((self.integrator.get_raw_data("t") + d2), self.integrator.get_raw_data("z"))
        #plt.plot((self.integrator.get_raw_data("t") + d1), self.integrator.get_raw_data("z"))
        #plt.figure()
        #plt.plot(times1, -transforms1[:,0] * self.fps)
        #plt.plot(times2, -transforms2[:,0] * self.fps)
        #plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("x"))
        
        #plt.figure()
        
        #plt.plot(times1, -transforms1[:,1] * self.fps)
        #plt.plot(times2, -transforms2[:,1] * self.fps)
        #plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("y"))

        #plt.show()

        # Temp new integrator with corrected time scale

        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        new_gyro_data = self.gyro_data.copy()  # copy so the original timestamps are not modified in place

        # Correct time scale
        new_gyro_data[:,0] = (new_gyro_data[:,0]+gyro_start) *correction_slope

        new_integrator = GyroIntegrator(new_gyro_data,zero_out_time=False, initial_orientation=initial_orientation)
        new_integrator.integrate_all()

        # Doesn't work for BBL for some reason. TODO: Figure out why
        #self.times, self.stab_transform = new_integrator.get_interpolated_stab_transform(smooth=smooth,start=0,interval = 1/self.fps)

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(smooth=smooth,start=-gyro_start,interval = interval)
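
manual_sync_correction() relies on the v1, v2, times1/times2 and transforms1/transforms2 attributes that auto_sync_stab() (Example No. 8) stores on the instance, so it is meant to refine an automatic sync with hand-tuned offsets. A minimal, hypothetical call sequence, assuming a Stabilizer subclass such as the GPMFStabilizer from Example No. 7 exposes both methods (paths are placeholders):

# Hypothetical usage sketch; d1/d2 would normally be read off the comparison plots.
stab = GPMFStabilizer("test_clips/GX015563.MP4", "camera_calibration.json", hero=8)
stab.auto_sync_stab(smooth=0.8, sliceframe1=10, sliceframe2=1000, slicelength=50)
stab.manual_sync_correction(0.042, -0.396, smooth=0.8)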
Example No. 3
    def __init__(self, videopath, calibrationfile, gyrocsv, fov_scale = 1.6):
        # General video stuff
        self.undistort_fov_scale = fov_scale
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))


        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(self.undistort_fov_scale,new_img_dim=(self.width,self.height))

        # Get gyro data

        self.gyro_data = self.instaCSVGyro(gyrocsv)
        hero = 6

        # Hero 6??
        if hero == 6:
            self.gyro_data[:,1] = self.gyro_data[:,1]
            self.gyro_data[:,2] = -self.gyro_data[:,2]
            self.gyro_data[:,3] = self.gyro_data[:,3]
        elif hero == 5:
            self.gyro_data[:,1] = -self.gyro_data[:,1]
            self.gyro_data[:,2] = self.gyro_data[:,2]
            self.gyro_data[:,3] = -self.gyro_data[:,3]
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        elif hero == 8:
            # Hero 8??
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        

        #gyro_data[:,1] = gyro_data[:,1]
        #gyro_data[:,2] = -gyro_data[:,2]
        #gyro_data[:,3] = gyro_data[:,3]

        #gyro_data[:,1:] = -gyro_data[:,1:]

        #points_test = np.array([[[0,0],[100,100],[200,300],[400,400]]], dtype = np.float32)
        #result = self.undistort.undistort_points(points_test, new_img_dim=(self.width,self.height))
        #print(result)
        #exit()

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        self.integrator = GyroIntegrator(self.gyro_data,zero_out_time=False,initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None


        self.initial_offset = 0
Example No. 4
class BBLStabilizer(Stabilizer):
    def __init__(self, videopath, calibrationfile, bblpath, cam_angle_degrees=0, initial_offset=0):
        # General video stuff
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))


        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(1.6,new_img_dim=(self.width,self.height))

        # Get gyro data
        self.bbe = BlackboxExtractor(bblpath)
        self.gyro_data = self.bbe.get_gyro_data(cam_angle_degrees=cam_angle_degrees)

        # This seems to make the orientation match. Implement auto match later
        self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]
        self.gyro_data[:,2] = -self.gyro_data[:,2]

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        self.integrator = GyroIntegrator(self.gyro_data,initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None

        self.initial_offset = initial_offset

    
    def stabilization_settings(self, smooth = 0.99):


        v1 = 20 / self.fps
        v2 = 900 / self.fps
        d1 = 0.042
        d2 = -0.396

        err_slope = (d2-d1)/(v2-v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope*v1)

        interval = 1/(correction_slope * self.fps)


        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))

        # Hardcoded sync values for this particular clip; the gyro_start/interval
        # computed above (and the smooth argument) are not used here.
        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(smooth=0.985,start=2.56+0.07,interval = 1/59.94)
Example No. 5
    def __init__(self, videopath, calibrationfile):
        # General video stuff
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))

        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(
            1.6, new_img_dim=(self.width, self.height))

        # Get gyro data
        self.gpmf = Extractor(videopath)
        self.gyro_data = self.gpmf.get_gyro(True)

        # Hero 6??
        #self.gyro_data[:,1] = self.gyro_data[:,1]
        #self.gyro_data[:,2] = -self.gyro_data[:,2]
        #self.gyro_data[:,3] = self.gyro_data[:,3]

        # Hero 8??
        self.gyro_data[:, [2, 3]] = self.gyro_data[:, [3, 2]]
        #gyro_data[:,1] = gyro_data[:,1]
        #gyro_data[:,2] = -gyro_data[:,2]
        #gyro_data[:,3] = gyro_data[:,3]

        #gyro_data[:,1:] = -gyro_data[:,1:]

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0],
                                                  degrees=True).as_quat()

        self.integrator = GyroIntegrator(
            self.gyro_data, initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None
Example No. 6
class InstaStabilizer(Stabilizer):
    def __init__(self, videopath, calibrationfile, gyrocsv, fov_scale = 1.6):
        # General video stuff
        self.undistort_fov_scale = fov_scale
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))


        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(self.undistort_fov_scale,new_img_dim=(self.width,self.height))

        # Get gyro data

        self.gyro_data = self.instaCSVGyro(gyrocsv)
        hero = 6

        # Hero 6??
        if hero == 6:
            self.gyro_data[:,1] = self.gyro_data[:,1]
            self.gyro_data[:,2] = -self.gyro_data[:,2]
            self.gyro_data[:,3] = self.gyro_data[:,3]
        elif hero == 5:
            self.gyro_data[:,1] = -self.gyro_data[:,1]
            self.gyro_data[:,2] = self.gyro_data[:,2]
            self.gyro_data[:,3] = -self.gyro_data[:,3]
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        elif hero == 8:
            # Hero 8??
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        

        #gyro_data[:,1] = gyro_data[:,1]
        #gyro_data[:,2] = -gyro_data[:,2]
        #gyro_data[:,3] = gyro_data[:,3]

        #gyro_data[:,1:] = -gyro_data[:,1:]

        #points_test = np.array([[[0,0],[100,100],[200,300],[400,400]]], dtype = np.float32)
        #result = self.undistort.undistort_points(points_test, new_img_dim=(self.width,self.height))
        #print(result)
        #exit()

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        self.integrator = GyroIntegrator(self.gyro_data,zero_out_time=False,initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None


        self.initial_offset = 0

    def instaCSVGyro(self, csvfile):
        gyrodata = []
        with open(csvfile) as f:
            reader = csv.reader(f, delimiter=",", quotechar='"')
            next(reader, None)
            for row in reader:
                gyro = [float(row[0])] + [float(val) for val in row[2].split(" ")] # Time + gyro
                gyrodata.append(gyro)

        gyrodata = np.array(gyrodata)
        print(gyrodata)
        return gyrodata
    
    def stabilization_settings(self, smooth = 0.95):


        v1 = 20 / self.fps
        v2 = 900 / self.fps
        d1 = 0.042
        d2 = -0.396

        err_slope = (d2-d1)/(v2-v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope*v1)

        interval = 1/(correction_slope * self.fps)


        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(smooth=smooth,start=-gyro_start,interval = interval) # 2.2/30 , -1/30
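
Based on the instaCSVGyro() parser above, the gyro CSV is assumed to contain a header row, a timestamp in the first column, and a space-separated gyro triplet in the third column; the second column is skipped. A hypothetical file in that layout (column names, units and values are made up for illustration):

# Writes a tiny CSV in the layout instaCSVGyro() appears to expect:
# header row, then rows of  time, <ignored column>, "gx gy gz".
import csv

with open("insta_gyro_example.csv", "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(["time", "accel", "gyro"])  # header row is skipped by the parser
    writer.writerow([0.000, "0 0 9.8", "0.010 -0.020 0.000"])
    writer.writerow([0.001, "0 0 9.8", "0.012 -0.018 0.001"])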
Example No. 7
class GPMFStabilizer(Stabilizer):
    def __init__(self, videopath, calibrationfile, hero = 8, fov_scale = 1.6):
        # General video stuff
        self.undistort_fov_scale = fov_scale
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))


        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(self.undistort_fov_scale,new_img_dim=(self.width,self.height))

        # Get gyro data
        self.gpmf = Extractor(videopath)
        self.gyro_data = self.gpmf.get_gyro(True)

        # Hero 6??
        if hero == 6:
            self.gyro_data[:,1] = self.gyro_data[:,1]
            self.gyro_data[:,2] = -self.gyro_data[:,2]
            self.gyro_data[:,3] = self.gyro_data[:,3]
        elif hero == 5:
            self.gyro_data[:,1] = -self.gyro_data[:,1]
            self.gyro_data[:,2] = self.gyro_data[:,2]
            self.gyro_data[:,3] = -self.gyro_data[:,3]
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        elif hero == 8:
            # Hero 8??
            self.gyro_data[:,[2, 3]] = self.gyro_data[:,[3, 2]]

        

        #gyro_data[:,1] = gyro_data[:,1]
        #gyro_data[:,2] = -gyro_data[:,2]
        #gyro_data[:,3] = gyro_data[:,3]

        #gyro_data[:,1:] = -gyro_data[:,1:]

        #points_test = np.array([[[0,0],[100,100],[200,300],[400,400]]], dtype = np.float32)
        #result = self.undistort.undistort_points(points_test, new_img_dim=(self.width,self.height))
        #print(result)
        #exit()

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        self.integrator = GyroIntegrator(self.gyro_data,initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None


        self.initial_offset = 0

    
    def stabilization_settings(self, smooth = 0.95):


        v1 = 20 / self.fps
        v2 = 900 / self.fps
        d1 = 0.042
        d2 = -0.396

        err_slope = (d2-d1)/(v2-v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope*v1)

        interval = 1/(correction_slope * self.fps)


        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(smooth=smooth,start=-gyro_start,interval = interval) # 2.2/30 , -1/30
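
As a quick numeric check of the sync math above, assuming a 59.94 fps clip (the rate hardcoded in Example No. 4):

# Worked example of the two-point sync model; fps = 59.94 is an assumption.
fps = 59.94
v1, v2 = 20 / fps, 900 / fps             # ~0.33 s and ~15.02 s into the video
d1, d2 = 0.042, -0.396                   # measured gyro-vs-video offsets at v1 and v2

err_slope = (d2 - d1) / (v2 - v1)        # ~-0.0298 s of drift per second of video
correction_slope = err_slope + 1         # ~0.970
gyro_start = d1 - err_slope * v1         # ~0.052 s offset at video time zero
interval = 1 / (correction_slope * fps)  # ~0.0172 s between resampled gyro points

print(gyro_start, interval)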
Example No. 8
    def auto_sync_stab(self, smooth=0.8, sliceframe1 = 10, sliceframe2 = 1000, slicelength = 50):
        v1 = (sliceframe1 + slicelength/2) / self.fps
        v2 = (sliceframe2 + slicelength/2) / self.fps
        d1, times1, transforms1 = self.optical_flow_comparison(sliceframe1, slicelength)
        #self.initial_offset = d1
        d2, times2, transforms2 = self.optical_flow_comparison(sliceframe2, slicelength)

        self.times1 = times1
        self.times2 = times2
        self.transforms1 = transforms1
        self.transforms2 = transforms2
        self.v1 = v1
        self.v2 = v2
        self.d1 = d1
        self.d2 = d2


        print("v1: {}, v2: {}, d1: {}, d2: {}".format(v1, v2, d1, d2))

        err_slope = (d2-d1)/(v2-v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope*v1)#  + 1.5/self.fps

        interval = 1/(correction_slope * self.fps)

        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))
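        # Visual sanity check: overlay the optical flow rotation rates from the two
        # slices with the gyro rates after applying the estimated offset and slope.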



        plt.plot(times1, transforms1[:,2] * self.fps)
        plt.plot(times2, transforms2[:,2] * self.fps)
        plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("z"))
        #plt.plot((self.integrator.get_raw_data("t") + d2), self.integrator.get_raw_data("z"))
        #plt.plot((self.integrator.get_raw_data("t") + d1), self.integrator.get_raw_data("z"))
        plt.figure()
        plt.plot(times1, -transforms1[:,0] * self.fps)
        plt.plot(times2, -transforms2[:,0] * self.fps)
        plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("x"))
        
        plt.figure()
        
        plt.plot(times1, -transforms1[:,1] * self.fps)
        plt.plot(times2, -transforms2[:,1] * self.fps)
        plt.plot((self.integrator.get_raw_data("t") + gyro_start)*correction_slope, self.integrator.get_raw_data("y"))

        plt.show()

        # Temp new integrator with corrected time scale

        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0], degrees=True).as_quat()

        new_gyro_data = self.gyro_data.copy()  # copy so the original timestamps are not modified in place

        # Correct time scale
        new_gyro_data[:,0] = (new_gyro_data[:,0]+gyro_start) *correction_slope

        new_integrator = GyroIntegrator(new_gyro_data,zero_out_time=True, initial_orientation=initial_orientation)
        new_integrator.integrate_all()

        # Doesn't work for BBL for some reason. TODO: Figure out why
        #self.times, self.stab_transform = new_integrator.get_interpolated_stab_transform(smooth=smooth,start=0,interval = 1/self.fps)

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(smooth=smooth,start=-gyro_start,interval = interval)
Example No. 9
map1, map2 = undistort.get_maps(2)

bb = BlackboxExtractor("test_clips/GX015563.MP4_emuf_004.bbl")

gyro_data = bb.get_gyro_data(cam_angle_degrees=-2)

gyro_data[:, [2, 3]] = gyro_data[:, [3, 2]]

gyro_data[:, 2] = -gyro_data[:, 2]

#gyro_data[:,1:] = -gyro_data[:,1:]

initial_orientation = Rotation.from_euler('xyz', [0, 0, 0],
                                          degrees=True).as_quat()

integrator = GyroIntegrator(gyro_data, initial_orientation=initial_orientation)
integrator.integrate_all()

times, stab_transform = integrator.get_interpolated_stab_transform(
    smooth=0.985, start=2.56 + 0.07, interval=1 / 59.94)

#cap = cv2.VideoCapture(inpath)

i = 0
while (True):
    # Read next frame
    success, frame = cap.read()
    if success:
        i += 1

    if i > 2000:
Example No. 10
class GPMFStabilizer:
    def __init__(self, videopath, calibrationfile):
        # General video stuff
        self.cap = cv2.VideoCapture(videopath)
        self.width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = self.cap.get(cv2.CAP_PROP_FPS)
        self.num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))

        # Camera undistortion stuff
        self.undistort = FisheyeCalibrator()
        self.undistort.load_calibration_json(calibrationfile, True)
        self.map1, self.map2 = self.undistort.get_maps(
            1.6, new_img_dim=(self.width, self.height))

        # Get gyro data
        self.gpmf = Extractor(videopath)
        self.gyro_data = self.gpmf.get_gyro(True)

        # Hero 6??
        #self.gyro_data[:,1] = self.gyro_data[:,1]
        #self.gyro_data[:,2] = -self.gyro_data[:,2]
        #self.gyro_data[:,3] = self.gyro_data[:,3]

        # Hero 8??
        self.gyro_data[:, [2, 3]] = self.gyro_data[:, [3, 2]]
        #gyro_data[:,1] = gyro_data[:,1]
        #gyro_data[:,2] = -gyro_data[:,2]
        #gyro_data[:,3] = gyro_data[:,3]

        #gyro_data[:,1:] = -gyro_data[:,1:]

        # Other attributes
        initial_orientation = Rotation.from_euler('xyz', [0, 0, 0],
                                                  degrees=True).as_quat()

        self.integrator = GyroIntegrator(
            self.gyro_data, initial_orientation=initial_orientation)
        self.integrator.integrate_all()
        self.times = None
        self.stab_transform = None

    def stabilization_settings(self, smooth=0.95):

        v1 = 20 / self.fps
        v2 = 900 / self.fps
        d1 = 0.042
        d2 = -0.396

        err_slope = (d2 - d1) / (v2 - v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope * v1)

        interval = 1 / (correction_slope * self.fps)

        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(
            smooth=smooth, start=-gyro_start,
            interval=interval)  # 2.2/30 , -1/30

    def auto_sync_stab(self,
                       smooth=0.8,
                       sliceframe1=10,
                       sliceframe2=1000,
                       slicelength=50):
        v1 = (sliceframe1 + slicelength / 2) / self.fps
        v2 = (sliceframe2 + slicelength / 2) / self.fps
        d1, times1, transforms1 = self.optical_flow_comparison(
            sliceframe1, slicelength)
        d2, times2, transforms2 = self.optical_flow_comparison(
            sliceframe2, slicelength)

        print("v1: {}, v2: {}, d1: {}, d2: {}".format(v1, v2, d1, d2))

        err_slope = (d2 - d1) / (v2 - v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope * v1) - 0.4 / self.fps

        interval = 1 / (correction_slope * self.fps)

        print("Start {}".format(gyro_start))

        print("Interval {}, slope {}".format(interval, correction_slope))

        plt.plot(times1, transforms1[:, 2])
        plt.plot(times2, transforms2[:, 2])
        plt.plot((self.integrator.get_raw_data("t") + gyro_start) *
                 correction_slope, self.integrator.get_raw_data("z"))
        plt.plot((self.integrator.get_raw_data("t") + d2),
                 self.integrator.get_raw_data("z"))
        plt.plot((self.integrator.get_raw_data("t") + d1),
                 self.integrator.get_raw_data("z"))
        plt.show()

        self.times, self.stab_transform = self.integrator.get_interpolated_stab_transform(
            smooth=smooth, start=-gyro_start,
            interval=interval)  # 2.2/30 , -1/30

    def optical_flow_comparison(self, start_frame=0, analyze_length=50):
        frame_times = []
        frame_idx = []
        transforms = []

        self.cap.set(cv2.CAP_PROP_POS_FRAMES, start_frame)

        # Read first frame
        _, prev = self.cap.read()
        prev_gray = cv2.cvtColor(prev, cv2.COLOR_BGR2GRAY)

        for i in range(analyze_length):
            prev_pts = cv2.goodFeaturesToTrack(prev_gray,
                                               maxCorners=200,
                                               qualityLevel=0.01,
                                               minDistance=30,
                                               blockSize=3)

            succ, curr = self.cap.read()

            frame_id = (int(self.cap.get(cv2.CAP_PROP_POS_FRAMES)))
            frame_time = (self.cap.get(cv2.CAP_PROP_POS_MSEC) / 1000)

            if i % 10 == 0:
                print("Analyzing frame: {}/{}".format(i, analyze_length))

            if succ:
                # Only add if succeeded
                frame_idx.append(frame_id)
                frame_times.append(frame_time)
                print(frame_time)

                curr_gray = cv2.cvtColor(curr, cv2.COLOR_BGR2GRAY)
                # Estimate transform using optical flow
                curr_pts, status, err = cv2.calcOpticalFlowPyrLK(
                    prev_gray, curr_gray, prev_pts, None)

                idx = np.where(status == 1)[0]
                prev_pts = prev_pts[idx]
                curr_pts = curr_pts[idx]
                assert prev_pts.shape == curr_pts.shape

                # TODO: Try getting undistort + homography working for more accurate rotation estimation
                #src_pts = undistort.undistort_points(prev_pts, new_img_dim=(int(width),int(height)))
                #dst_pts = undistort.undistort_points(curr_pts, new_img_dim=(int(width),int(height)))
                #H, mask = cv2.findHomography(src_pts, dst_pts)
                #retval, rots, trans, norms = undistort.decompose_homography(H, new_img_dim=(int(width),int(height)))

                m, inliers = cv2.estimateAffine2D(prev_pts, curr_pts)

                dx = m[0, 2]
                dy = m[1, 2]

                # Extract rotation angle
                da = np.arctan2(m[1, 0], m[0, 0])
                transforms.append([dx, dy, da])
                prev_gray = curr_gray

            else:
                print("Frame {}".format(i))

        transforms = np.array(transforms) * self.fps  # convert per-frame motion to per-second rates for comparison with the gyro
        estimated_offset = self.estimate_gyro_offset(frame_times, transforms)
        return estimated_offset, frame_times, transforms

        # Test stuff (unreachable: kept after the return above for reference)
        v1 = 20 / self.fps
        v2 = 1300 / self.fps
        d1 = 0.042
        d2 = -0.604

        err_slope = (d2 - d1) / (v2 - v1)
        correction_slope = err_slope + 1
        gyro_start = (d1 - err_slope * v1)

        interval = correction_slope * 1 / self.fps

        plt.plot(frame_times, transforms[:, 2])
        plt.plot((self.integrator.get_raw_data("t") + gyro_start) *
                 correction_slope, self.integrator.get_raw_data("z"))
        plt.show()

    def estimate_gyro_offset(self, OF_times, OF_transforms):
        # Estimate offset between small optical flow slice and gyro data

        gyro_times = self.integrator.get_raw_data("t")
        gyro_roll = self.integrator.get_raw_data("z")
        OF_roll = OF_transforms[:, 2]

        costs = []
        offsets = []
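        # Brute-force search: slide the gyro by offsets from +0.8 s down to about
        # -0.8 s in 2 ms steps and keep the offset with the lowest cost.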

        for i in range(800):
            offset = 0.8 - i / 500
            cost = self.gyro_cost_func(OF_times, OF_roll, gyro_times + offset,
                                       gyro_roll)
            offsets.append(offset)
            costs.append(cost)

        final_offset = offsets[np.argmin(costs)]

        print("Estimated offset: {}".format(final_offset))

        plt.plot(offsets, costs)
        plt.show()

        return final_offset

    def gyro_cost_func(self, OF_times, OF_roll, gyro_times, gyro_roll):
        sum_squared_diff = 0
        gyro_idx = 0
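        # For each optical flow sample, advance to the first gyro sample at or after
        # its timestamp and accumulate the squared roll-rate difference
        # (assumes the gyro data extends past the optical flow slice).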

        for OF_idx in range(len(OF_times)):
            while gyro_times[gyro_idx] < OF_times[OF_idx]:
                gyro_idx += 1

            diff = gyro_roll[gyro_idx] - OF_roll[OF_idx]
            sum_squared_diff += diff**2
            #print("Gyro {}, OF {}".format(gyro_times[gyro_idx], OF_times[OF_idx]))

        #print("DIFF^2: {}".format(sum_squared_diff))

        #plt.plot(OF_times, OF_roll)
        #plt.plot(gyro_times, gyro_roll)
        #plt.show()
        return sum_squared_diff

    def renderfile(self, outpath="Stabilized.mp4", out_size=(1920, 1080)):

        # Side-by-side (before/after) output; fourcc -1 lets OpenCV choose or prompt for a codec
        out = cv2.VideoWriter(outpath, -1, 29.97, (1920 * 2, 1080))
        crop = (int((self.width - out_size[0]) / 2),
                int((self.height - out_size[1]) / 2))
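        # crop holds the top-left offsets for a centered crop of size out_size.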

        self.cap.set(cv2.CAP_PROP_POS_FRAMES, 21 * 30)

        i = 0
        while (True):
            # Read next frame
            success, frame = self.cap.read()

            frame_num = int(self.cap.get(cv2.CAP_PROP_POS_FRAMES))
            print("FRAME: {}, IDX: {}".format(frame_num, i))

            if success:
                i += 1

            if i > 1000:
                break

            if success and i > 0:

                frame_undistort = cv2.remap(frame,
                                            self.map1,
                                            self.map2,
                                            interpolation=cv2.INTER_LINEAR,
                                            borderMode=cv2.BORDER_CONSTANT)

                frame_out = self.undistort.get_rotation_map(
                    frame_undistort, self.stab_transform[frame_num - 1])

                # Fix border artifacts
                frame_out = frame_out[crop[1]:crop[1] + out_size[1],
                                      crop[0]:crop[0] + out_size[0]]
                frame_undistort = frame_undistort[crop[1]:crop[1] +
                                                  out_size[1],
                                                  crop[0]:crop[0] +
                                                  out_size[0]]

                #out.write(frame_out)
                #print(frame_out.shape)

                # If the image is too big, resize it.
                #if frame_out.shape[1] > 1920:
                #    frame_out = cv2.resize(frame_out, (int(frame_out.shape[1]/2), int(frame_out.shape[0]/2)))

                size = np.array(frame_out.shape)
                frame_out = cv2.resize(frame_out, (int(size[1]), int(size[0])))

                frame = cv2.resize(frame_undistort,
                                   ((int(size[1]), int(size[0]))))
                concatted = cv2.resize(cv2.hconcat([frame_out, frame]),
                                       (1920 * 2, 1080))
                out.write(concatted)
                cv2.imshow("Before and After", concatted)
                cv2.waitKey(5)

        # When everything done, release the capture
        out.release()

    def release(self):
        self.cap.release()
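
A minimal end-to-end sketch of how this class might be used, with placeholder file paths (the .MP4 name is borrowed from the blackbox filename that appears elsewhere in these examples):

# Hypothetical usage of the GPMFStabilizer above; paths are placeholders.
stab = GPMFStabilizer("test_clips/GX015563.MP4", "camera_calibration.json")
stab.auto_sync_stab(smooth=0.8, sliceframe1=10, sliceframe2=1000, slicelength=50)
stab.renderfile("Stabilized.mp4", out_size=(1920, 1080))
stab.release()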
Example No. 11
#bb = BlackboxExtractor("test_clips/GX015563.MP4_emuf_004.bbl")

#gyro_data = bb.get_gyro_data()
gyro_data = gpmf.get_gyro(True)

#gyro_data[:,[2, 3]] = gyro_data[:,[3, 2]]
gyro_data[:, 1] = gyro_data[:, 1]
gyro_data[:, 2] = -gyro_data[:, 2]
gyro_data[:, 3] = gyro_data[:, 3]

#gyro_data[:,1:] = -gyro_data[:,1:]

initial_orientation = Rotation.from_euler('xyz', [0, 0, 0],
                                          degrees=True).as_quat()

integrator = GyroIntegrator(gyro_data, initial_orientation=initial_orientation)
integrator.integrate_all()

v1 = 3.4
v2 = 6.179
g1 = 3.4
g2 = 6.263

gyroslope = (g2 - g1) / (v2 - v1)

gStart = g1 - gyroslope * v1

interval = gyroslope * 1 / 59.94

print("Start {}".format(gStart))
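
A plausible continuation, mirroring the call in Example No. 9 (the sign convention for the start offset differs between examples, so treat this as a sketch to be verified against footage):

# Hypothetical continuation; gStart and interval come from the computation above.
times, stab_transform = integrator.get_interpolated_stab_transform(
    smooth=0.985, start=gStart, interval=interval)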