def TroubleshootVideoClip(calibration_set_pattern):
    """Run the lane-detection demo pipeline on a single frame of the project video.

    Grabs the frame at t=41.4s, builds an ImageProcessing instance sized to that
    frame, and calls its Demo() on the frame for visual troubleshooting.

    Args:
        calibration_set_pattern: camera-calibration file pattern forwarded to
            ImageProcessing (exact semantics defined by that class — see its docs).

    Returns:
        None; output is whatever ImageProcessing.Demo displays/produces.
    """
    videoin = dataf + 'project_video.mp4'
    clip = VideoFileClip(videoin, audio=False)
    # 41.4s is a known hard frame in this project video, useful for debugging.
    original = clip.make_frame(41.4)
    # moviepy frames are (height, width, channels); ImageProcessing wants (w, h).
    img_size = (original.shape[1], original.shape[0])
    processing = ImageProcessing(img_size, calibration_set_pattern)
    processing.Demo(original)
def ProcessVideoClip(calibration_set_pattern):
    """Run lane detection over the whole project video and write the result.

    Initializes detection state from frame 0, then maps UseSmartLocate over
    every frame and writes the annotated clip to 'out/lane_detected.mp4'.

    Args:
        calibration_set_pattern: camera-calibration file pattern forwarded to
            ImageProcessing (exact semantics defined by that class).

    Returns:
        None; side effect is the output video file.
    """
    videoin = dataf + 'project_video.mp4'
    clip = VideoFileClip(videoin, audio=False)  # .subclip(37,43)
    original = clip.make_frame(0)
    # moviepy frames are (height, width, channels); ImageProcessing wants (w, h).
    img_size = (original.shape[1], original.shape[0])
    print(img_size)
    processing = ImageProcessing(img_size, calibration_set_pattern)
    processing.PrepareDetection(original)

    def process_clip_frame(image):
        # Closure over `processing` so fl_image can carry detector state
        # across frames.
        return processing.UseSmartLocate(image)

    # Warm-up pass on frame 0 before streaming the clip. The original bound
    # this to an unused `result`; the call is kept for its side effects on
    # `processing`, the dead binding is dropped.
    processing.UseSmartLocate(original)
    lane_found_clip = clip.fl_image(process_clip_frame)
    lane_found_clip.write_videofile('out/lane_detected.mp4', audio=False)
# NOTE(review): this chunk began with the tail of a `pipeline` function whose
# `def` header is outside this view. The orphaned statements are kept commented
# so the file stays syntactically valid — reattach them to `pipeline` when the
# full definition is in view:
#     # draw_img = cv2.addWeighted(draw_img, 1, heatmap*6, 0.9, 0)
#     framecount += 1
#     return draw_img

global heatmaps, framecount  # no-op at module level; kept from the original
framecount = 0
heatmaps = collections.deque(maxlen=6)  # rolling window of recent heatmaps

# clip1 = VideoFileClip("test_video.mp4").fl_image(pipeline)
clip2 = VideoFileClip("project_video.mp4").fl_image(pipeline)

# Check some frames: render 10 seconds starting at t=15s and display roughly
# one processed frame per second (framecount is advanced inside `pipeline`
# as fl_image renders each frame).
time_start = 15
fps = int(clip2.fps)
for i in range(fps * 10):
    time_current = time_start + i / fps
    video_frame = clip2.make_frame(time_current)
    if (framecount % fps) == 0:
        plt.imshow(video_frame)
        plt.title(str(time_current))
        plt.show()

# save
# clip1.write_videofile('video_out_test.mp4', audio=False)
# clip2.write_videofile('video_out.mp4', audio=False)
# cv2.imwrite('test9.jpg',cv2.cvtColor(clip2.make_frame(26),cv2.COLOR_RGB2BGR))

# Visualize: sample 6 raw frames 0.2s apart starting at t=40s.
clip3 = VideoFileClip("project_video.mp4")
plt.figure(figsize=(12, 12))
heatmaps2 = collections.deque(maxlen=6)
for i in range(6):
    # NOTE(review): the loop body may continue past this chunk — confirm
    # against the full file before relying on this loop doing only this.
    img = clip3.make_frame(40 + i * 0.2)
# Run the pipeline on a single test image and save an example of the output.
# (`img_in` and `image_test` are defined earlier in the file, outside this chunk.)
img_out = process_image(img_in)
plt.title(image_test)
plt.imshow(img_out)
# Save BEFORE show(): in non-interactive matplotlib, show() can close the
# figure, so a savefig placed after it writes a blank image.
plt.savefig('output_images/example_output.jpg')
plt.show()

#### output video ####
from moviepy.editor import VideoFileClip

video_output = 'video_out7.mp4'
clip1 = VideoFileClip("project_video.mp4")
clip_out = clip1.fl_image(process_image)  # NOTE: this function expects color images!!
clip_out.write_videofile(video_output, audio=False, threads=2)

video_output_challenge = 'video_out_challenge2.mp4'
clip2 = VideoFileClip("challenge_video.mp4")
clip_out_challenge = clip2.fl_image(process_image)  # NOTE: this function expects color images!!
clip_out_challenge.write_videofile(video_output_challenge, audio=False, threads=2)


# Check a specific frame: show input vs. processed output side by side.
def movie_frame(time_s=10):
    f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 10))
    ax1.imshow(clip1.make_frame(time_s))
    ax2.imshow(clip_out.make_frame(time_s))


movie_frame(40.9)
# Dump a couple of raw frames to disk (moviepy frames are RGB; cv2 wants BGR).
cv2.imwrite('test7.jpg', cv2.cvtColor(clip1.make_frame(22.53), cv2.COLOR_RGB2BGR))
cv2.imwrite('test8.jpg', cv2.cvtColor(clip1.make_frame(41.9), cv2.COLOR_RGB2BGR))