import os
import pickle

import cv2

import intraframe


def compress_fullsearch(path, dest_path, macroblock_size=8, fixed_keyframe=0):
    """Encode an image sequence using full-search motion compensation only.

    Key frames are stored as lossless PNGs; every other frame is stored as the
    motion-compensated residual against the last key frame.

    Args:
        path: The directory where the image sequence to be encoded is stored.
        dest_path: A destination directory where the encoded frames will be stored.

    Kwargs:
        macroblock_size: Size of the macroblock used for motion compensation.
        fixed_keyframe: Size of the Group of Pictures (GOP).
    """
    if path[-1] != "/":
        path += "/"
    if dest_path[-1] != "/":
        dest_path += "/"
    info = pickle.load(open(path + "info.dat", "r"))
    is_key = 0
    info.fixed_keyframe = fixed_keyframe
    info.full_size = 100
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    info.macroblock_size = macroblock_size
    info.frames = 30  # NOTE: hardcoded frame count, overrides the value loaded from info.dat
    for c in range(info.frames):
        frame = cv2.imread(path + str(c) + ".png", cv2.CV_LOAD_IMAGE_GRAYSCALE)
        if is_key == 0:
            # Key frame: store the frame itself losslessly and use it as the
            # reference for the rest of the GOP.
            if not cv2.imwrite(dest_path + str(c) + ".png", frame, [cv2.cv.CV_IMWRITE_PNG_COMPRESSION, 0]):
                print "Failed to create: " + dest_path + str(c) + ".png"
            is_key = fixed_keyframe - 1
            key_frame = frame
            info.motion_vectors += [0]
            # pickle.dump(info, open(dest_path + "info.dat", "w"))
        else:
            # P-frame: full-search motion estimation against the key frame;
            # store the residual image and keep the motion vectors in info.
            error, mvs = intraframe.encode_motion_frame(frame, key_frame, macroblock_size, info.full_size)
            if not cv2.imwrite(dest_path + str(c) + ".png", error, [cv2.cv.CV_IMWRITE_PNG_COMPRESSION, 0]):
                print "Failed to create: " + dest_path + str(c) + ".png"
            info.motion_vectors += [(mvs)]
            is_key -= 1
    pickle.dump(info, open(dest_path + "info.dat", "w"))
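# Hedged usage sketch (not part of the original module; the directory names are
# hypothetical). The source directory must contain info.dat plus frames named
# 0.png, 1.png, ...; with fixed_keyframe=10 a new key frame is emitted every
# 10 frames:
#
#     compress_fullsearch("sequences/foreman/", "encoded/foreman_fullsearch/",
#                         macroblock_size=8, fixed_keyframe=10)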
import intraframe as ifr
import numpy as np

frame = np.ones((20, 20)) * 2
keyframe = np.ones((20, 20)) * 2
frame[0:3, 0:3] = 1
keyframe[2:5, 2:5] = 1

error, m_vs = ifr.encode_motion_frame(frame, keyframe, (2, 2))
deframe = ifr.decode_motion_frame(error, m_vs, (2, 2), keyframe)
print deframe
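# The dark 3x3 patch sits at the origin in `frame` and at offset (2, 2) in
# `keyframe`; if the full search recovers that displacement, the printed
# `deframe` should closely match `frame`.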
# NOTE: sk (SPECK codec), lwt (CDF 9/7 lifting wavelet transform) and tools
# (zero padding / quantization helpers) are project-local modules assumed to be
# imported at module level; their import statements are not shown in this excerpt.
def compress_motion_speck(path, dest_path, bpp, dec_level=4, macroblock_size=8, fixed_keyframe=0):
    """Compress an image sequence from a directory using SPECK and motion compensation.

    Args:
        path: The directory where the image sequence to be encoded is stored.
        dest_path: A destination directory where the encoded frames will be stored.
        bpp: Target compression rate in bits per pixel.

    Kwargs:
        dec_level: Level of wavelet decomposition to be used.
        macroblock_size: Size of the macroblock used for motion compensation.
        fixed_keyframe: Size of the Group of Pictures (GOP).
    """
    if path[-1] != "/":
        path += "/"
    if dest_path[-1] != "/":
        dest_path += "/"
    info = pickle.load(open(path + "info.dat", "r"))
    is_key = 0
    info.fixed_keyframe = fixed_keyframe
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    info.macroblock_size = macroblock_size
    codec = sk.speck()
    info.wavelet = "cdf97"
    info.wavelet_level = dec_level
    info.full_size = 100
    for c in range(info.frames):
        original_frame = cv2.imread(path + str(c) + ".png", cv2.CV_LOAD_IMAGE_GRAYSCALE)
        info.cols = original_frame.shape[1]
        info.rows = original_frame.shape[0]
        # Pad to dimensions suitable for the wavelet decomposition.
        frame = tools.zero_padding(original_frame)
        info.wavelet_cols = frame.shape[1]
        info.wavelet_rows = frame.shape[0]
        if is_key == 0:
            # Key frame: wavelet transform, quantize and SPECK-encode the frame itself.
            wavelet = lwt.cdf97(frame, dec_level)
            wavelet = tools.quant(wavelet, 0.0001)
            coded_frame = codec.compress(wavelet, bpp)
            stream = dict()
            stream["wise_bit"] = coded_frame[3]
            stream["payload"] = coded_frame[4]
            try:
                pickle.dump(stream, open(dest_path + str(c) + ".speck", "wb"))
            except:
                print "Failed to create: " + dest_path + str(c) + ".speck"
            # Use the *decoded* key frame as reference so encoder and decoder
            # predict from the same picture.
            iwave = codec.expand(coded_frame[4], frame.shape[1], frame.shape[0], dec_level, coded_frame[3])
            iframe = lwt.icdf97(iwave)
            is_key = fixed_keyframe - 1
            key_frame = iframe
            info.motion_vectors += [0]
        else:
            # P-frame: motion-compensated residual against the key frame, then
            # wavelet transform, quantize and SPECK-encode the residual.
            p_frame, mvs = intraframe.encode_motion_frame(frame, key_frame, macroblock_size, info.full_size)
            info.motion_vectors += [(mvs)]
            is_key -= 1
            wavelet = lwt.cdf97(p_frame, dec_level)
            wavelet = tools.quant(wavelet, 0.0001)
            coded_frame = codec.compress(wavelet, bpp)
            stream = dict()
            stream["wise_bit"] = coded_frame[3]
            stream["payload"] = coded_frame[4]
            try:
                pickle.dump(stream, open(dest_path + str(c) + ".speck", "wb"))
            except:
                print "Failed to create: " + dest_path + str(c) + ".speck"
    pickle.dump(info, open(dest_path + "info.dat", "w"))
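# Hedged usage sketch (not part of the original module; directory names and
# parameter values are hypothetical). The encoder writes one .speck file per
# frame plus an updated info.dat into dest_path:
#
#     compress_motion_speck("sequences/foreman/", "encoded/foreman_speck/",
#                           bpp=0.5, dec_level=4, macroblock_size=8,
#                           fixed_keyframe=10)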