# Standard and third-party imports used by the functions below. sk (SPECK
# codecs), wave (CDF 9/7 wavelet transforms), tls (padding/quantization
# helpers) and MainHeader are project-level names assumed to be imported
# from elsewhere in the repository.
import os
import pickle

import cv2
import numpy as np
import numpy.testing as npt


def compress_key_fvspeck(path, dest_path, data_path, dec_path, dec_level):
    if path[-1] != "/":
        path += "/"
    info = pickle.load(open(path + "header.dat"))
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    if not os.path.exists(dec_path):
        os.makedirs(dec_path)
    header = MainHeader()
    header.frames = info.frames
    header.ext = '.dat'
    codec = sk.ar_fvspeck()
    for c in range(info.frames):
        frame = cv2.imread(path + str(c) + info.ext, cv2.CV_LOAD_IMAGE_GRAYSCALE)
        h264data = pickle.load(open(data_path + str(c) + ".hdr"))
        rows = frame.shape[0]
        cols = frame.shape[1]
        frame = tls.zero_padding(frame)
        wavelet = wave.cdf97(frame, dec_level)
        wavelet = tls.quantize(wavelet, 1000, dtype=int)
        # Bit budget taken from the reference H.264 ABAC stream for this frame;
        # float division keeps the bpp target from being truncated.
        bpp = h264data.abac_size / float(rows * cols)
        print bpp
        center = (int(rows / 2), int(cols / 2))
        coded_frame = codec.compress(wavelet, bpp, 0.006, center, 0.3, 1, 1)
        coded_frame['real_cols'] = cols
        coded_frame['real_rows'] = rows
        # Reconstruct the frame from the codec's copy of the coded wavelet.
        wvlt = codec.clone
        iframe = wave.icdf97(wvlt)
        iframe = tls.quantize(iframe, 0.001)
        iframe = tls.unpadding(iframe, (rows, cols))
        iframe2 = tls.normalize(iframe, upper_bound=255, dtype=np.uint8)
        if not cv2.imwrite(dec_path + str(c) + ".png", iframe2,
                           [cv2.cv.CV_IMWRITE_PNG_COMPRESSION, 0]):
            print "Failed to create: " + dec_path + str(c) + ".png"
        pickle.dump(iframe, open(dest_path + str(c) + ".npy", "w"))
        try:
            pickle.dump(coded_frame, open(dest_path + str(c) + header.ext, "wb"))
        except:
            print "Failed to create: " + dest_path + str(c) + header.ext
    pickle.dump(header, open(dest_path + "header.dat", "w"))
    pickle.dump(header, open(dec_path + "header.dat", "w"))
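# A minimal usage sketch for compress_key_fvspeck, assuming a directory of
# extracted key frames with a pickled "header.dat" and a per-frame ".hdr"
# pickle under data_path carrying the H.264 ABAC size that sets the bpp
# target. All paths, the decomposition level and the helper name below are
# hypothetical placeholders, not part of the original code.
def _run_key_frame_pass():
    compress_key_fvspeck("video/frames/", "video/fvspeck/", "video/h264/",
                         "video/decoded/", 5)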
def compress_error_fvspeck(path, dest_path, data_path, dec_path, dec_level):
    if path[-1] != "/":
        path += "/"
    info = pickle.load(open(path + "header.dat"))
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    if not os.path.exists(dec_path):
        os.makedirs(dec_path)
    header = MainHeader()
    header.frames = info.frames
    header.ext = '.dat'
    codec = sk.ar_fvspeck()
    # Frame 0 is the key frame; only the residual (error) frames are coded here.
    for c in range(1, info.frames):
        frame = np.load(path + str(c) + ".npy")
        h264data = pickle.load(open(data_path + str(c) + ".hdr"))
        rows = frame.shape[0]
        cols = frame.shape[1]
        frame = tls.zero_padding(frame)
        wavelet = wave.cdf97(frame, dec_level)
        wavelet = tls.quantize(wavelet, 1000, dtype=int)
        bpp = h264data.abac_size / float(rows * cols)
        print bpp
        center = (int(rows / 2), int(cols / 2))
        coded_frame = codec.compress(wavelet, bpp, 0.006, center, 0.3, 1, 1)
        coded_frame['real_cols'] = cols
        coded_frame['real_rows'] = rows
        wvlt = codec.clone
        iframe = wave.icdf97(wvlt)
        iframe = tls.quantize(iframe, 0.001)
        iframe = tls.unpadding(iframe, (rows, cols))
        try:
            pickle.dump(coded_frame, open(dest_path + str(c) + header.ext, "wb"))
            pickle.dump(iframe, open(dec_path + str(c) + ".npy", "wb"))
            print dec_path + str(c) + ".npy"
        except:
            print "Failed to create: " + dest_path + str(c) + header.ext
    pickle.dump(header, open(dest_path + "header.dat", "w"))
    pickle.dump(header, open(dec_path + "header.dat", "w"))
def test_speck(path, dest_path, dec_level):
    if path[-1] != "/":
        path += "/"
    if not os.path.exists(dest_path):
        os.makedirs(dest_path)
    codec = sk.speck()
    frame = cv2.imread(path + str(0) + ".png", cv2.CV_LOAD_IMAGE_GRAYSCALE)
    rows = frame.shape[0]
    cols = frame.shape[1]
    frame = tls.zero_padding(frame)
    wavelet = wave.cdf97(frame, dec_level)
    wavelet = tls.quantize(wavelet, 1000, dtype=int)
    center = (int(rows / 2), int(cols / 2))
    coded_frame = codec.compress(wavelet, 3)
    coded_frame['real_cols'] = cols
    coded_frame['real_rows'] = rows
    wvlt = codec.clone
    iframe = wave.icdf97(wvlt)
    iframe = tls.quantize(iframe, 0.001)
    iframe = tls.unpadding(iframe, (rows, cols))
    iframe2 = tls.normalize(iframe, upper_bound=255, dtype=np.uint8)
    cv2.imwrite(dest_path + "plain5_2a.png", iframe2,
                [cv2.cv.CV_IMWRITE_PNG_COMPRESSION, 0])
    pickle.dump(iframe, open(dest_path + "plain5_2.npy", "w"))
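# Hypothetical invocation of test_speck: code frame "0.png" from a source
# directory with the plain SPECK codec at the rate hard-coded in the function
# and write the reconstruction ("plain5_2a.png") into dest_path. The paths,
# decomposition level and helper name are placeholders.
def _run_speck_smoke_test():
    test_speck("video/frames/", "video/speck_test/", 5)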
def test_cdf97(self):
    signal = np.ones((2 ** 6, 2 ** 6))
    wavelet = wave.cdf97(signal)
    isignal = wave.icdf97(wavelet)
    npt.assert_array_almost_equal(signal, isignal, 6)
def decompress_fvspeck(qwave):
    wavelet = tls.quantize(qwave, 0.001)
    img = wave.icdf97(wavelet)
    return img
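# Sketch of the forward path that decompress_fvspeck undoes: CDF 9/7 analysis
# followed by quantization with step 1000, whose inverse is the 0.001 factor
# applied inside decompress_fvspeck before the inverse transform. The constant
# test image and the helper name are hypothetical.
def _fvspeck_quantize_roundtrip():
    img = np.ones((2 ** 6, 2 ** 6))
    qwave = tls.quantize(wave.cdf97(img), 1000, dtype=int)
    rec = decompress_fvspeck(qwave)
    # Residual error comes only from rounding the coefficients to integers.
    print np.abs(img - rec).max()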