def decode_from_fec(k, m, block_size, blocks, data):
    """Decode FEC-encoded blocks and hand the result to the RTMP reconstructor.

    Args:
        k: number of data blocks.
        m: number of parity (recovery) blocks.
        block_size: size in bytes of each block.
        blocks: list of (row, data) tuples; repaired in place by fec_decode.
        data: original payload, used to verify the decode succeeded.
    """
    decoded_data = fec_decode(k, m, block_size, blocks)
    if decoded_data == 0:  # fec_decode returns 0 on success
        print("Decoded from FEC Data blocks: ")  # , len(blocks), "Block content", blocks
        if not check_decode_success(blocks, data, block_size):
            print("Error Decoding FEC data")
            # BUG FIX: original called sys.exit(0) here, reporting success
            # to the shell on the *error* path; exit non-zero instead.
            sys.exit(1)
        print("Sending to reconstruct rtmp: ")
        construct_rtmp(blocks)
def decode(self, strips):
    """Reassemble the payload from strips produced by PylonghairDriver.encode.

    The first ``len(strips) - self.m`` strips are data blocks; the rest are
    parity. fec_decode repairs the (row, data) tuples in place, after which
    the data strips are concatenated back into the original byte string.
    """
    block_size = len(strips[-1])
    k = len(strips) - self.m
    blocks = list(enumerate(strips[:k]))
    assert fec_decode(k, self.m, block_size, blocks) == 0
    return "".join(chunk for _, chunk in blocks)
def try_decode(frame):
    """Try to FEC-decode one frame from the global receive queues.

    Returns False when fewer than k blocks (data + parity combined) have
    arrived for ``frame``; otherwise gathers the received blocks, runs
    fec_decode, and returns True.
    """
    global sequence
    global parity
    global in_progress
    global par_progress
    global m
    global k
    print("try decode for frame", frame, " current queue status is ", in_progress)
    available = in_progress[frame] + par_progress[frame]
    if available < k:
        print("decode failed becuase only have ", available, "blocks instead of 16")
        return False
    blocks = []
    # A slot still equal to its own index is treated as "not yet received";
    # anything else is a received block (assumes the queues are initialised
    # with index sentinels -- TODO confirm against the receiver code).
    for index, block in enumerate(sequence[frame]):
        if block != index:
            # BUG FIX: fec_decode expects a list of (row, data) tuples, as
            # used elsewhere in this file; the original `blocks += block`
            # flattened the raw block contents into the list instead.
            blocks.append((index, block))
    for index, par_block in enumerate(parity[frame]):
        if par_block != index:
            # BUG FIX: the original loop variable was named `parity`,
            # shadowing and clobbering the global `parity` queue after the
            # first iteration. Parity rows follow the k data rows
            # (row index k..k+m-1), matching the test code's convention.
            blocks.append((k + index, par_block))
    print(fec_decode(k, m, 512, blocks))
    print(blocks)
    # TODO add back the blocks into sequnce to construct the frame here
    return True
def test_fec_decode_without_erasure(self):
    """Round-trip: decoding with all k data blocks intact must succeed."""
    block_size = 8192
    k = 10
    m = 4
    payload = os.urandom(k * block_size)
    recovery = bytearray(m * block_size)
    assert fec_encode(k, m, block_size, payload, recovery) == 0
    # hand the decoder every data block, each tagged with its row index
    blocks = [
        (row, payload[row * block_size:(row + 1) * block_size])
        for row in range(k)
    ]
    assert fec_decode(k, m, block_size, blocks) == 0
def test_fec_decode_without_sufficient_blocks(self):
    """Drop the last data block and recover it from one parity block."""
    block_size = 8192
    k = 10
    m = 4
    payload = os.urandom(k * block_size)
    recovery = bytearray(m * block_size)
    # a freshly allocated parity buffer must start out zeroed
    assert recovery == b'\x00' * (m * block_size)
    assert fec_encode(k, m, block_size, payload, recovery) == 0
    # supply only the first k-1 data blocks...
    blocks = [
        (row, payload[row * block_size:(row + 1) * block_size])
        for row in range(k - 1)
    ]
    # ...plus the first parity block (row index k) in place of the missing one
    blocks.append((k, bytearray(recovery[:block_size])))
    assert fec_decode(k, m, block_size, blocks) == 0
    # the decoder must have reconstructed the dropped last data block in place
    offset = (k - 1) * block_size
    assert blocks[k - 1][1] == payload[offset:offset + block_size]
# --- FEC demo: drop the first data block, decode, and dump the results ---
# (relies on `count`, `data`, `parity`, `k`, `m`, `block_size` from the
# enclosing scope)
count += 1
blocks = []
# artificially remove the first data block and substitute the first
# parity block in its place
# NOTE(review): this tags the parity block with row 0, whereas the tests
# above tag parity with row k -- confirm which index fec_decode expects.
blocks.append((0, bytearray(parity[0:block_size])))
# add the remaining k-1 data blocks to the transfer
for row in range(1, k):
    offset = row * block_size
    blocks.append((row, data[offset:offset + block_size]))
fec_decode(k, m, block_size, blocks)
offset = 0
print("First decoded block*********************\n")
print(blocks[0][1])
print("All blocks*********************\n")
for x in blocks:
    print("**", x)
print("Data block*********************\n")
print(data[offset:offset + block_size])
print("End*********************\n")