def add_index(num_oligos,BCH_bits,infile_name,outfile_name):
	'''
	Generate DNA encoding of all indexes from 0 to num_oligos-1, each protected with BCH_bits protection.
	The index DNA strings are appropriately concatenated with corresponding lines from infile_name and the resulting oligos are written to outfile_name, one line per index.
	Throw error if num_oligos > 2**24 = 16777216.
	'''
	if num_oligos > MAX_OLIGOS_NO_RLL:
		raise Exception('Too many oligos')
	block_len = index_block_len_noRLL
	bin_block_len = 2*block_len

	if BCH_bits != 0:
		bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_bits)
		# calculate number of bases used for index
		num_bases_BCH = BCH_bits*BCH_bits_per_error//2
		num_bases_index = block_len + num_bases_BCH
	index = 0
	with open(infile_name) as infile, open(outfile_name, 'w') as outfile:
		for line in infile:
			index_prp = (prp_a*index+prp_b)%MAX_OLIGOS_NO_RLL
			bin_string = bin(index_prp)[2:].zfill(bin_block_len)
			dna = bin2dna_2bpb(bin_string)
			if BCH_bits != 0:
				bits_ecc = bytes_to_binary_string(bch.encode(binary_string_to_bytes(bin_string)))
				bits_ecc = bits_ecc[:BCH_bits*BCH_bits_per_error]
				dna_ecc = bin2dna_2bpb(bits_ecc)
				outfile.write(dna+dna_ecc+line)
			else:
				outfile.write(dna+line)
			index += 1
			if index == num_oligos:
				break
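
# --- Illustrative sketch (not part of the original examples) -----------------
# Core idea behind add_index: scramble the index with an affine permutation,
# write it at 2 bits per base, and append BCH parity bases.  The polynomial,
# permutation constants and base map below are demo assumptions, not the
# module's real BCH_POLYNOMIAL / prp_a / prp_b / bin2dna_2bpb; the constructor
# follows the older bchlib call convention BCH(polynomial, t) used throughout
# these examples.
import bchlib

_BIN_INDEX_LEN = 24                       # assumed index width in bits
_MAX = 2 ** _BIN_INDEX_LEN
_PRP_A, _PRP_B = 1103515245, 12345        # _PRP_A odd -> invertible mod 2**24
_BASES = {'00': 'A', '01': 'C', '10': 'G', '11': 'T'}   # assumed 2-bit/base map

def _bin2dna(bits):
    return ''.join(_BASES[bits[i:i + 2]] for i in range(0, len(bits), 2))

def demo_index_to_dna(index, bch_t=2):
    bch = bchlib.BCH(8219, bch_t)                         # assumed polynomial
    index_prp = (_PRP_A * index + _PRP_B) % _MAX
    bin_string = bin(index_prp)[2:].zfill(_BIN_INDEX_LEN)
    ecc = bch.encode(int(bin_string, 2).to_bytes(_BIN_INDEX_LEN // 8, 'big'))
    ecc_bits = ''.join(format(b, '08b') for b in ecc)
    return _bin2dna(bin_string) + _bin2dna(ecc_bits)

# e.g. demo_index_to_dna(7) -> a 12-base index followed by the BCH parity bases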
Example #2
def encode(input_file, output_file, BCH_bits, alpha_raptor):
	encode_raptor_script = "./rq --debug encode "   
	raptor_length = (31-BCH_bits) - (31-BCH_bits)%4 # RaptorQ expects multiples of 4
	arg_string  = " -s " + str(raptor_length)
	arg_string += " -m " + str(1000000)
	arg_string += " --repair-symbols-rate " + str(alpha_raptor)
	arg_string += " "
	arg_string += input_file + " "
	arg_string += "tmpfile"
	encode_raptor_command = encode_raptor_script + arg_string
	subprocess.call([encode_raptor_command], shell=True)
	assert os.path.isfile("tmpfile"),"The codebook did not get generated"
	
	bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_bits)
	f_input = open("tmpfile", "r");
	data = json.load(f_input)
	f_input.close()
	for i,s in enumerate(data['symbols']):
		s_byte = binascii.unhexlify(((hex(int(s[1],2)))[2:-1]).zfill(2*data['symbol_size']))
		s_byte_coded = s_byte + bch.encode(s_byte)
		data['symbols'][i][1] = bin(int(binascii.hexlify(s_byte_coded), 16))[2:].zfill((data['symbol_size']+BCH_bits)*8)
	
	data['BCH_bits'] = BCH_bits	
	f_out = open(output_file,'w')
	data = json.dumps(data, sort_keys=True, indent=2, separators=(',', ': '))
	f_out.write(data)
	f_out.close()	
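
# --- Illustrative sketch (not part of the original examples) -----------------
# The loop above protects each RaptorQ symbol with BCH: bit string -> bytes,
# append bch.encode(...), back to a zero-padded bit string.  A Python 3
# equivalent of that per-symbol step, with made-up sizes and polynomial:
import bchlib

def demo_protect_symbol(bit_string, symbol_size, bch_t=16, poly=8219):
    bch = bchlib.BCH(poly, bch_t)                     # assumed parameters
    s_byte = int(bit_string, 2).to_bytes(symbol_size, 'big')
    s_byte_coded = s_byte + bch.encode(s_byte)
    return ''.join(format(b, '08b') for b in s_byte_coded)

# e.g. demo_protect_symbol('1' * 32, symbol_size=4) returns a bit string of
# length (4 + bch.ecc_bytes) * 8, ready to be stored back into data['symbols'].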
Example #3
def pdf1_to_bch1(pdf1, bch1):
    #valid pdf1
    binary_data_pdf1 = newdata = pdf1

    correctedbch1 = bch1
    bch1 = bch1 + '000'
    #print('valid pdf1',len(binary_data_pdf1),binary_data_pdf1)

    BCH_POLYNOMIAL = 137  #137
    BCH_BITS = 3
    bitflips = 0
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)
    data = bytearray(bitstring_to_bytes(binary_data_pdf1))
    print('m:', bch.m)
    rebuildpdf = ''
    for e in range(len(data)):
        segment = decodefunctions.dec2bin(data[e]).zfill(8)
        rebuildpdf = rebuildpdf + segment
        print(e, data[e], segment)

    #print(binary_data_pdf1)
    print(rebuildpdf, len(rebuildpdf), binary_data_pdf1 == rebuildpdf)
    ecc = bch.encode(data)
    print(len(ecc))
    bchstring = ''
    for e in ecc:
        binchar = decodefunctions.dec2bin(e)
        print(e, binchar)
        bchstring = bchstring + binchar

    # create array of ecc provide by bch
    ecc_provided = bytearray(bitstring_to_bytes(bch1))
    packet = data + ecc_provided
    bchstr2 = ''
    print(len(data), len(ecc), len(packet))
    print('ecc included:', ecc_provided, len(ecc_provided), type(ecc_provided))
    print('ecc calc:', ecc, len(ecc), type(ecc))
    print('match', ecc == ecc_provided)

    if ecc != ecc_provided:
        #packet = data + ecc
        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]
        #correct
        bitflips = bch.decode_inplace(data, ecc)
        print('bitflips: %d' % (bitflips))
        newdata = decodefunctions.dec2bin(data[0])
        for e in data[1:]:
            binchar = decodefunctions.dec2bin(e).zfill(8)
            #print(e, binchar)
            newdata = newdata + binchar

        correctedbch1 = ''
        for e in ecc:
            binchar = decodefunctions.dec2bin(e).zfill(8)
            #print(e, binchar)
            correctedbch1 = correctedbch1 + binchar
        if (len(correctedbch1)) > 21:
            correctedbch1 = correctedbch1[:21]

    return (bitflips, newdata, correctedbch1)
def hex_bch(hex):
    b = hextobin(hex).zfill(255)
    packet = bytearray(bitstring_to_bytes2(b))
    bch = bchlib.BCH(487, 6, False)
    data_only, ecc = packet[:-6], packet[-6:]  # data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

    # correct
    corrected_hex = ''
    bitflips = 0

    try:
        bitflips = bch.decode_inplace(data_only, ecc)
        print('\nbitflips: %d' % (bitflips))
        # packetize
        packet = data_only + ecc
        correct_final=''
        for e in packet:
            c = dec2bin(e).zfill(8)
            correct_final = correct_final + c
        corrected_hex = bin2hex(correct_final)
        if bitflips:
            success = True
        else:
            success = False
    except:
        success = False
    return (hex,corrected_hex)
def make_full(hexinput):
    pdf1 = decodefunctions.hextobin(hexinput)
    b = pdf1[:204] + '0' * 48
    print(b, len(b))
    ecc = calcBCH(b, 0, 202, 250)
    newhex = decodefunctions.bin2hex(pdf1[:204] + ecc)
    print(newhex)
    print(ecc, decodefunctions.bin2hex(ecc))
    BCH_POLYNOMIAL = 285  # 285
    BCH_BITS = 6
    bitflips = 0
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    max_data_len = bch.n // 8 - (bch.ecc_bits + 7) // 8

    data = bytearray(bch1correct.bitstring_to_bytes(pdf1[:204]))
    necc = bch.encode(data)
    bchstring = ''

    for e in necc:
        binchar = decodefunctions.dec2bin(e).zfill(8)
        bchstring = bchstring + binchar
    print(bchstring, decodefunctions.bin2hex(bchstring))

    newhex = decodefunctions.bin2hex(pdf1[:204] + bchstring)

    return newhex
    def __init__(self, p, t):
        self.bch_polynomial = p
        self.bch_bits = t
        self.bitflips = 0

        # create an instance of the BCH class from the bchlib library
        self.obj = bchlib.BCH(self.bch_polynomial, self.bch_bits)
Example #7
 def __init__(self, witness_nbits: int, t: int,
              extractor: Optional[Callable[[K], BitVector]] = None) -> None:
     self.t = t
     self.k = witness_nbits
     if extractor is None:
         extractor = self.byte_extractor
     self.extractor = extractor
     self.bch = bchlib.BCH(BCH_POLYNOMIAL, self.t)
     self.n = self.k + self.bch.ecc_bits  # systematic code
Example #8
def ecc_correct(block, code, ecc_strength):
    """
    Try to fix `block` using the ECC bytes in `code`, with correction strength `ecc_strength`.
    """
    ecc = bchlib.BCH(8219, ecc_strength, reverse=True)
    try:
        block_ = ecc.correct(bytes(block), bytes(code))
        return block_
    except Exception as e:
        return block
Example #9
    def decodeBCH(self, packet):
        bch = bchlib.BCH(configBCHPolynomial, configBCHBits)
        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

        try:
            bitflips, corrected, ecc = bch.decode(data, ecc)
        except:
            print("Error during decoding!")
            return bytearray()
        return corrected
Example #10
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('model', type=str)
    parser.add_argument('--image', type=str, default=None)
    parser.add_argument('--images_dir', type=str, default=None)
    parser.add_argument('--secret_size', type=int, default=128)
    args = parser.parse_args()

    if args.image is not None:
        files_list = [args.image]
    elif args.images_dir is not None:
        files_list = glob.glob(args.images_dir + '/*')
    else:
        print('Missing input image')
        return

    sess = tf.InteractiveSession(graph=tf.Graph())

    model = tf.saved_model.loader.load(sess, [tag_constants.SERVING], args.model)

    input_image_name = model.signature_def[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].inputs['image'].name
    input_image = tf.get_default_graph().get_tensor_by_name(input_image_name)

    output_secret_name = model.signature_def[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY].outputs['decoded'].name
    output_secret = tf.get_default_graph().get_tensor_by_name(output_secret_name)

    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    for filename in files_list:
        image = Image.open(filename).convert("RGB")
        image = np.array(ImageOps.fit(image,(512, 512)),dtype=np.float32)
        image /= 255.

        feed_dict = {input_image:[image]}

        secret = sess.run([output_secret],feed_dict=feed_dict)[0][0]

        packet_binary = "".join([str(int(bit)) for bit in secret])
        packet = bytes(int(packet_binary[i : i + 8], 2) for i in range(0, len(packet_binary), 8))
        packet = bytearray(packet)

        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]
        print(data)
        print(ecc)
        bitflips = bch.decode_inplace(data, ecc)

        if bitflips != -1:
            try:
                code = data.decode("utf-8")
                print(filename, code)
                continue
            except:
                continue
        print(filename, 'Failed to decode')
Example #11
    def encryptBCH(self, array):
        polynomial = 8219
        t = 2
        bch = bchlib.BCH(polynomial, t)

        data = bytearray(array)

        code = bch.encode(data)
        result = data + code
        result = list(result)
        return result
    def encode(self, input_raw_frames, asecret):
        width = FrameSize.WIDTH.value
        height = FrameSize.HEIGHT.value

        bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

        if len(asecret) > 7:
            print('Error: Can only encode 56 bits (7 characters) with ECC')
            return

        data = bytearray(asecret + ' ' * (7 - len(asecret)), 'utf-8')
        ecc = bch.encode(data)
        packet = data + ecc

        packet_binary = ''.join(format(x, '08b') for x in packet)
        secret = [int(x) for x in packet_binary]
        secret.extend([0, 0, 0, 0])

        encoded_frames = []
        residual_frames = []

        # the input frame has already been set to RGB
        for image in input_raw_frames:
            # image = cv2.imread(frame)
            # image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            image = cv2.resize(image, (width, height))
            image = image.astype(np.float32)
            image /= 255.

            feed_dict = {
                self.encode_input_secret: [secret],
                self.encode_input_image: [image]
            }

            hidden_img, residual = self.encode_sess.run(
                [self.encode_output_stegastamp, self.encode_output_residual],
                feed_dict=feed_dict)

            rescaled = (hidden_img[0] * 255).astype(np.uint8)
            encoded_frame = cv2.cvtColor(np.asarray(rescaled),
                                         cv2.COLOR_RGB2BGR)
            encoded_frames.append(encoded_frame)

            # get the residual image
            residual = residual[0] + .5
            residual = (residual * 255).astype(np.uint8)
            residual = cv2.cvtColor(np.squeeze(np.array(residual)),
                                    cv2.COLOR_BGR2RGB)
            residual_frames.append(residual)
        if self.return_residual is True:
            return encoded_frames, residual_frames
        else:
            return encoded_frames
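
# Note on sizes (not from the original source): with the StegaStamp-style
# constants BCH_POLYNOMIAL = 137 and BCH_BITS = 5 (an assumption; the module
# defines them elsewhere), the packet above is 7 data bytes + 5 ECC bytes
# = 96 bits, and the 4 appended zeros pad the secret to 100 bits.  This is why
# the decoding examples in this listing slice the first 96 recovered bits
# before splitting off bch.ecc_bytes.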
def performBCHCorrection(extractedPacket):
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)
    newData, newEcc = extractedPacket[:-bch.ecc_bytes], extractedPacket[
        -bch.ecc_bytes:]
    try:
        bitflips = bch.decode_inplace(newData, newEcc)
        #print('bitflips: %d' % (bitflips))
        #print("Here is the decrypted message: ", newData.decode('utf-8'))
        return newData.decode('utf-8').rstrip(' \n')
    except:
        #print("Issues with decoding data, here's what was recovered: ", newData)
        return newData
def setupBCH(msg):

    # create a bch object
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)
    data = bytearray()
    data.extend(map(ord, msg))
    ecc = bch.encode(data)
    packet = data + ecc
    binPacket = ""
    for i in range(0, len(packet)):
        binPacket += '{0:08b}'.format(packet[i])
        # binPacket += " "
    return binPacket
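
# --- Illustrative sketch (not part of the original examples) -----------------
# Round trip through the idea behind setupBCH / performBCHCorrection, with
# assumed values for the module-level BCH_POLYNOMIAL / BCH_BITS (the originals
# do not show them): encode a short message, flip one bit, then correct it.
import bchlib

def demo_roundtrip(msg="hello"):
    poly, t = 8219, 16                          # assumed parameters
    bch = bchlib.BCH(poly, t)
    data = bytearray(msg, 'ascii')
    packet = data + bch.encode(data)
    packet[0] ^= 0x01                           # inject a single bit error
    data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]
    bitflips = bch.decode_inplace(data, ecc)    # corrects data in place
    return bitflips, data.decode('ascii')

# demo_roundtrip() -> (1, 'hello')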
def decode_data(infile,oligo_length,outfile,BCH_bits,LDPC_alpha,LDPC_prefix,file_size, eps, mode = 'correct', MSA=False, sync='',sync_pos=-1):
	'''
	Decoder corresponding to encoder encode_data. Need same parameters as that.
	infile is file containing reads of the same length as the oligo_length.
	Returns status - 0 for success, 1 for failure
	In case of success, resulting decoded file is written to outfile.
	file_size is the size of the original file in bytes.
	mode is correct or detect as in remove_index
	eps is the error to be used in LDPC LLR
	'''
	data_len = 8*file_size
	decoded_data = []

	# calculate various parameters (must match the encoder)
	num_LDPC_blocks = int(math.ceil(1.0*data_len/LDPC_dim))
	parity_bits_per_LDPC_block = int(LDPC_alpha*LDPC_dim)

	if BCH_bits != 0:
		bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_bits)
		# calculate number of bases used for index
		num_bases_BCH = BCH_bits*BCH_bits_per_error//2
		num_bases_index = index_block_len_noRLL + num_bases_BCH
	else:
		num_bases_index = index_block_len_noRLL
	oligo_length_before_sync = oligo_length - len(sync)
	num_bases_payload = oligo_length_before_sync - num_bases_index
	bits_per_oligo = num_bases_payload*2
	num_oligos_data_per_LDPC_block = int(math.ceil(1.0*LDPC_dim/(bits_per_oligo)))
	num_oligos_parity_per_LDPC_block = int(math.ceil(1.0*parity_bits_per_LDPC_block/bits_per_oligo))
	num_oligos_per_LDPC_block = num_oligos_data_per_LDPC_block+num_oligos_parity_per_LDPC_block
	overall_rate = 1.0*data_len/(num_oligos_per_LDPC_block*num_LDPC_blocks*oligo_length)
	print 'overall rate:' ,overall_rate, 'bpb'
	print 'num oligos:', num_LDPC_blocks*num_oligos_per_LDPC_block
	print 'oligo length:', oligo_length, 'bases'
	print 'bases per oligo for index + index parity:', num_bases_index
	print 'fraction of oligos used for parity check:', 1.0*num_oligos_parity_per_LDPC_block/num_oligos_per_LDPC_block
	print 'number of LDPC blocks:', num_LDPC_blocks

	# find positions of parity bits in LDPC encoded file
	f_sys = open(LDPC_prefix+".systematic",'r')
	sys_pos = f_sys.readlines()
	f_sys.close()
	sys_pos = np.array([int(i) for i in sys_pos])
	mask = np.zeros(parity_bits_per_LDPC_block+LDPC_dim,dtype=bool)
	mask[sys_pos] = True
	mask = ~mask
	parity_pos = np.nonzero(mask)[0]
	tmp_index_file = infile+'.tmp.index'
	tmp_data_file = infile+'.tmp.data'
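
# --- Illustrative arithmetic (not from the original sources) -----------------
# The parameter block above reduces to simple counting.  With assumed values
# (LDPC_dim = 256000, oligo_length = 150, no sync marker, 12 index bases and
# 8 BCH bases per oligo) the oligo budget per LDPC block works out as:
#   payload bases  = 150 - (12 + 8)            = 130
#   bits per oligo = 130 * 2                   = 260
#   data oligos    = ceil(256000 / 260)        = 985
#   parity oligos  = ceil(0.1 * 256000 / 260)  = 99   (for LDPC_alpha = 0.1)
#   oligos / block = 985 + 99                  = 1084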
Example #16
def samplepdf2(f, r):
    for n in range(r):
        pdf2 = randombinary(26).zfill(26)
        bch2 = Fcn.calcbch('0' * 107 + pdf2, '1010100111001', 107, 133,
                           145) + '0000'
        bch = bchlib.BCH(67, 2)
        data = bytearray(bch1correct.bitstring_to_bytes(pdf2))
        ecc = bch.encode(data)
        bchstring = (Fcn.dec2bin(ecc[0]).zfill(8) +
                     Fcn.dec2bin(ecc[1]).zfill(8))[:12] + '0000'

        e = random.sample(range(0, 38), 3)

        scramble = list(pdf2 + bch2)
        for i in e:
            scramble[i] = str(int(not int(scramble[i])))
        corrupt = ''.join(scramble)

        ecc_provided = bytearray(bch1correct.bitstring_to_bytes(corrupt[26:]))
        data_provided = bytearray(bch1correct.bitstring_to_bytes(corrupt[:26]))
        ecc_c = bch.encode(data_provided)
        print(ecc_c == ecc_provided, ecc, ecc_provided)

        packet = bytearray(bch1correct.bitstring_to_bytes(
            corrupt[:26])) + bytearray(
                bch1correct.bitstring_to_bytes(corrupt[26:]))
        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]
        bitflips = bch.decode_inplace(data, ecc)
        newdata = Fcn.dec2bin(data[0]).zfill(2)
        for e in data[1:]:
            binchar = Fcn.dec2bin(e).zfill(8)
            newdata = newdata + binchar

        correctedbch2 = ''  # decodefunctions.dec2bin(ecc[0])
        for e in ecc:
            binchar = Fcn.dec2bin(e).zfill(8)
            correctedbch2 = correctedbch2 + binchar
        correctedbch2 = correctedbch2[:-4]
        bch2 = bch2[:-4]
        bchstring = bchstring[:-4]

        f.writelines([
            '{},{},{},{},{},{},{},{}'.format(
                pdf2 + bch2, bch2 == bchstring, corrupt, len(pdf2 + bch2),
                corrupt == pdf2 + bch2, newdata + correctedbch2,
                newdata + correctedbch2 == pdf2 + bch2, bitflips), '\n'
        ])
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('model', type=str)
    parser.add_argument('--image', type=str, default=None)
    parser.add_argument('--images_dir', type=str, default=None)
    parser.add_argument('--secret_size', type=int, default=100)
    args = parser.parse_args()

    if args.image is not None:
        files_list = [args.image]
    elif args.images_dir is not None:
        files_list = glob.glob(args.images_dir + '/*')
    else:
        print('Missing input image')
        return

    decoder = torch.load(args.model)

    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    for filename in files_list:
        image = Image.open(filename).convert("RGB")
        image = np.array(ImageOps.fit(image, (400, 400)), dtype=np.float32)
        image /= 255.

        secret = decoder(image)

        packet_binary = "".join([str(int(bit)) for bit in secret[:96]])
        packet = bytes(
            int(packet_binary[i:i + 8], 2)
            for i in range(0, len(packet_binary), 8))
        packet = bytearray(packet)

        data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

        bitflips = bch.decode_inplace(data, ecc)

        if bitflips != -1:
            try:
                code = data.decode("utf-8")
                print(filename, code)
                continue
            except:
                continue
        print(filename, 'Failed to decode')
Example #18
 def decodeBCH(self, array):
     polynomial = 8219
     t = 2
     bch = bchlib.BCH(polynomial, t)
     data, code = array[:-bch.ecc_bytes], array[-bch.ecc_bytes:]
     data = bytearray(data)
     code = bytearray(code)
     flips, data, code = bch.decode(data, code)
     data = list(data)
     print("Odkodowana wiadomość: ")
     if flips is -1:
         self.analysis.addAmount(0, 1, 0)
     elif flips > 0:
         self.analysis.addAmount(0, 0, 1)
     elif flips is 0:
         self.analysis.addAmount(1, 0, 0)
     print(data)
def decode(input_file, recon_file):
    decode_raptor_script = "../python-libraptorq/rq --debug decode "
    f_input = open(input_file, "r")
    data = json.load(f_input)
    f_input.close()
    bch = bchlib.BCH(BCH_POLYNOMIAL, data['BCH_bits'])
    reads_dict = {}
    num_bytes_per_read = len(data['symbols'][0][1]) // 8
    # load reads and store according to index
    for read in data['symbols']:
        if read[0] in reads_dict:
            reads_dict[read[0]].append([int(c) for c in read[1]])
        else:
            reads_dict[read[0]] = [[int(c) for c in read[1]]]
    corrected_reads = []  # to store index and corrected bitstring
    bitflips_dict = {}  # to store the number of bitflips for each index (for sorting later)
    # convert to numpy arrays, find consensus, do BCH decoding and keep if decoding successful
    for k in reads_dict.keys():
        read_array = np.array(reads_dict[k], dtype=int)
        majority_list = np.array(np.mean(read_array, axis=0) > 0.5, dtype=int)
        majority_str = ''.join([str(c) for c in majority_list])
        majority_str_bytes = binascii.unhexlify(
            ((hex(int(majority_str, 2)))[2:-1]).zfill(2 * num_bytes_per_read))
        maj_data = majority_str_bytes[:-bch.ecc_bytes]
        maj_ecc = majority_str_bytes[-bch.ecc_bytes:]
        (bitflips, maj_data, maj_ecc) = bch.decode(maj_data, maj_ecc)
        if bitflips >= 0:  #success
            corrected_str = base64.urlsafe_b64encode(maj_data)
            corrected_reads.append([k, corrected_str])
            bitflips_dict[k] = bitflips
    corrected_reads.sort(key=lambda x: bitflips_dict[x[0]])
    output_data = dict(data)
    output_data['symbols'] = corrected_reads
    f_intermediate = open("tmpfile", "w")
    f_intermediate.write(
        json.dumps(output_data,
                   sort_keys=False,
                   indent=2,
                   separators=(',', ': ')))
    f_intermediate.close()
    ret = subprocess.call([decode_raptor_script + " tmpfile " + recon_file],
                          shell=True)
    return ret
Example #20
    def __init__(self,
                 witness_nbits: int,
                 tolerance: int,
                 extractor: Optional[Callable[[K], BitVector]] = None,
                 polynomial: int = BCH_POLYNOMIAL) -> None:
        """Initializes FCS.

        Args:
            witness_nbits: Length of the witness in bits.
            tolerance: Number of changed bits tolerated by the scheme.
            extractor: Optional function to extract a BitVector from K.
            polynomial: Optional polynomial for the BCH code (see bchlib).
        """
        self._witlen = witness_nbits
        if extractor is None:
            extractor = _byte_extractor
        self._extractor = extractor
        self._bch = bchlib.BCH(polynomial, tolerance)
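
# --- Illustrative sketch (not part of the original examples) -----------------
# For a systematic BCH code the sketch length is simply witness bits plus
# parity bits, as in Example #7 above (n = k + ecc_bits).  The polynomial and
# tolerance below are placeholders, not the module's BCH_POLYNOMIAL.
import bchlib

def demo_sketch_length(witness_nbits=128, tolerance=8, poly=8219):
    bch = bchlib.BCH(poly, tolerance)
    return witness_nbits + bch.ecc_bits   # total bits the scheme stores

# demo_sketch_length() -> 232 (ecc_bits = 13 * 8 = 104 for poly 8219, t = 8)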
Example #21
def test_bchlib():
    # create a bch object
    BCH_POLYNOMIAL = 8219
    BCH_BITS = 16
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    # random data
    data = bytearray(os.urandom(512))

    # encode and make a "packet"
    ecc = bch.encode(data)
    packet = data + ecc

    # hash the packet (before corruption)
    sha1_initial = hashlib.sha1(packet)

    def bitflip(packet):
        byte_num = random.randint(0, len(packet) - 1)
        bit_num = random.randint(0, 7)
        packet[byte_num] ^= (1 << bit_num)

    # make BCH_BITS errors
    for _ in range(BCH_BITS):
        bitflip(packet)

    # hash the packet (after corruption)
    sha1_corrupt = hashlib.sha1(packet)

    # de-packetize
    data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

    # correct
    bitflips = bch.decode_inplace(data, ecc)

    # packetize
    packet = data + ecc

    # hash the packet (after correction)
    sha1_corrected = hashlib.sha1(packet)

    assert sha1_initial.digest() == sha1_corrected.digest()
def decode(input_file, recon_file):
	decode_raptor_script = "./rq --debug decode "    
	f_input = open(input_file, "r");
	data = json.load(f_input)
	f_input.close()
	bch = bchlib.BCH(BCH_POLYNOMIAL, data['BCH_bits'])
	corrected_reads = []
	# put corrected reads into corrected reads and drop non-corrected reads (no consensus)
	for read in data['symbols']:
		read_bytes = binascii.unhexlify(((hex(int(read[1],2)))[2:-1]).zfill(2*(data['symbol_size']+data['BCH_bits'])))
		read_data,read_ecc = read_bytes[:-bch.ecc_bytes], read_bytes[-bch.ecc_bytes:]
		(bitflips, read_data, read_ecc) = bch.decode(read_data, read_ecc)
		if bitflips >= 0: #success
			corrected_str = bin(int(binascii.hexlify(read_data), 16))[2:].zfill(data['symbol_size']*8)
			corrected_reads.append([read[0], corrected_str])
	output_data = dict(data)
	output_data['symbols'] = corrected_reads
	f_intermediate = open("tmpfile", "w");
	f_intermediate.write(json.dumps(output_data, sort_keys=False, indent=2, separators=(',', ': ')))
	f_intermediate.close()
	ret = subprocess.call([decode_raptor_script+" tmpfile "+ recon_file], shell=True)
	return ret
def add_index(num_oligos, BCH_bits, infile_name, outfile_name, bin_index_len):
    '''
    Generate DNA encoding of all indexes from 0 to num_oligos-1, each protected with BCH_bits protection.
    The index DNA strings are appropriately concatenated with corresponding lines from infile_name and the resulting oligos are written to outfile_name, one line per index.
    Throw an error if num_oligos > 2**bin_index_len.
    '''
    if bin_index_len not in prp_a:
        raise Exception('Invalid index_len (see prp_a dict for valid options)')
    MAX_OLIGOS = 2**bin_index_len
    if num_oligos > MAX_OLIGOS:
        raise Exception('Too many oligos for index len')

    num_bytes_BCH_input = math.ceil(bin_index_len / 8)

    if BCH_bits != 0:
        bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_bits)
    index = 0
    with open(infile_name) as infile, open(outfile_name, 'w') as outfile:
        for line in infile:
            index_prp = (prp_a[bin_index_len] * index +
                         prp_b[bin_index_len]) % MAX_OLIGOS
            bin_string = bin(index_prp)[2:].zfill(bin_index_len)
            dna = bin2dna_2bpb(bin_string)
            if BCH_bits != 0:
                bits_ecc = bytes_to_binary_string(
                    bch.encode(
                        binary_string_to_bytes(
                            bin_string.zfill(8 * num_bytes_BCH_input))))
                bits_ecc = bits_ecc[:BCH_bits * BCH_bits_per_error]
                dna_ecc = bin2dna_2bpb(bits_ecc)
                outfile.write(dna + dna_ecc + line)
            else:
                outfile.write(dna + line)
            index += 1
            if index == num_oligos:
                break
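
# --- Illustrative sketch (not part of the original examples) -----------------
# The index permutation above is an affine map modulo 2**bin_index_len; it is
# invertible because prp_a is odd.  A round trip with made-up constants (the
# real prp_a / prp_a_inv / prp_b dicts are defined elsewhere in the module):
BIN_INDEX_LEN = 16
MAX_OLIGOS = 2 ** BIN_INDEX_LEN
PRP_A, PRP_B = 40503, 12345                  # PRP_A odd -> invertible mod 2**16
PRP_A_INV = pow(PRP_A, -1, MAX_OLIGOS)       # modular inverse (Python 3.8+)

index = 1234
scrambled = (PRP_A * index + PRP_B) % MAX_OLIGOS
recovered = (PRP_A_INV * (scrambled - PRP_B)) % MAX_OLIGOS
assert recovered == index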
Example #24
def main():
    # Initializing network
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    detector_graph = tf.Graph()
    decoder_graph = tf.Graph()

    with detector_graph.as_default():
        detector_sess = tf.Session()
        detector_model = tf.saved_model.loader.load(detector_sess,
                                                    [tag_constants.SERVING],
                                                    args.detector_model)

        detector_input_name = detector_model.signature_def[
            signature_constants.
            DEFAULT_SERVING_SIGNATURE_DEF_KEY].inputs['image'].name
        detector_input = detector_graph.get_tensor_by_name(detector_input_name)

        detector_output_name = detector_model.signature_def[
            signature_constants.
            DEFAULT_SERVING_SIGNATURE_DEF_KEY].outputs['detections'].name
        detector_output = detector_graph.get_tensor_by_name(
            detector_output_name)

    with decoder_graph.as_default():
        decoder_sess = tf.Session()
        decoder_model = tf.saved_model.loader.load(decoder_sess,
                                                   [tag_constants.SERVING],
                                                   args.decoder_model)

        decoder_input_name = decoder_model.signature_def[
            signature_constants.
            DEFAULT_SERVING_SIGNATURE_DEF_KEY].inputs['image'].name
        decoder_input = decoder_graph.get_tensor_by_name(decoder_input_name)

        decoder_output_name = decoder_model.signature_def[
            signature_constants.
            DEFAULT_SERVING_SIGNATURE_DEF_KEY].outputs['decoded'].name
        decoder_output = decoder_graph.get_tensor_by_name(decoder_output_name)

    cap = cv2.VideoCapture(args.video)
    bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

    if args.save_video is not None:
        ret, frame = cap.read()
        fourcc1 = cv2.VideoWriter_fourcc(*'XVID')
        out = cv2.VideoWriter(args.save_video, fourcc1, 30.0, (1920, 1080))

    while (True):
        ret, frame = cap.read()
        if frame is None:
            break
        frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

        detector_image_input = cv2.resize(frame_rgb, (1024, 1024))
        detector_image_input = np.expand_dims(np.float32(detector_image_input),
                                              axis=0) / 255.0

        output_image = detector_sess.run(
            detector_output, feed_dict={detector_input: detector_image_input})
        output_image = np.array(output_image[0, :, :, :])
        output_image = np.argmax(output_image, axis=-1)

        color_codes = np.array([[255, 255, 255], [0, 0, 0]])
        out_vis_image = color_codes[output_image.astype(int)]

        mask_im = cv2.resize(np.float32(out_vis_image), (1920, 1080))

        contours, _ = cv2.findContours(
            cv2.cvtColor(mask_im, cv2.COLOR_BGR2GRAY).astype(np.uint8), 1, 2)
        extrema = np.zeros((8, 2))
        corners = np.zeros((4, 2))
        for cnt in contours:
            area = cv2.contourArea(cnt)
            if area < 1000:
                continue

            hull = cv2.convexHull(cnt)
            if len(hull) < 4:
                continue

            extrema[0, :] = hull[np.argmax(hull[:, 0, 0]), 0, :]
            extrema[1, :] = hull[np.argmax(hull[:, 0, 0] + hull[:, 0, 1]),
                                 0, :]
            extrema[2, :] = hull[np.argmax(hull[:, 0, 1]), 0, :]
            extrema[3, :] = hull[np.argmax(-hull[:, 0, 0] + hull[:, 0, 1]),
                                 0, :]
            extrema[4, :] = hull[np.argmax(-hull[:, 0, 0]), 0, :]
            extrema[5, :] = hull[np.argmax(-hull[:, 0, 0] - hull[:, 0, 1]),
                                 0, :]
            extrema[6, :] = hull[np.argmax(-hull[:, 0, 1]), 0, :]
            extrema[7, :] = hull[np.argmax(hull[:, 0, 0] - hull[:, 0, 1]),
                                 0, :]

            extrema_lines = extrema - np.roll(extrema, shift=1, axis=0)
            extrema_len = extrema_lines[:, 0]**2 + extrema_lines[:, 1]**2
            line_idx = np.sort(extrema_len.argsort()[-4:])
            for c in range(4):
                p1 = extrema[line_idx[(c - 1) % 4], :]
                p2 = extrema[(line_idx[(c - 1) % 4] - 1) % 8, :]
                p3 = extrema[line_idx[c], :]
                p4 = extrema[(line_idx[c] - 1) % 8, :]
                corners[c, :] = get_intersect(p1, p2, p3, p4)

            new_area = poly_area(corners)
            if new_area / area > 1.5:
                continue

            corners = order_points(corners)
            corners_full_res = corners

            pts_dst = np.array([[0, 0], [399, 0], [399, 399], [0, 399]])
            h, status = cv2.findHomography(corners_full_res, pts_dst)
            try:
                warped_im = cv2.warpPerspective(frame_rgb, h, (400, 400))
                w_im = warped_im.astype(np.float32)
                w_im /= 255.
            except:
                continue

            for im_rotation in range(4):
                w_rotated = np.rot90(w_im, im_rotation)
                recovered_secret = decoder_sess.run(
                    [decoder_output], feed_dict={decoder_input:
                                                 [w_rotated]})[0][0]
                recovered_secret = list(recovered_secret)
                recovered_secret = [int(i) for i in recovered_secret]

                packet_binary = "".join(
                    [str(bit) for bit in recovered_secret[:96]])
                footer = recovered_secret[96:]
                if np.sum(footer) > 0:
                    continue
                packet = bytes(
                    int(packet_binary[i:i + 8], 2)
                    for i in range(0, len(packet_binary), 8))
                packet = bytearray(packet)

                data, ecc = packet[:-bch.ecc_bytes], packet[-bch.ecc_bytes:]

                bitflips = bch.decode_inplace(data, ecc)

                if bitflips != -1:
                    print('Num bits corrected: ', bitflips)
                    try:
                        code = data.decode("utf-8")
                    except:
                        continue
                    color = (100, 250, 100)
                    cv2.polylines(frame,
                                  np.int32([corners]),
                                  thickness=6,
                                  color=color,
                                  isClosed=True)
                    font = cv2.FONT_HERSHEY_SIMPLEX
            im = cv2.putText(
                frame, code,
                tuple((corners[0, :] + np.array([0, -15])).astype(int)),
                font, 1, (0, 0, 0), 2, cv2.LINE_AA)

        if args.save_video is not None:
            out.write(frame)
        else:
            cv2.imshow('frame', frame)
            cv2.waitKey(1)

    cap.release()
    if args.save_video:
        out.release()
def remove_index(num_oligos,
                 BCH_bits,
                 infile_name,
                 outfile_data,
                 outfile_index,
                 bin_index_len,
                 attempt_indel_cor=True):
    '''
    Decode the index from a collection of (noisy) reads in infile_name and write the data and index to outfile_data and outfile_index, line by line, skipping reads whose index failed to decode.
    attempt_indel_cor: if substitution-only decoding fails, attempt single-indel correction position by position (first assume a deletion and re-insert each base at each position; accept only if exactly one candidate decodes successfully, otherwise also try removing one base to undo an insertion).
    '''
    max_bitflips_sub = BCH_bits
    max_bitflips_indel = 0
    if bin_index_len not in prp_a:
        raise Exception('Invalid index_len (see prp_a dict for valid options)')
    MAX_OLIGOS = 2**bin_index_len
    if num_oligos > MAX_OLIGOS:
        raise Exception('Too many oligos for index len')
    dna_index_len = bin_index_len // 2
    num_bytes_BCH_input = math.ceil(bin_index_len / 8)
    if BCH_bits != 0:
        bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_bits)
        # calculate number of bases used for index
        num_bases_BCH = BCH_bits * BCH_bits_per_error // 2
        num_bases_index = dna_index_len + num_bases_BCH
    else:
        num_bases_index = dna_index_len
    count_success = 0
    count_indel_corrected = 0
    count_failed = 0
    with open(infile_name) as infile, open(outfile_data, 'w') as f_data, open(
            outfile_index, 'w') as f_index:
        for line in infile:
            dna_data = line[num_bases_index:]
            dna_index = line[:num_bases_index]
            if BCH_bits != 0:
                successful_indices = []
                indel_corrected = False
                # first try direct decoding with substitutions
                (bitflips, cor_index,
                 cor_ecc) = decode_index_BCH(dna_index, num_bases_BCH,
                                             BCH_bits, num_bytes_BCH_input,
                                             bch)
                if bitflips >= 0 and bitflips <= max_bitflips_sub:
                    index = decode_index_prp(cor_index, bin_index_len,
                                             prp_a_inv[bin_index_len],
                                             prp_b[bin_index_len], MAX_OLIGOS)
                    if index < num_oligos:
                        successful_indices.append((index, dna_data))

                if len(successful_indices) == 0 and attempt_indel_cor:
                    # indel correction mode
                    # try to correct 1 deletion by inserting each base at each possible position
                    # and try BCH decoding with at most max_bitflips_indel errors
                    indel_corrected = True
                    dna_index = line[:num_bases_index - 1]
                    for pos in range(num_bases_index):
                        for new_base in ['A', 'C', 'G', 'T']:
                            new_dna_index = dna_index[:pos] + \
                                new_base+dna_index[pos:]
                            (bitflips, cor_index, cor_ecc) = decode_index_BCH(
                                new_dna_index, num_bases_BCH, BCH_bits,
                                num_bytes_BCH_input, bch)
                            if bitflips >= 0 and bitflips <= max_bitflips_indel:
                                index = decode_index_prp(
                                    cor_index, bin_index_len,
                                    prp_a_inv[bin_index_len],
                                    prp_b[bin_index_len], MAX_OLIGOS)
                                if index < num_oligos:
                                    successful_indices.append(
                                        (index, line[num_bases_index - 1:]))
                            if len(successful_indices) > 1:
                                break  # fail
                    if len(successful_indices) == 0:
                        # try insertion (i.e., delete positions one by one and see if it satisfies BCH w/o error)
                        dna_index = line[:num_bases_index + 1]
                        for pos in range(num_bases_index):
                            new_dna_index = dna_index[:pos] + \
                                dna_index[pos+1:]
                            (bitflips, cor_index, cor_ecc) = decode_index_BCH(
                                new_dna_index, num_bases_BCH, BCH_bits,
                                num_bytes_BCH_input, bch)
                            if bitflips >= 0 and bitflips <= max_bitflips_indel:
                                index = decode_index_prp(
                                    cor_index, bin_index_len,
                                    prp_a_inv[bin_index_len],
                                    prp_b[bin_index_len], MAX_OLIGOS)
                                if index < num_oligos:
                                    successful_indices.append(
                                        (index, line[num_bases_index + 1:]))
                            if len(successful_indices) > 1:
                                break  # fail

                # succeed if exactly one successful decoding
                if len(successful_indices) == 1:
                    f_data.write(successful_indices[0][1])
                    f_index.write(str(successful_indices[0][0]) + '\n')
                    count_success += 1
                    if indel_corrected:
                        count_indel_corrected += 1
                else:
                    count_failed += 1
            else:
                bin_index = dna2bin_2bpb(dna_index)
                index_prp = int(bin_index, 2)
                index = (prp_a_inv[bin_index_len] *
                         (index_prp - prp_b[bin_index_len])) % MAX_OLIGOS
                if index < num_oligos:
                    f_data.write(dna_data)
                    f_index.write(str(index) + '\n')
                    count_success += 1
                else:
                    count_failed += 1
    print("Successfully decoded", count_success, "indices")
    print("Deletion corrected", count_indel_corrected)
    print("Failed to decode", count_failed, "indices")
Example #26
def crc(data, pol, bits):
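    # Note: despite its name, this returns the BCH ECC bytes for `data`
    # (bch.encode), not a CRC checksum.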
    bch = bchlib.BCH(pol, bits)
    return bch.encode(data)
Example #27
 def get_instance(ecc_strength):
     if BCH.instance is None:
         BCH.instance = bchlib.BCH(8219, ecc_strength, reverse=True)
     return BCH.instance
Example #28
import bchlib
import hashlib
import os
import random
from reedsolo import RSCodec
from bitstring import BitArray

# create a bch object
BCH_POLYNOMIAL = 8219
BCH_BITS = 16
bch = bchlib.BCH(BCH_POLYNOMIAL, BCH_BITS)

error_chance = 0.01
errors = 0


def chunk(l, chunk_size):
    return [l[i:i + chunk_size] for i in range(0, len(l), chunk_size)]


def compare(a, b):
    correct = 0
    for i in range(len(a)):
        if a[i] == b[i]:
            correct = correct + 1
    return correct


# BCH
def bchEncode(data):
    ecc = bch.encode(data)
Example #29
# Installation:
# python -m pip install bchlib

import bchlib
import hashlib
import os
import random

# create a BCH object; the polynomial is a primitive polynomial over the Galois field GF(2^13), GF(2^14) or GF(2^15)
# https://link.springer.com/content/pdf/bbm%3A978-1-4615-1509-8%2F1.pdf
BCH_POLYNOME = 16659
# number of bits that will be flipped to introduce errors
BCH_BITS = 50
bch = bchlib.BCH(BCH_POLYNOME, BCH_BITS)

# random data
data = bytearray(os.urandom(512))

# encode into a packet
ecc = bch.encode(data)
paquet = data + ecc

# print the packet hash
sha1_initial = hashlib.sha1(paquet)
print('sha1: %s' % (sha1_initial.hexdigest(),))

"""Fonction pour ajouter aléatoirement des erreurs dans notre paquet"""
def bitflip(paquet):
    byte_num = random.randint(0, len(paquet) - 1)
    bit_num = random.randint(0, 7)
    paquet[byte_num] ^= (1 << bit_num)
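
# A possible continuation of this snippet (not part of the original): inject
# BCH_BITS random bit errors with bitflip(), then split the packet and correct it.
for _ in range(BCH_BITS):
    bitflip(paquet)

data, ecc = paquet[:-bch.ecc_bytes], paquet[-bch.ecc_bytes:]
bitflips = bch.decode_inplace(data, ecc)
print('bitflips corrected: %d' % bitflips)

# re-assemble and check that the corrected packet matches the original
paquet = data + ecc
sha1_corrected = hashlib.sha1(paquet)
print('sha1 after correction: %s' % (sha1_corrected.hexdigest(),))
assert sha1_initial.digest() == sha1_corrected.digest()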
def atmel_generate_ecc_data(infile, outfile, config, crypto_key):
    """Generate ECC data and resulting dump file for ATMEL"""

    # initialize BCH encoder
    bch = bchlib.BCH(config['ecc_polynom'], config['ecc_errors'], False)

    # open output file
    fout = open(outfile, "wb")

    # open input file
    fin = open(infile, "rb")

    # initialize some variables
    processed_sector_count = 0
    data_sector_count = 0
    blank_page_count = 0
    total_page_count = config['filesize'] // config['pagesize']
    sectors_per_page = config['pagesize'] // config['sectorsize']
    total_sectors = total_page_count * sectors_per_page
    ecc_bytes_total = sectors_per_page * config['ecc_bytes_per_sector']

    # blank page data
    blank_page = b'\xff' * config['pagesize']

    # blank spare area data
    blank_spare_area = b'\xff' * config['spareareasize']

    # spare bytes before and after ECC data
    spare_area1 = b'\xff' * config['ecc_offset']
    spare_area2 = b'\xff' * (config['spareareasize'] - config['ecc_offset'] -
                             ecc_bytes_total)

    print("[*] Generating output file ...")
    for page in range(total_page_count):
        # read current block data
        page_data = fin.read(config['pagesize'])

        processed_sector_count += sectors_per_page

        if page_data == blank_page:
            # increment blank page counter
            blank_page_count += 1

            # write blank page data and blank spare area data
            fout.write(page_data + blank_spare_area)

        else:
            # increment data sector counter
            data_sector_count += sectors_per_page

            # generate ECC for each sector
            eccs = b''

            for sector in range(sectors_per_page):
                start_sector = sector * config['sectorsize']
                end_sector = start_sector + config['sectorsize']
                sector_data = reverse_bits(page_data[start_sector:end_sector])

                ecc = bch.encode(sector_data)

                # encrypt ECC
                # key = unhexlify("F78A7490B7C95943E99EA724AD")
                ecc_encrypted = xor_crypto(reverse_bits(ecc), crypto_key)

                eccs += ecc_encrypted
                # eccs += reverse_bits(ecc_encrypted)

            spare_area_data = spare_area1 + eccs + spare_area2
            fout.write(page_data + spare_area_data)

        # show some statistics during processing all sectors
        progress = processed_sector_count / total_sectors * 100
        print("\r    Progress: {:.2f}% ({}/{} sectors)".format(
            progress, processed_sector_count, total_sectors),
              end="")

    # close output file
    fout.close()

    # close input file
    fin.close()

    # show some statistics at the end
    blank_page_percentage = blank_page_count / total_page_count * 100
    blank_sector_count = blank_page_count * sectors_per_page
    blank_sector_percentage = blank_sector_count / total_sectors * 100
    data_sector_percentage = data_sector_count / total_sectors * 100
    bad_block_count = 0

    print("\n[*] Completed error correcting process")
    print(
        "    Successfully written {} bytes of data to output file '{}'".format(
            config['sectorsize'] * total_sectors, outfile))
    print("    -----\n    Some statistics\n"
          "    Total pages:        {}\n"
          "    Blank pages:        {} ({:.2f}%)\n"
          "    Blank sectors:      {} ({:.2f}%)\n"
          "    Data sectors:       {} ({:.2f}%)\n"
          "    Total sectors:      {}\n"
          "    Bad blocks:         {}".format(
              total_page_count, blank_page_count, blank_page_percentage,
              blank_sector_count, blank_sector_percentage, data_sector_count,
              data_sector_percentage, total_sectors, bad_block_count))
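
# --- Illustrative usage (not from the original sources) ----------------------
# atmel_generate_ecc_data expects a config dict with the keys used above; the
# numbers below are placeholder values for a hypothetical 2048+64 byte page
# NAND layout, not a tested ATMEL configuration.
example_config = {
    'ecc_polynom': 8219,            # BCH polynomial passed to bchlib.BCH
    'ecc_errors': 4,                # correctable bits per 512-byte sector
    'ecc_bytes_per_sector': 7,      # parity bytes emitted per sector
    'filesize': 4 * 1024 * 1024,    # size of the raw input dump in bytes
    'pagesize': 2048,               # main-area bytes per page
    'sectorsize': 512,              # ECC is computed per 512-byte sector
    'spareareasize': 64,            # spare/OOB bytes per page
    'ecc_offset': 32,               # where the ECC block starts in the spare area
}
# atmel_generate_ecc_data("dump.bin", "dump_with_ecc.bin", example_config,
#                         crypto_key=b"...")        # hypothetical call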