def readFile0BC(context):
    """Read a 0BC file and dispatch to the matching record reader.

    Loads the bytes at ``context.filepath``, parses the header, stores it
    on ``context.header`` as a side effect, then routes to a reader based
    on the header's data type and ``context.expectedFormat``.
    """
    with open(context.filepath, 'rb') as f:
        raw = f.read()
    stream = BinData(raw)
    print('file size =', stream.dataSize())

    header = readHeader0BC(stream)
    context.header = header
    pprint(header)

    if header.maybeDataType == 1:
        # Physical-collision payloads are only expected for format 0x10.
        assert(context.expectedFormat == 0x10)
        # assert(h.countRecords == 0)
        readPhysicalCollision0BC(stream, context)
    elif header.maybeDataType != 0:
        print('Yet unknown and unexpected header')
        assert(False)
    else:
        if context.expectedFormat == 0x12:
            readRecords0BC_format_12(stream, context)
        else:
            readRecords0BC(stream, context)

    # Show how much of the file was consumed vs. its total size.
    print(stream.tell(), stream.dataSize())
def main():
    """Command-line entry point: encode or decode a bit-stream with a
    convolutional code defined by a comma-separated list of binary
    polynomials. Exits via sys.exit() with a message on invalid input."""
    parser = argparse.ArgumentParser(
        description=
        "-= Encoder/Decoder of convolutional codes.\nAuthor: Jozef Knaperek =-\n"
    )
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-e', '--encode', action='store_true',
                       help='encode data with convolutional code')
    group.add_argument('-d', '--decode', action='store_true',
                       help='decode data using Viterbi algorithm')

    def make_bindata(arg):
        # argparse adapter: wrap the raw argument string in a BinData.
        return BinData(arg)
    # __name__ is what argparse prints in "invalid <name> value" errors.
    make_bindata.__name__ = 'input data'
    parser.add_argument('-i', '--input', type=make_bindata,
                        help='input data bit-stream (instead of using stdin)')

    def make_pols_list(arg):
        # Parse "101,111"-style binary polynomials into ints.
        pols = [int(pol, 2) for pol in arg.split(',')]
        if min(map(ones_count, pols)) < 2:
            raise ValueError(
                'Every valid polynomial must have at least two binary 1s')
        return pols
    make_pols_list.__name__ = 'polynomials list'
    parser.add_argument(
        'polynomials',
        help=
        'comma separated list of binnary polynomials (of at least two binary 1s in each)',
        type=make_pols_list)

    args = parser.parse_args()

    # BUG FIX: the original except-branch referenced an undefined name
    # `stdin_input`, raising NameError instead of printing the message.
    # Capture the raw stdin text so the error path can show it.
    stdin_text = ''
    if not args.input:
        stdin_text = sys.stdin.read()
    try:
        input_data = args.input or BinData(stdin_text)
    except ValueError:
        sys.exit('Invalid input data: ' + stdin_text)

    if args.encode:  # encode
        print(Transitions(args.polynomials).encode(input_data))
    else:  # decode
        # Each data bit produces one parity bit per polynomial, so the
        # encoded length must be a multiple of the polynomial count.
        if len(input_data) % len(args.polynomials):
            sys.exit(
                'Decoding error: The number of data bits ({}) is not multiple of the number of polynomials ({})!'
                .format(len(input_data), len(args.polynomials)))
        print(args.polynomials)
        print(Transitions(args.polynomials).decode(input_data))
def decode(self, parity_sequence_bindata):
    """
    Decodes convolutional code using the Viterbi algorithm.
    Public method (API).
    """
    parities = self.extract_parity_sequence(parity_sequence_bindata)
    INF = float('inf')

    class Node():
        # One trellis cell: accumulated path metric + decoded bits so far.
        def __init__(self, metric=INF, bindata=None):
            self.metric = metric
            self.bindata = bindata or BinData(0, 0)

    # Trellis columns: `current` holds the settled metrics/data bits,
    # `pending` the candidates being built for the next step.
    current = [Node(INF, BinData(0, 0)) for _ in range(self.n_states)]
    pending = [Node(None, None) for _ in range(self.n_states)]
    current[0].metric = 0  # decoding starts from state 0

    # Walk the parity sequence one symbol at a time.
    for parity in parities:
        # Reset candidate metrics before the relaxation pass.
        for cell in pending:
            cell.metric = INF

        # Relax every transition (state, input bit) -> next state.
        for src in range(self.n_states):
            for bit in (0, 1):
                nxt = self.states[src][bit].new_state
                expected = self.states[src][bit].parity
                dist = hamming_distance(expected, parity)
                candidate = current[src].metric + dist
                # Keep only the cheapest path into each next state.
                if candidate < pending[nxt].metric:
                    pending[nxt].metric = candidate
                    pending[nxt].bindata = current[src].bindata + bit

        # Promote the freshly chosen column for the next iteration.
        for idx in range(self.n_states):
            current[idx].metric = pending[idx].metric
            current[idx].bindata = pending[idx].bindata

    # Finalization: pick the survivor with the smallest path metric.
    winner, best_metric = None, INF
    for cell in current:
        if cell.metric < best_metric:
            best_metric = cell.metric
            winner = cell

    # Decoded data bits of the best surviving path.
    return winner.bindata
def _prepare_stream(self, bstream, offset):
    """Resolve the entry offset and seek *bstream* to it.

    Offsets are kept as (or wrapped into) BinData objects so a shared
    offset instance can be updated once and reflected in every entry
    that references it.
    """
    if offset == None:
        # Default to wherever the stream currently is.
        offset = bstream.offset

    offset_cls = offset.__class__
    if issubclass(offset_cls, (types.IntType,)):
        # Plain integers are wrapped into a 4-byte BinData container.
        wrapped = BinData(4)
        wrapped.init_data_from_int(offset)
        self.offset = wrapped
    elif issubclass(offset_cls, (BinData,)):
        self.offset = offset
    elif issubclass(offset_cls, (Entry,)) and '__int__' in dir(offset):
        # An Entry coercible to int is accepted as an offset reference.
        self.offset = offset
    else:
        raise Exception('Invalid type for EntryList offset (%s) in class %s' % (offset, self))

    bstream.seek(int(self.offset))
def __init__(self, bstream, entry_type, size, offset=None):
    """
    Parse an entry list from a bytestream.

    entry_type should be any Entry type generated with Entry.create()
    """
    self.data = []
    self.corrupted = False
    self.type = entry_type

    # Resolve the list offset: default to the stream's current position,
    # wrap plain ints into BinData, accept BinData as-is.
    if offset == None:
        self.offset = BinData(4)
        self.offset.init_data_from_int(bstream.offset)
    elif issubclass(offset.__class__, (types.IntType,)):
        self.offset = BinData(4)
        self.offset.init_data_from_int(offset)
    elif issubclass(offset.__class__, (BinData,)):
        self.offset = offset
    else:
        raise Exception('Invalid type for EntryList offset: %s' % offset.__class__)
    bstream.seek(int(self.offset))

    # Resolve the element count the same way (int or BinData).
    if issubclass(size.__class__, (types.IntType,)):
        self.size = BinData(4)
        self.size.init_data_from_int(size)
    elif issubclass(size.__class__, (BinData,)):
        self.size = size
    else:
        raise Exception('Invalid type for EntryList size: %s' % size.__class__)

    # Parse the entries sequentially; stop and flag corruption if the
    # stream runs dry mid-list.
    for _ in xrange(int(self.size)):
        self.data.append(self.type(bstream))
        if bstream.exhausted:
            self.corrupted = True
            break
class Entry(object):
    """
    Generic Entry

    This class is used to generate other classes dynamically that
    represent the target file format. Concrete entry types are built
    with Entry.create(), which supplies a real attr_map.
    """

    # STUB: will be replaced during create() call
    attr_map = []

    def __init__(self, bstream, offset=None):
        """Generic initialization of an entry structure.

        Seeks *bstream* to the resolved offset, then reads every field
        described by attr_map. Sets self.corrupted if the stream is
        exhausted mid-parse.
        """
        self.corrupted = False
        self.attributes = []
        self._prepare_stream(bstream, offset)
        for item in self.attr_map:
            self._add_attr(bstream, item)

    def blob(self):
        """Serialize the bytes of the Entry."""
        # BUG FIX: bytearray('') is Python-2-only (TypeError on Python 3);
        # bytearray() is equivalent on both.
        blob = bytearray()
        for attr in self.attributes:
            blob += attr.data
        return blob

    def _prepare_stream(self, bstream, offset):
        """
        Initialize the offset and move the stream to it.

        The offset should be a BinData object to allow references to it;
        an update to a shared offset then updates all its references.
        """
        if offset is None:
            offset = bstream.offset
        if issubclass(offset.__class__, (types.IntType,)):
            # Wrap plain integers in a 4-byte BinData container.
            self.offset = BinData(4)
            self.offset.init_data_from_int(offset)
        elif issubclass(offset.__class__, (BinData,)):
            self.offset = offset
        elif (issubclass(offset.__class__, (Entry,))
              and '__int__' in dir(offset)):
            # An Entry coercible to int is accepted as an offset reference.
            self.offset = offset
        else:
            raise Exception('Invalid type for EntryList offset (%s) in class %s' % (offset, self))
        bstream.seek(int(self.offset))

    def _get_size(self, bstream, size):
        """
        Return an integer value based on the provided size.

        Size can be an Integer, a BinData object, a method in the format
        func(self, bstream), or an attribute name / expression string.
        """
        # size is a constant value
        if issubclass(size.__class__, (types.IntType,)):
            return size
        elif issubclass(size.__class__, (BinData,)):
            return int(size)
        # size is calculated from a method
        elif (issubclass(size.__class__, (types.FunctionType,))
              or issubclass(size.__class__, (types.MethodType,))):
            return size(self, bstream)
        elif issubclass(size.__class__, (types.StringType,)):
            # size is in another field from this entry
            if size in self.__dict__.keys():
                return int(self.__dict__[size])
            # size is an evaluated expression
            else:
                # SECURITY: eval() on a string from the attr_map. Safe only
                # as long as attr_maps are trusted, developer-written code —
                # never feed file-derived strings through this path.
                return eval(size)
        else:
            raise Exception("Invalid size type in Entry.")

    def _add_attr(self, bstream, attr_item):
        """
        Parse one (name, size, type[, extra]) item from attr_map and
        register the resulting attribute both in self.attributes and as
        self.<name>.
        """
        name = attr_item[0]
        size = self._get_size(bstream, attr_item[1])
        etype = attr_item[2]
        # Raw binary data
        if issubclass(etype, (BinData,)):
            self.attributes.append(etype(size))
            # No need to read if bytestream is already exhausted
            if not bstream.exhausted:
                self.attributes[-1].init_data(bstream.read(size))
        # Entry subclass
        elif issubclass(etype, Entry):
            if size > 0:
                self.attributes.append(etype(bstream))
            else:
                # Zero-size nested entry: expose it as None and skip.
                self.__dict__[name] = None
                return
        # Entry List
        elif etype == EntryList:
            if len(attr_item) < 4:
                raise Exception("Missing value for entry %s" % name)
            list_type = attr_item[3]
            self.attributes.append(EntryList(bstream, list_type, size))
        else:
            raise Exception("Invalid type for entry field: %s" % etype)
        # If we could not read, mark self as corrupted
        if bstream.exhausted:
            self.corrupted = True
        # add attr name to dictionary
        self.__dict__[name] = self.attributes[-1]

    @staticmethod
    def create(name, attr_map):
        """
        Creates a specialized Entry subclass named *name*.

        attr_map is a list of tuples [field_name, field_size, field_mode, extra]:
        field_name:
        - String naming the field
        field_size:
        - Integer: Hardcoded size in bytes
        - function(bytestream): A function that will be called on the
          target bytestream to get the size
        - String: The name of another field previously read that
          provides the size, or an expression to be evaluated with
          eval() that evaluates to an integer
        field_mode:
        - The BinData mode of the field
        extra:
        - for BinData: BinData mode
        - for Entry: unused
        - for EntryList: entry type of the list
        """
        return type(name, (Entry,), {'attr_map': attr_map})
class EntryList(object):
    """
    This class represents a linear list of entry structures in the
    target file. The structures must be sequential in the file and
    must all be of the same type.
    """

    def __init__(self, bstream, entry_type, size, offset=None):
        """
        Parse an entry list from a bytestream.

        entry_type should be any Entry type generated with Entry.create()
        """
        self.data = []
        self.corrupted = False
        self.type = entry_type
        # Set the entry offset: default to the stream's current position,
        # wrap plain ints into BinData, accept BinData as-is.
        if offset == None:
            self.offset = BinData(4)
            self.offset.init_data_from_int(bstream.offset)
        elif issubclass(offset.__class__, (types.IntType,)):
            self.offset = BinData(4)
            self.offset.init_data_from_int(offset)
        elif issubclass(offset.__class__, (BinData,)):
            self.offset = offset
        else:
            raise Exception('Invalid type for EntryList offset: %s' % offset.__class__)
        bstream.seek(int(self.offset))
        # Resolve the element count the same way (int or BinData).
        if issubclass(size.__class__, (types.IntType,)):
            self.size = BinData(4)
            self.size.init_data_from_int(size)
        elif issubclass(size.__class__, (BinData,)):
            self.size = size
        else:
            raise Exception('Invalid type for EntryList size: %s' % size.__class__)
        for i in xrange(int(self.size)):
            self.data.append(self.type(bstream))
            if bstream.exhausted:
                # Ran out of bytes mid-list: flag and stop parsing.
                self.corrupted = True
                break

    def blob(self):
        """Serialize all contained entries back to bytes."""
        # BUG FIX: bytearray('') is Python-2-only (TypeError on Python 3);
        # bytearray() is equivalent on both.
        blob = bytearray()
        for entry in self.data:
            blob += entry.blob()
        return blob

    def __getitem__(self, key):
        """Return the entry at *key*, or None when out of range."""
        # BUG FIX: was `key >= 0 or key < len(self.data)`, which let every
        # integer through — out-of-range keys raised IndexError instead of
        # returning None, and negative keys wrapped around. Use the same
        # bounds check as __setitem__.
        if 0 <= key < len(self.data):
            return self.data[key]
        return None

    def __setitem__(self, key, value):
        """Replace the entry at *key*; out-of-range keys are ignored."""
        if 0 <= key < len(self.data):
            self.data[key] = value
def make_bindata(arg):
    """Argparse type adapter: wrap a raw bit-string argument in BinData."""
    result = BinData(arg)
    return result
def __init__(self, metric=INF, bindata=None):
    """Trellis node: accumulated path metric plus the decoded bits
    along that path. Defaults to an infinite (unreachable) metric."""
    self.metric = metric
    # Fall back to an empty BinData when no path bits are supplied.
    self.bindata = bindata if bindata else BinData(0, 0)
def encode(self, bindata):
    """
    Encodes data using convolutional code.
    Public method (API).
    """
    encoded = BinData(0, 0)
    # Append one parity symbol (parity_len bits) per generated parity.
    for parity in self.generate_parities(bindata):
        encoded += BinData(parity, self.parity_len)
    return encoded