def test_decide(): """Allow state transition decisions based on collected context other than just the next source symbol. """ e = cpppo.state("enter") e["a"] = a = cpppo.state_input("a", context="a") a[" "] = s1 = cpppo.state_drop("s1") s1[" "] = s1 s1[None] = i1 = cpppo.integer("i1", context="i1") i1[" "] = s2 = cpppo.state_drop("s2") s2[" "] = s2 s2[None] = i2 = cpppo.integer("i2", context="i2") less = cpppo.state("less", terminal=True) greater = cpppo.state("greater", terminal=True) equal = cpppo.state("equal", terminal=True) i2[None] = cpppo.decide("isless", less, predicate=lambda machine, source, path, data: data.i1 < data.i2) i2[None] = cpppo.decide("isgreater", greater, predicate=lambda machine, source, path, data: data.i1 > data.i2) i2[None] = equal source = cpppo.peekable(str("a 1 2")) data = cpppo.dotdict() with cpppo.dfa("comparo", initial=e) as comparo: for i, (m, s) in enumerate(comparo.run(source=source, data=data)): log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r", m.name_centered(), i, s, source.sent, source.peek(), data, ) assert i == 11 assert s is less source = cpppo.peekable(str("a 33 33")) data = cpppo.dotdict() with cpppo.dfa("comparo", initial=e) as comparo: for i, (m, s) in enumerate(comparo.run(source=source, data=data)): log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r", m.name_centered(), i, s, source.sent, source.peek(), data, ) assert i == 13 assert s is equal
def main(): """The basic examples in the README""" # Basic DFA that accepts ab+ E = cpppo.state( 'E' ) A = cpppo.state_input( 'A' ) B = cpppo.state_input( 'B', terminal=True ) E['a'] = A A['b'] = B B['b'] = B BASIC = cpppo.dfa( 'ab+', initial=E, context='basic' ) # Composite state machine accepting ab+, ignoring ,[ ]* separators ABP = cpppo.dfa( 'ab+', initial=E, terminal=True ) SEP = cpppo.state_drop( 'SEP' ) ABP[','] = SEP SEP[' '] = SEP SEP[None] = ABP CSV = cpppo.dfa( 'CSV', initial=ABP, context='csv' ) # A regular expression; he default dfa name is the regular expression itself. REGEX = cpppo.regex( initial='(ab+)((,[ ]*)(ab+))*', context='regex' ) data = cpppo.dotdict() for machine in [ BASIC, CSV, REGEX ]: path = machine.context() + '.input' # default for state_input data source = cpppo.peekable( str( 'abbbb, ab' )) with machine: for i,(m,s) in enumerate( machine.run( source=source, data=data )): print( "%s #%3d; next byte %3d: %-10.10r: %r" % ( m.name_centered(), i, source.sent, source.peek(), data.get(path) )) print( "Accepted: %r; remaining: %r\n" % ( data.get(path), ''.join( source ))) print( "Final: %r" % ( data ))
def test_readme():
    """The basic examples in the README"""

    # Basic DFA that accepts ab+
    E = cpppo.state( "E" )
    A = cpppo.state_input( "A" )
    B = cpppo.state_input( "B", terminal=True )
    E['a'] = A
    A['b'] = B
    B['b'] = B

    data = cpppo.dotdict()
    source = cpppo.peekable( str( 'abbbb,ab' ))
    with cpppo.dfa( initial=E ) as abplus:
        for i,(m,s) in enumerate( abplus.run( source=source, path="ab+", data=data )):
            log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                      m.name_centered(), i, s, source.sent, source.peek(), data )
        assert i == 5
        assert source.peek() == str(',')

    # Composite state machine accepting ab+, ignoring ,[ ]* separators
    CSV = cpppo.dfa( "CSV", initial=E, terminal=True )
    SEP = cpppo.state_drop( "SEP" )
    CSV[','] = SEP
    SEP[' '] = SEP
    SEP[None] = CSV

    source = cpppo.peekable( str( 'abbbb, ab' ))
    with cpppo.dfa( initial=CSV ) as r2:
        for i,(m,s) in enumerate( r2.run( source=source, path="readme_CSV", data=data )):
            log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                      m.name_centered(), i, s, source.sent, source.peek(), data )
        assert i == 14
        assert source.peek() is None
def __init__( self, name=None, **kwds ):
    name = name or kwds.setdefault( 'context', self.__class__.__name__ )

    # Parse the status, and status_ext.size
    stat = USINT( 'status', context=None )
    stat[True] = size = USINT( '_ext.size', extension='_ext.size' )

    # Prepare a state-machine to parse each UINT into .UINT, and move it onto the .data list
    exts = UINT( 'ext_status', extension='.ext_status' )
    exts[None] = move_if( 'data', source='.ext_status', destination='.data',
                          initializer=lambda **kwds: [] )
    exts[None] = cpppo.state( 'done', terminal=True )

    # Parse each status_ext.data in a sub-dfa, repeating status_ext.size times
    each = cpppo.dfa( 'each', extension='_ext', initial=exts, repeat='_ext.size',
                      terminal=True )

    # Only enter the status_ext.data dfa if status_ext.size is non-zero
    size[None] = cpppo.decide( '_ext.size',
                               predicate=lambda path=None, data=None, **kwds: data[path+'_ext.size'],
                               state=each )
    # Otherwise, we're done!
    size[None] = octets_noop( 'done', terminal=True )

    super( status, self ).__init__( name=name, initial=stat, **kwds )
def __init__(self, name=None, **kwds):
    name = name or kwds.setdefault('context', self.__class__.__name__)

    # Parse the status, and status_ext.size
    stat = USINT('status', context=None)
    stat[True] = size = USINT('_ext.size', extension='_ext.size')

    # Prepare a state-machine to parse each UINT into .UINT, and move it onto the .data list
    exts = UINT('ext_status', extension='.ext_status')
    exts[None] = move_if('data', source='.ext_status', destination='.data',
                         initializer=lambda **kwds: [])
    exts[None] = cpppo.state('done', terminal=True)

    # Parse each status_ext.data in a sub-dfa, repeating status_ext.size times
    each = cpppo.dfa('each', extension='_ext', initial=exts, repeat='_ext.size',
                     terminal=True)

    # Only enter the status_ext.data dfa if status_ext.size is non-zero
    size[None] = cpppo.decide('_ext.size',
                              predicate=lambda path=None, data=None, **kwds: data[path + '_ext.size'],
                              state=each)
    # Otherwise, we're done!
    size[None] = octets_noop('done', terminal=True)

    super(status, self).__init__(name=name, initial=stat, **kwds)
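# A minimal usage sketch (not part of the original source) of the status machine
# defined above, run over a reply whose status is 0x00 with no extended status
# words.  The two-byte input and the result paths (.status, .status_ext.size) are
# assumptions inferred from the context/extension arguments used in __init__.
def example_parse_status():
    data = cpppo.dotdict()
    source = cpppo.peekable(b'\x00\x00')        # status == 0x00, status_ext.size == 0
    with status() as machine:
        for m, s in machine.run(source=source, data=data):
            pass
    # Expected (assumed): data.status == 0 and data.status_ext.size == 0
    return data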
def test_decide():
    """Allow state transition decisions based on collected context other than just
    the next source symbol.
    """
    e = cpppo.state("enter")
    e['a'] = a = cpppo.state_input("a", context='a')
    a[' '] = s1 = cpppo.state_drop("s1")
    s1[' '] = s1
    s1[None] = i1 = cpppo.integer("i1", context='i1')
    i1[' '] = s2 = cpppo.state_drop("s2")
    s2[' '] = s2
    s2[None] = i2 = cpppo.integer("i2", context='i2')

    less = cpppo.state("less", terminal=True)
    greater = cpppo.state("greater", terminal=True)
    equal = cpppo.state("equal", terminal=True)

    i2[None] = cpppo.decide(
        "isless", less,
        predicate=lambda machine, source, path, data: data.i1 < data.i2)
    i2[None] = cpppo.decide(
        "isgreater", greater,
        predicate=lambda machine, source, path, data: data.i1 > data.i2)
    i2[None] = equal

    source = cpppo.peekable(str('a 1 2'))
    data = cpppo.dotdict()
    with cpppo.dfa("comparo", initial=e) as comparo:
        for i, (m, s) in enumerate(comparo.run(source=source, data=data)):
            log.info("%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                     m.name_centered(), i, s, source.sent, source.peek(), data)
        assert i == 12
        assert s is less

    source = cpppo.peekable(str('a 33 33'))
    data = cpppo.dotdict()
    with cpppo.dfa("comparo", initial=e) as comparo:
        for i, (m, s) in enumerate(comparo.run(source=source, data=data)):
            log.info("%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                     m.name_centered(), i, s, source.sent, source.peek(), data)
        assert i == 14
        assert s is equal
def __init__( self, name=None, **kwds ):
    name = name or kwds.setdefault( 'context', 'header' )
    init = cpppo.state( "empty", terminal=True )
    init[True] = cmnd = UINT( "command", context="command" )
    cmnd[True] = leng = UINT( "length", context="length" )
    leng[True] = sess = UDINT( "sess_hdl", context="session_handle" )
    sess[True] = stat = UDINT( "status", context="status" )
    stat[True] = ctxt = octets( "sndr_ctx", context="sender_context", repeat=8 )
    ctxt[True] = opts = UDINT( "options", context="options", terminal=True )

    super( enip_header, self ).__init__( name=name, initial=init, **kwds )
def __init__(self, name=None, **kwds):
    name = name or kwds.setdefault('context', 'header')
    init = cpppo.state("empty", terminal=True)
    init[True] = cmnd = UINT("command", context="command")
    cmnd[True] = leng = UINT("length", context="length")
    leng[True] = sess = UDINT("sess_hdl", context="session_handle")
    sess[True] = stat = UDINT("status", context="status")
    stat[True] = ctxt = octets("sndr_ctx", context="sender_context", repeat=8)
    ctxt[True] = opts = UDINT("options", context="options", terminal=True)

    super(enip_header, self).__init__(name=name, initial=init, **kwds)
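# A minimal usage sketch (not part of the original source) of the enip_header
# machine defined above, run over a raw 24-byte EtherNet/IP encapsulation header
# (here, a Register Session request: command 0x0065, length 4, zeroed session
# handle, status, sender context and options).  The byte values and the result
# key names are assumptions based on the contexts declared in __init__.
def example_parse_enip_header():
    raw = bytes(bytearray([
        0x65, 0x00,                                              # command        0x0065
        0x04, 0x00,                                              # length         4
        0x00, 0x00, 0x00, 0x00,                                  # session_handle 0
        0x00, 0x00, 0x00, 0x00,                                  # status         0
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,          # sender_context (8 octets)
        0x00, 0x00, 0x00, 0x00,                                  # options        0
    ]))
    data = cpppo.dotdict()
    source = cpppo.peekable(raw)
    with enip_header('header') as machine:
        for m, s in machine.run(source=source, data=data):
            pass
    # Expected (assumed): data.header.command == 0x0065, data.header.length == 4, ...
    return data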
def __init__(self, name=None, **kwds):
    """Parse CPF list items 'til .count reached, which should be simultaneous with
    symbol exhaustion, if caller specified a symbol limit.
    """
    name = name or kwds.setdefault('context', self.__class__.__name__)

    # A number, and then each CPF item consists of a type, length and then parsable data.
    ityp = UINT(context='type_id')
    ityp[True] = ilen = UINT(context='length')
    ilen[None] = cpppo.decide('empty',
                              predicate=lambda path=None, data=None, **kwds: not data[path].length,
                              state=octets_noop('done', terminal=True))

    # Prepare a parser for each recognized CPF item type.  It must establish one level of
    # context, because we need to pass it a limit='..length' denoting the length we just parsed.
    for typ, cls in (self.item_parsers or {}).items():
        ilen[None] = cpppo.decide(cls.__name__,
                                  state=cls(terminal=True, limit='..length'),
                                  # bind typ per iteration, so each predicate tests its own item type
                                  predicate=lambda path=None, data=None, typ=typ, **kwds: data[path].type_id == typ)

    # If we don't recognize the CPF item type, just parse remainder into .input (so we could re-generate)
    ilen[None] = urec = octets('unrecognized', context=None, terminal=True)
    urec[True] = urec

    # Each item is collected into '.item__', 'til no more input available, and then moved into
    # place into '.item' (init to [])
    item = cpppo.dfa('each', context='item__', initial=ityp)
    item[None] = move_if('move', source='.item__', destination='.item',
                         initializer=lambda **kwds: [])
    item[None] = cpppo.state('done', terminal=True)

    # Parse count, and then exactly .count CPF items.
    loop = UINT(context='count')
    loop[None] = cpppo.dfa('all', initial=item, repeat='.count', terminal=True)

    super(CPF, self).__init__(name=name, initial=loop, **kwds)
def __init__( self, name=None, **kwds ):
    """Parse CPF list items 'til .count reached, which should be simultaneous with
    symbol exhaustion, if caller specified a symbol limit.
    """
    name = name or kwds.setdefault( 'context', self.__class__.__name__ )

    # A number, and then each CPF item consists of a type, length and then parsable data.
    ityp = UINT( context='type_id' )
    ityp[True] = ilen = UINT( context='length' )
    ilen[None] = cpppo.decide( 'empty',
                               predicate=lambda path=None, data=None, **kwds: not data[path].length,
                               state=octets_noop( 'done', terminal=True ))

    # Prepare a parser for each recognized CPF item type.  It must establish one level of
    # context, because we need to pass it a limit='..length' denoting the length we just parsed.
    for typ,cls in ( self.item_parsers or {} ).items():
        ilen[None] = cpppo.decide( cls.__name__,
                                   state=cls( terminal=True, limit='..length' ),
                                   # bind typ per iteration, so each predicate tests its own item type
                                   predicate=lambda path=None, data=None, typ=typ, **kwds: data[path].type_id == typ )

    # If we don't recognize the CPF item type, just parse remainder into .input (so we could re-generate)
    ilen[None] = urec = octets( 'unrecognized', context=None, terminal=True )
    urec[True] = urec

    # Each item is collected into '.item__', 'til no more input available, and then moved into
    # place into '.item' (init to [])
    item = cpppo.dfa( 'each', context='item__', initial=ityp )
    item[None] = move_if( 'move', source='.item__', destination='.item',
                          initializer=lambda **kwds: [] )
    item[None] = cpppo.state( 'done', terminal=True )

    # Parse count, and then exactly .count CPF items.
    loop = UINT( context='count' )
    loop[None] = cpppo.dfa( 'all', initial=item, repeat='.count', terminal=True )

    super( CPF, self ).__init__( name=name, initial=loop, **kwds )
def main():
    """The basic examples in the README"""

    # Basic DFA that accepts ab+
    E = cpppo.state('E')
    A = cpppo.state_input('A')
    B = cpppo.state_input('B', terminal=True)
    E['a'] = A
    A['b'] = B
    B['b'] = B
    BASIC = cpppo.dfa('ab+', initial=E, context='basic')

    # Composite state machine accepting ab+, ignoring ,[ ]* separators
    ABP = cpppo.dfa('ab+', initial=E, terminal=True)
    SEP = cpppo.state_drop('SEP')
    ABP[','] = SEP
    SEP[' '] = SEP
    SEP[None] = ABP
    CSV = cpppo.dfa('CSV', initial=ABP, context='csv')

    # A regular expression; the default dfa name is the regular expression itself.
    REGEX = cpppo.regex(initial='(ab+)((,[ ]*)(ab+))*', context='regex')

    data = cpppo.dotdict()
    for machine in [BASIC, CSV, REGEX]:
        path = machine.context() + '.input'  # default for state_input data
        source = cpppo.peekable(str('abbbb, ab'))
        with machine:
            for i, (m, s) in enumerate(machine.run(source=source, data=data)):
                print("%s #%3d; next byte %3d: %-10.10r: %r" % (
                    m.name_centered(), i, source.sent, source.peek(), data.get(path)))
        print("Accepted: %r; remaining: %r\n" % (data.get(path), ''.join(source)))
    print("Final: %r" % (data))
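# A minimal sketch (not part of the original source) showing that cpppo.regex
# builds an ordinary dfa from a regular expression, so other patterns run exactly
# like the README machines above.  The 'hex' name/context, the pattern and the
# input string are illustrative assumptions; as in main(), the matched symbols
# are expected under '<context>.input' and the unmatched tail stays in source.
def example_regex_hex():
    HEXNUM = cpppo.regex(name='hex', initial='0x[0-9a-fA-F]+', context='hex')
    data = cpppo.dotdict()
    source = cpppo.peekable(str('0xC0FFEE is not decimal'))
    with HEXNUM:
        for i, (m, s) in enumerate(HEXNUM.run(source=source, data=data)):
            pass
    return data.get('hex.input')    # accepted symbols; ' is not decimal' remains in source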
def test_dfa():
    # Simple DFA with states consuming no input.  A NULL (None) state transition
    # doesn't require input for state change.  The Default (True) transition
    # requires input to make the transition, but none of these states consume
    # it, so it'll be left over at the end.
    a = cpppo.state("Initial")
    a[None] = b = cpppo.state("Middle")
    b[True] = cpppo.state("Terminal", terminal=True)

    source = cpppo.chainable()
    i = a.run(source=source)
    m, s = next(i)
    assert m is None
    assert s is not None and s.name == "Middle"
    try:
        next(i)
        assert False, "Expected no more non-transition events"
    except StopIteration:
        pass

    machine = cpppo.dfa(initial=a)

    with machine:
        log.info("DFA:")
        for initial in machine.initial.nodes():
            for inp, target in initial.edges():
                log.info("%s <- %-10.10r -> %s"
                         % (cpppo.centeraxis(initial, 25, clip=True), inp, target))

        # Running with no input will yield the initial state, with None input; since it is a NULL
        # state (no input processed), it will simply attempt to transition.  This will require the
        # next input from source, which is empty, so it will return input,state=(None, None)
        # indicating a non-terminal state and no input left.  This gives the caller an opportunity
        # to reload input and try again.  If a loop is detected (same state and input conditions
        # seen repeatedly), the DFA will terminate; if not in a terminal state, an exception will be
        # raised.
        log.info("States; No input")
        source = cpppo.chainable()
        sequence = machine.run(source=source)
        for num in range(10):
            try:
                mch, sta = next(sequence)
            except StopIteration:
                sequence = None
                break
            except cpppo.NonTerminal as e:
                assert "non-terminal state" in str(e)
                break
            inp = source.peek()
            log.info("%s <- %r" % (cpppo.centeraxis(mch, 25, clip=True), inp))
            if num == 0:
                assert inp is None
                assert sta.name == "Initial"
            if num == 1:
                assert inp is None
                assert sta.name == "Middle"
            if num == 2:
                assert inp is None
                assert sta is None      # And no more no-input transitions
            assert num < 3              # If we get here, we didn't detect loop
        assert num == 3                 # since the iterator did not stop cleanly (after processing a state's input,
                                        # and then trying to determine the next state), it'll continue indefinitely
        assert sta is None
        assert sequence is not None

        # Try with some input loaded into source stream, using an identical generator sequence.
        # Only the first element is gotten, and is reused for every NULL state transition, and is
        # left over at the end.
        log.info("States; 'abc' input")
        assert source.peek() is None
        source.chain(b'abc')
        assert source.peek() == b'a'[0]  # python2: str, python3: int
        sequence = machine.run(source=source)
        for num in range(10):
            try:
                mch, sta = next(sequence)
            except StopIteration:
                break
            inp = source.peek()
            log.info("%s <- %r", cpppo.centeraxis(mch, 25, clip=True), inp)
            if num == 0:
                assert inp == b'a'[0]
                assert sta.name == "Initial"
            if num == 1:
                assert inp == b'a'[0]
                assert sta.name == "Middle"
            if num == 2:
                assert inp == b'a'[0]
                assert sta.name == "Terminal"
            assert num < 3
        assert num == 3
        assert inp == b'a'[0]
        assert sta.name == "Terminal"
def test_state():
    """A state is expected to process its input (perhaps nothing, if it's a no-input
    state), and then use the next input symbol to transition to another state.

    Each state has a context into a data artifact, into which it will collect its
    results.

    We must ensure that all state transitions are configured in the target alphabet;
    if an encoder is supplied, then all input symbols and all transition symbols will
    be encoded using it.  In this test, all string literals are Unicode (in both
    Python 2 and 3), so we use a unicode encoder to convert them to symbols."""
    unicodekwds = {
        'alphabet': unicode if sys.version_info[0] < 3 else str,
        'encoder': cpppo.type_unicode_encoder,
    }
    s1 = cpppo.state('one', **unicodekwds)
    s2 = cpppo.state_drop('two', **unicodekwds)

    s1['a'] = s2
    assert s1['a'] is s2

    source = cpppo.peeking('abc')

    # We can run state instances with/without acquisition
    g = s1.run(source=source)
    assert next(g) == (None, s2)
    assert source.peek() == 'a'
    with pytest.raises(StopIteration):
        next(g)
    with s1:
        g = s1.run(source=source)
        assert source.peek() == 'a'
        assert next(g) == (None, s2)
        assert source.peek() == 'a'
        try:
            next(g)
            assert False, "Should have terminated"
        except StopIteration:
            pass
        assert source.peek() == 'a'

    # A state machine accepting a sequence of unicode a's
    a_s = cpppo.state("a_s", **unicodekwds)
    an_a = cpppo.state_input("a", terminal=True,
                             typecode=cpppo.type_unicode_array_symbol, **unicodekwds)
    a_s['a'] = an_a
    an_a['a'] = an_a

    source = cpppo.peeking('aaaa')
    data = cpppo.dotdict()

    with cpppo.dfa(initial=a_s) as aplus:
        for i, (m, s) in enumerate(aplus.run(source=source)):
            log.info("%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                     m.name_centered(), i, s, source.sent, source.peek(), data)
        assert i == 5
        assert source.peek() is None
        assert len(data) == 0

    # Accepting a's separated by comma and space/pi (for kicks).  When the lower level a's machine
    # doesn't recognize the symbol, then the higher level machine will recognize and discard
    sep = cpppo.state_drop("sep", **unicodekwds)
    csv = cpppo.dfa("csv", initial=a_s, terminal=True, **unicodekwds)
    csv[','] = sep
    sep[' '] = sep
    sep['π'] = sep
    sep[None] = csv

    source = cpppo.peeking('aaaa, a,π a')
    data = cpppo.dotdict()

    with cpppo.dfa(initial=csv) as csvaplus:
        for i, (m, s) in enumerate(csvaplus.run(source=source, path="csv", data=data)):
            log.info("%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                     m.name_centered(), i, s, source.sent, source.peek(), data)
        assert i == 18
        assert source.peek() is None
    assert data.csv.input.tounicode() == 'aaaaaa'
def test_dfa():
    # Simple DFA with states consuming no input.  A NULL (None) state transition
    # doesn't require input for state change.  The Default (True) transition
    # requires input to make the transition, but none of these states consume
    # it, so it'll be left over at the end.
    a = cpppo.state( "Initial" )
    a[None] = b = cpppo.state( "Middle" )
    b[True] = cpppo.state( "Terminal", terminal=True )

    source = cpppo.chainable()
    i = a.run( source=source )
    m,s = next( i )
    assert m is None
    assert s is not None and s.name == "Middle"
    try:
        next( i )
        assert False, "Expected no more non-transition events"
    except StopIteration:
        pass

    machine = cpppo.dfa( initial=a )

    with machine:
        log.info( "DFA:" )
        for initial in machine.initial.nodes():
            for inp,target in initial.edges():
                log.info( "%s <- %-10.10r -> %s" % (
                    cpppo.centeraxis( initial, 25, clip=True ), inp, target ))

        # Running with no input will yield the initial state, with None input; since it is a NULL
        # state (no input processed), it will simply attempt to transition.  This will require the
        # next input from source, which is empty, so it will return input,state=(None, None)
        # indicating a non-terminal state and no input left.  This gives the caller an opportunity
        # to reload input and try again.  If a loop is detected (same state and input conditions
        # seen repeatedly), the DFA will terminate; if not in a terminal state, an exception will be
        # raised.
        log.info( "States; No input" )
        source = cpppo.chainable()
        sequence = machine.run( source=source )
        for num in range( 10 ):
            try:
                mch,sta = next( sequence )
            except StopIteration:
                sequence = None
                break
            except cpppo.NonTerminal as e:
                assert "non-terminal state" in str( e )
                break
            inp = source.peek()
            log.info( "%s <- %r" % ( cpppo.centeraxis( mch, 25, clip=True ), inp ))
            if num == 0: assert inp is None; assert sta.name == "Initial"
            if num == 1: assert inp is None; assert sta.name == "Middle"
            if num == 2: assert inp is None; assert sta is None  # And no more no-input transitions
            assert num < 3              # If we get here, we didn't detect loop
        assert num == 3                 # since the iterator did not stop cleanly (after processing a state's input,
                                        # and then trying to determine the next state), it'll continue indefinitely
        assert sta is None
        assert sequence is not None

        # Try with some input loaded into source stream, using an identical generator sequence.
        # Only the first element is gotten, and is reused for every NULL state transition, and is
        # left over at the end.
        log.info( "States; 'abc' input" )
        assert source.peek() is None
        source.chain( b'abc' )
        assert source.peek() == b'a'[0]  # python2: str, python3: int
        sequence = machine.run( source=source )
        for num in range( 10 ):
            try:
                mch,sta = next( sequence )
            except StopIteration:
                break
            inp = source.peek()
            log.info( "%s <- %r", cpppo.centeraxis( mch, 25, clip=True ), inp )
            if num == 0: assert inp == b'a'[0]; assert sta.name == "Initial"
            if num == 1: assert inp == b'a'[0]; assert sta.name == "Middle"
            if num == 2: assert inp == b'a'[0]; assert sta.name == "Terminal"
            assert num < 3
        assert num == 3
        assert inp == b'a'[0]
        assert sta.name == "Terminal"
def test_state():
    """A state is expected to process its input (perhaps nothing, if it's a no-input
    state), and then use the next input symbol to transition to another state.

    Each state has a context into a data artifact, into which it will collect its
    results.

    We must ensure that all state transitions are configured in the target alphabet;
    if an encoder is supplied, then all input symbols and all transition symbols will
    be encoded using it.  In this test, all string literals are Unicode (in both
    Python 2 and 3), so we use a unicode encoder to convert them to symbols."""
    unicodekwds = {
        'alphabet':	unicode if sys.version_info[0] < 3 else str,
        'encoder':	cpppo.type_unicode_encoder,
    }
    s1 = cpppo.state( 'one', **unicodekwds )
    s2 = cpppo.state_drop( 'two', **unicodekwds )

    s1['a'] = s2
    assert s1['a'] is s2

    source = cpppo.peeking( 'abc' )

    # We can run state instances with/without acquisition
    g = s1.run( source=source )
    assert next( g ) == (None, s2)
    assert source.peek() == 'a'
    with pytest.raises(StopIteration):
        next( g )
    with s1:
        g = s1.run( source=source )
        assert source.peek() == 'a'
        assert next( g ) == (None, s2)
        assert source.peek() == 'a'
        try:
            next( g )
            assert False, "Should have terminated"
        except StopIteration:
            pass
        assert source.peek() == 'a'

    # A state machine accepting a sequence of unicode a's
    a_s = cpppo.state( "a_s", **unicodekwds )
    an_a = cpppo.state_input( "a", terminal=True,
                              typecode=cpppo.type_unicode_array_symbol, **unicodekwds )
    a_s['a'] = an_a
    an_a['a'] = an_a

    source = cpppo.peeking( 'aaaa' )
    data = cpppo.dotdict()

    with cpppo.dfa( initial=a_s ) as aplus:
        for i,(m,s) in enumerate( aplus.run( source=source )):
            log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                      m.name_centered(), i, s, source.sent, source.peek(), data )
        assert i == 5
        assert source.peek() is None
        assert len( data ) == 0

    # Accepting a's separated by comma and space/pi (for kicks).  When the lower level a's machine
    # doesn't recognize the symbol, then the higher level machine will recognize and discard
    sep = cpppo.state_drop( "sep", **unicodekwds )
    csv = cpppo.dfa( "csv", initial=a_s, terminal=True, **unicodekwds )
    csv[','] = sep
    sep[' '] = sep
    sep['π'] = sep
    sep[None] = csv

    source = cpppo.peeking( 'aaaa, a,π a' )
    data = cpppo.dotdict()

    with cpppo.dfa( initial=csv ) as csvaplus:
        for i,(m,s) in enumerate( csvaplus.run( source=source, path="csv", data=data )):
            log.info( "%s #%3d -> %10.10s; next byte %3d: %-10.10r: %r",
                      m.name_centered(), i, s, source.sent, source.peek(), data )
        assert i == 18
        assert source.peek() is None
    assert data.csv.input.tounicode() == 'aaaaaa'
class Object(object):
    """An EtherNet/IP device.Object is capable of parsing and processing a number of requests.  It has
    a class_id and an instance_id; an instance_id of 0 indicates the "class" instance of the
    device.Object, which has different (class level) Attributes (and may respond to different
    commands) than the other instance_id's.

    Each Object has a single class-level parser, which is used to register all of its available
    service request parsers.  The next available symbol designates the type of service request,
    eg. 0x01 ==> Get Attributes All.  These parsers enumerate the requests that are *possible* on
    the Object.  Later, when the Object is requested to actually process the request, a decision
    can be made about whether the request is *allowed*.

    The parser knows how to parse any requests it must handle, and any replies it can generate, and
    puts the results into the provided data artifact.

    Assuming Obj is an instance of Object, and the source iterator produces the incoming symbols:

        0x52, 0x04, 0x91, 0x05, 0x53, 0x43, 0x41, 0x44, #/* R...SCAD */
        0x41, 0x00, 0x14, 0x00, 0x02, 0x00, 0x00, 0x00, #/* A....... */

    then we could run the parser:

        data = cpppo.dotdict()
        with Obj.parse as machine:
            for m,w in machine.run( source=source, data=data ):
                pass

    and it would parse a recognized command (or reply, but that would be unexpected), and produce
    the following entries (in data, under the current context):

        'service':              0x52,
        'path.segment':         [{'symbolic': 'SCADA', 'length': 5}],
        'read_frag.elements':   20,
        'read_frag.offset':     2,

    Then, we could process the request:

        proceed = Obj.request( data )

    and this would process a request, converting it into a reply (any data elements unchanged by
    the reply remain):

        'service':              0xd2,           # changed: |= 0x80
        'status':               0x00,           # default if not specified
        'path.segment':         [{'symbolic': 'SCADA', 'length': 5}],  # unchanged
        'read_frag.elements':   20,             # unchanged
        'read_frag.offset':     2,              # unchanged
        'read_frag.type':       0x00c3,         # produced for reply
        'read_frag.data':       [               # produced for response
            0x104c, 0x0008, 0x0003, 0x0002, 0x0002, 0x0002,
            0x000e, 0x0000, 0x0000, 0x42e6, 0x0007, 0x40c8,
            0x40c8, 0x0000, 0x00e4, 0x0000, 0x0064, 0x02b2,
            0x80c8
        ]
        'input':                bytearray( [    # encoded response payload
            0xd2, 0x00,                                     #/* ....,... */
            0x00, 0x00, 0xc3, 0x00, 0x4c, 0x10, 0x08, 0x00, #/* ....L... */
            0x03, 0x00, 0x02, 0x00, 0x02, 0x00, 0x02, 0x00, #/* ........ */
            0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe6, 0x42, #/* .......B */
            0x07, 0x00, 0xc8, 0x40, 0xc8, 0x40, 0x00, 0x00, #/* ...@.@.. */
            0xe4, 0x00, 0x00, 0x00, 0x64, 0x00, 0xb2, 0x02, #/* ....d... */
            0xc8, 0x80                                      #/* .@ */
        ])

    The response payload is also produced as a bytes array in data.input, encoded and ready for
    transmission, or encapsulation by the next higher level of request processor (eg. a
    Message_Router, encapsulating the response into an EtherNet/IP response).
    """
    max_instance = 0
    lock = threading.Lock()

    service = {}   # Service number/name mappings
    transit = {}   # Symbol to transition to service parser on

    # The parser doesn't add a layer of context; run it with a path= keyword to add a layer
    parser = cpppo.dfa(service, initial=cpppo.state('select'), terminal=True)

    @classmethod
    def register_service_parser(cls, number, name, short, machine):
        """Registers a parser with the Object.  May be invoked during import; no logging."""
        assert number not in cls.service and name not in cls.service, \
            "Duplicate service #%d: %r registered for Object %s" % (number, name, cls.__name__)

        cls.service[number] = name
        cls.service[name] = number
        cls.transit[number] = chr(number) if sys.version_info.major < 3 else number
        cls.parser.initial[cls.transit[number]] \
            = cpppo.dfa(name=short, initial=machine, terminal=True)

    GA_ALL_NAM = "Get Attributes All"
    GA_ALL_CTX = "get_attributes_all"
    GA_ALL_REQ = 0x01
    GA_ALL_RPY = GA_ALL_REQ | 0x80

    GA_SNG_NAM = "Get Attribute Single"
    GA_SNG_REQ = 0x0e
    GA_SNG_RPY = GA_SNG_REQ | 0x80

    SA_SNG_NAM = "Set Attribute Single"
    SA_SNG_REQ = 0x10
    SA_SNG_RPY = SA_SNG_REQ | 0x80

    def __init__(self, name=None, instance_id=None):
        """Create the instance (default to the next available instance_id).  An instance_id of 0 holds
        the "class" attributes/commands.
        """
        self.name = name or self.__class__.__name__

        # Allocate and/or keep track of maximum instance ID assigned thus far.
        if instance_id is None:
            instance_id = self.__class__.max_instance + 1
        if instance_id > self.__class__.max_instance:
            self.__class__.max_instance = instance_id
        self.instance_id = instance_id

        (log.normal if self.instance_id else log.info)(
            "%24s, Class ID 0x%04x, Instance ID %3d created",
            self, self.class_id, self.instance_id)

        instance = lookup(self.class_id, instance_id)
        assert instance is None, \
            "CIP Object class %x, instance %x already exists" % (self.class_id, self.instance_id)

        #
        # directory.1.2.None   == self
        # self.attribute       == directory.1.2 (a dotdict), for direct access of our attributes
        #
        self.attribute = directory.setdefault(
            str(self.class_id) + '.' + str(instance_id), cpppo.dotdict())
        self.attribute['0'] = self

        # Check that the class-level instance (0) has been created; if not, we'll create one using
        # the default parameters.  If this isn't appropriate, then the user should create it using
        # the appropriate parameters.
        if lookup(self.class_id, 0) is None:
            self.__class__(name='meta-' + self.name, instance_id=0)

        if self.instance_id == 0:
            # Set up the default Class-level values.
            self.attribute['1'] = Attribute('Revision', INT, default=0)
            self.attribute['2'] = MaxInstance('Max Instance', INT, class_id=self.class_id)
            self.attribute['3'] = NumInstances('Num Instances', INT, class_id=self.class_id)
            # A UINT array; 1st UINT is size (default 0)
            self.attribute['4'] = Attribute('Optional Attributes', INT, default=0)

    def __str__(self):
        return self.name

    def __repr__(self):
        return "(0x%02x,%3d) %s" % (self.class_id, self.instance_id, self)

    def request(self, data):
        """Handle a request, converting it into a response.  Must be a dotdict data artifact such as is
        produced by the Object's parser.  For example, a request data containing either of the
        following:

            {
                'service':              0x01,
                'get_attributes_all':   True,
            }

        should run the Get Attributes All service, and return True if the channel should continue.
        In addition, we produce the bytes used by any higher level encapsulation.

        TODO: Validate the request.
        """
        result = b''
        if log.isEnabledFor(logging.DETAIL):
            log.detail("%s Request: %s", self, enip_format(data))
        try:
            # Validate the request.  As we process, ensure that .status is set to reflect the
            # failure mode, should an exception be raised.  Return True iff the communications
            # channel should continue.
            data.status = 0x08  # Service not supported, if not recognized
            data.pop('status_ext', None)

            if (data.get('service') == self.GA_ALL_REQ
                    or 'get_attributes_all' in data
                    and data.setdefault('service', self.GA_ALL_REQ) == self.GA_ALL_REQ):
                pass
            else:
                raise AssertionError("Unrecognized Service Request")

            # A recognized request; process the request data artifact, converting it into a reply.
            data.service |= 0x80
            if data.service == self.GA_ALL_RPY:
                # Get Attributes All.  Collect up the bytes representing the attributes.  Replace
                # the place-holder .get_attributes_all=True with a real dotdict.
                data.status = 0x08  # Service not supported, if we fail to access an Attribute
                result = b''
                a_id = 1
                while str(a_id) in self.attribute:
                    result += self.attribute[str(a_id)].produce()
                    a_id += 1
                data.get_attributes_all = cpppo.dotdict()
                data.get_attributes_all.data = bytearray(result)

                data.status = 0x00
                data.pop('status_ext', None)

            # TODO: Other request processing here...

            else:
                raise AssertionError("Unrecognized Service Reply")
        except Exception as exc:
            log.warning("%r Service 0x%02x %s failed with Exception: %s\nRequest: %s\n%s\nStack %s",
                        self, data.service if 'service' in data else 0,
                        (self.service[data.service]
                         if 'service' in data and data.service in self.service
                         else "(Unknown)"),
                        exc, enip_format(data),
                        ''.join(traceback.format_exception(*sys.exc_info())),
                        ''.join(traceback.format_stack()))
            assert data.status != 0x00, \
                "Implementation error: must specify .status error code before raising Exception"
            pass

        # Always produce a response payload; if a failure occurred, will contain an error status.
        # If this fails, we'll raise an exception for higher level encapsulation to handle.
        data.input = bytearray(self.produce(data))
        log.detail("%s Response: %s: %s", self,
                   self.service[data.service], enip_format(data))

        return True  # We shouldn't be able to terminate a connection at this level

    @classmethod
    def produce(cls, data):
        result = b''
        if (data.get('service') == cls.GA_ALL_REQ
                or 'get_attributes_all' in data
                and data.setdefault('service', cls.GA_ALL_REQ) == cls.GA_ALL_REQ):
            # Get Attributes All
            result += USINT.produce(data.service)
            result += EPATH.produce(data.path)
        elif data.get('service') == cls.GA_ALL_RPY:
            # Get Attributes All Reply
            result += USINT.produce(data.service)
            result += b'\x00'  # reserved
            result += status.produce(data)
            result += octets_encode(data.get_attributes_all.data)
        else:
            assert False, "%s doesn't recognize request/reply format: %r" % (cls.__name__, data)
        return result
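# A minimal sketch (not part of the original source) of declaring and instantiating
# a CIP Object subclass, following the instance_id allocation and directory
# registration performed in Object.__init__ above.  The class name and class_id
# (0x01, the CIP Identity class code) are illustrative assumptions; a real subclass
# would typically also define its own class-level service/transit/parser and
# register its service request parsers via register_service_parser.
class Identity(Object):
    class_id = 0x01

identity = Identity('Identity')     # allocates the next instance_id; also creates the class (0) instance
print(repr(identity))               # e.g. "(0x01,  1) Identity" (instance number depends on prior use)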