def try_multiple():
    """Demonstrate a Datamap with a fixed multiple-value field (occurs=3).

    Shows indexing, iteration, single-element and whole-tuple assignment,
    and the StopIteration raised on an out-of-range element assignment.
    Publishes the map in the module-global ``cm`` for interactive use.
    """
    print("--- try_multiple() ---")
    global cm
    dm = Datamap('mymap',
                 String('foo', 2, occurs=3),
                 Int2('bar'))
    cm = dm
    dm.buffer = Abuf(8)
    dm.buffer.value = b'abKIyz\x11\x00'   # three 2-byte foo values + bar
    dump(dm.buffer)
    dir(dm.foo)
    print(dm.foo)
    print(dm.foo[1])
    for idx, val in enumerate(dm.foo):
        print(idx, val)
    print("list(g.foo) =", list(dm.foo))
    print("g.bar=%d" % dm.bar)

    dm.foo[1] = 'QQQ'                     # update one occurrence in place
    print(list(dm.foo))
    dm.foo = ('hh', 'TT', 'ww')           # replace all occurrences at once
    print(list(dm.foo))
    dump(dm.buffer)

    try:
        dm.foo[3] = 'RRR'                 # index beyond occurs=3
    except StopIteration as exc:
        print("Caught StopIteration exception for g.foo[3]='RRR', continue")
        dir(exc)
        print(exc)
    return dm
def try_MU_1toN_in_PE():
    """Demo: variable-count multiple-value field (MU 1:N) nested in periodic groups.

    Group 'pg' has a variable occurrence count taken from field pg_count;
    group 'qg' occurs twice and contains an MU field 'mu' whose count is
    read from a 1-byte counter preceding the field (occurs=lambda: -1).
    Sets the module-global ``cm`` to the created Datamap and returns it.
    """
    print("--- try_MU_1toN_in_PE() ---")
    global cm
    g = Datamap(
        'pemap', Int2('i1'), Uint1('pg_count'),
        Periodic(Datamap('pg', String('pe', 2), Int1('pf')),
                 occurs=lambda: g.pg_count),     # count comes from pg_count field
        Periodic(
            Datamap(
                'qg', String('qe', 2), Uint1('mu_count'),
                String('mu', 2, occurs=lambda: -1)  # count 1 byte before field
            ),
            occurs=2),
    )
    print(g.dmlen)
    cm = g
    g.buffer = Abuf(1024)
    if 0:  # disabled branch: occurs=0 edge case (kept for manual testing)
        print('\noccurs=0')
        g.buffer.value = b'\x01\x00\x01\x00\x02\x00'
        g.prepare()   # recompute variable offsets from count fields
        dump(g.buffer)
        print(g.pe)
        g.dprint()
        try:
            print(g.pe[0])   # no occurrence -> expect StopIteration
        except StopIteration as e:
            dir(e)
            print(e)
            pass
    if 1:
        print('\noccurs=3, muocc=3,1')
        # pg_count=3 -> 3 x (2-byte pe + 1-byte pf); then 2 x qg with
        # mu_count=3 ('Ma','Me','Mi') and mu_count=1 ('Na')
        g.buffer.value = b'\x02\x03P1\xffP2\xfeP3\xfdQ1\x03MaMeMiQ2\x01Na'
        g.prepare()
        dump(g.buffer)
        print(g.pg)
        print(g.pg[1])
        for i, v in enumerate(g.pg):
            print(i, v, v.pe, v.pf)
        for i, v in enumerate(g.qg):
            print(i, v, v.qe, len(v.mu))
            for j, w in enumerate(v.mu):
                print('MU', j, w, w.mu)
        g.dprint()
    return g
def try_periodic_1toN():
    """Demo: periodic group with variable occurrence count (1:N).

    Group 'pe' takes its occurrence count from field pe_count (lambda),
    group 'qe' has a fixed occurs=2. Exercises the empty (occurs=0) and
    populated (occurs=3) cases, including StopIteration on out-of-range
    access. Sets the module-global ``cm`` and returns the Datamap.
    """
    print("--- try_periodic_1toN() ---")
    global cm
    g = Datamap(
        'pemap', Int2('i1'), Uint1('pe_count'),
        Periodic(Datamap('pe', String('ps', 2), Int2('pi')),
                 occurs=lambda: g.pe_count),    # count from pe_count field
        Periodic(Datamap('qe', String('qs', 3), Uint1('qi')), occurs=2),
        Int2('i2'))
    print(g.dmlen)
    cm = g
    g.buffer = Abuf(1024)

    print('\noccurs=0')
    # pe_count=0: no pe occurrences, directly followed by 2 x qe and i2
    g.buffer.value = b'\x11\x00\x00pa1\x01pa2\x02\x22\x00'
    g.prepare()   # recompute variable offsets from count fields
    dump(g.buffer)
    print(g.pe)
    try:
        print(g.pe[0])           # no occurrence -> expect StopIteration
    except StopIteration as e:
        dir(e)
        print(e)
        pass
    try:
        g.pe[1].ps = 'QQQ'       # assignment out of range -> StopIteration
    except StopIteration as e:
        dir(e)
        print(e)
        pass
    for i, v in enumerate(g.pe):
        print(i, v, v.ps, v.pi)
    print(g.i2)
    dump(g.buffer)

    print('\noccurs=3')
    # pe_count=3: three (ps,pi) pairs, then 2 x qe and i2
    g.buffer.value = b'\x11\x00\x03ab\x01\x00KI\x02\x00yz\x03\x00pa1\x01pa2\x02\x22\x00'
    g.prepare()
    dump(g.buffer)
    print(g.pe)
    print(g.pe[1])
    for i, v in enumerate(g.pe):
        print(i, v, v.ps, v.pi)
    print(g.i2)
    g.pe[1].ps = 'QQQ'
    g.pe[2].pi = 33
    dump(g.buffer)
    g.dprint()
    return g
def try_string(ebcdic=0, encoding=''):
    """Demo: String fields with optional encoding / EBCDIC conversion.

    :param ebcdic: 1 - store field values in EBCDIC, 0 - ASCII (default)
    :param encoding: codec name used for string conversion ('' = default)
    :return: the created Datamap (also stored in module-global ``cm``)
    """
    global cm
    p = Datamap(
        'Test strings encoding=%s, ebcdic=%d' % (encoding, ebcdic),
        String('str8', 8),
        String('str4', 4),
        String('str10', 10),
        encoding=encoding, ebcdic=ebcdic)
    cm = p
    sz = p.getsize()  # return size of datamap
    print()
    print('=== Datamap=%s, size=%d ===\n' % (p.dmname, sz))
    print(' encoding = %s' % (p.encoding))
    print(' ebcdic = %d' % (p.ebcdic))
    p.buffer = Abuf(sz)
    # set field values
    p.str8 = b'abcd56'  # string assigned from bytes
    p.str4 = b'a1'      # string assigned from bytes
    # NOTE(review): the literal below looks mojibake-garbled (contains U+FFFD);
    # confirm the originally intended non-ASCII test data.
    p.str10 = u'a+צה�'  # string assigned from unicode
    print('-- print some numbers --')
    print(p.str8)
    print(p.str4)
    print(p.str10)
    print('--- Dumping datamap structure ---')
    dump(p.buffer)
    print('---Printing datamap structure with dprint() ---\n')
    p.dprint()   # print datamap
    print('---Printing datamap structure with lprint() ---\n')
    p.lprint(header=1)  # print datamap header
    p.lprint()          # print datamap
    return p
def try_periodic():
    """Demonstrate a Datamap containing a fixed periodic group (occurs=3).

    Builds the map, fills its buffer with test bytes, iterates the group
    occurrences and updates two occurrence fields in place. The map is
    published in the module-global ``cm`` and returned.
    """
    print("--- try_periodic() ---")
    global cm
    pmap = Datamap('mymap',
                   Int2('i1'),
                   Periodic(Datamap('pe', String('ps', 2), Int2('pi')),
                            occurs=3),
                   Int2('i2'))
    cm = pmap
    print(pmap.dmlen)
    pmap.buffer = Abuf(pmap.dmlen)
    # i1, then three (ps, pi) pairs, then i2
    pmap.buffer.value = b'\x11\x00ab\x01\x00KI\x02\x00yz\x03\x00\x22\x00'
    dump(pmap.buffer)
    dir(pmap.pe)
    print(pmap.pe)
    print(pmap.pe[1])
    for idx, occ in enumerate(pmap.pe):
        print(idx, occ, occ.ps, occ.pi)
    print(pmap.i2)
    pmap.pe[1].ps = 'QQQ'   # modify fields of individual occurrences
    pmap.pe[2].pi = 33
    dump(pmap.buffer)
    return pmap
def try_multiple_1toN():
    """Demonstrate a variable-count (1:N) multiple-value field.

    The occurrence count of 'foo' is taken from the preceding foo_count
    field via a lambda; prepare() re-evaluates the layout after each
    buffer change. Covers both the populated (3) and empty (0) cases.
    Publishes the map in the module-global ``cm`` and returns it.
    """
    print("--- try_multiple_1toN() ---")
    global cm
    m = Datamap('mymap',
                Uint1('foo_count'),
                String('foo', 2, occurs=lambda: m.foo_count),
                Int2('bar'))
    cm = m
    m.buffer = Abuf(1024)

    print('occurs=3')
    m.buffer.value = b'\x03abKIyz\x11\x00'   # count=3, three values, bar
    m.prepare()
    dump(m.buffer)
    dir(m.foo)
    print(m.foo)
    print(m.foo[1])
    for idx, val in enumerate(m.foo):
        print(idx, val)
    print(list(m.foo))
    print(m.bar)
    m.foo[1] = 'QQQ'                 # update one occurrence
    print(list(m.foo))
    m.foo = ('hh', 'TT', 'ww')       # replace all occurrences
    print(list(m.foo))
    dump(m.buffer)
    try:
        m.foo[3] = 'RRR'             # beyond current count -> StopIteration
    except StopIteration as exc:
        dir(exc)
        print(exc)

    print('occurs=0')
    m.buffer.value = b'\x00\x11\x00'  # count=0, bar only
    m.prepare()
    dump(m.buffer)
    dir(m.foo)
    print(m.foo)
    try:
        m.foo[0] = 'RRR'             # empty field -> StopIteration
    except StopIteration as exc:
        dir(exc)
        print(exc)
    for idx, val in enumerate(m.foo):
        print(idx, val)
    print(list(m.foo))
    print(m.bar)
    return m
b'\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf', b'\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf', b'\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef', b'\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') tbuf=Abuf(256); ebuf=Abuf(256); abuf=Abuf(256); for t in tt: tbuf.write(t) ebuf[0:256]=tbuf[0:256] conv.asc2ebc(ebuf,0,len(ebuf)) abuf[0:256]=ebuf[0:256] conv.ebc2asc(abuf,0,len(abuf)) print( 'tbuf has characters from 0 to 255') dump.dump(tbuf,header='tbuf') print( 'ebuf is contents after asc2ebc() call') dump.dump(ebuf,header='ebuf') print( 'abuf is contents after ebc2asc() call') dump.dump(abuf,header='abuf') print( "str2ebc('ABC'): ",hexlify(conv.str2ebc('ABC')) ) print( "str2asc(b'\\xC1\\xC2\\xC3'):",hexlify(conv.str2asc(b'\xC1\xC2\xC3')) )
if not oldest: oldest = i2dt(smf.dte, smf.tme) # datetime() of first record after skipping latest = (smf.dte, smf.tme) if smf.rty != 30: if verbose & 8: print('--- skipping other SMF record %d ---' % recno) smf.dprint(skipnull=1) continue smfrec30.buffer = record if verbose & 8: dump(record[0:rlen], header='SMF30 record %d' % recno) if rlen < smfrec30.dmlen: print( 'SMF Record %d has invalid record length %d (shorter than %d)' % (recno, rlen, smfrec30.dmlen)) break smfrec30.buffer = record # update underlying buffer if verbose & 8: print('--- Record %d: SMF30 ---' % recno) smfrec30.dprint(skipnull=1) if select and smfrec30.ion: # selection and id section exists s30id.buffer = record s30id.offset = smfrec30.iof # offset from start of record with RDW
def readrec(f, recform='', dumphdr='', numrec=0, skiprec=0, ecodec='cp037', debug=0):
    """Generator function to read records with a special record format.

    :param f: file handle of open file (binary for RDW/BDW/EXCL4, text for '')
    :param recform: record format to process:

        - 'RDW'   variable record format (2 bytes length, network byte order);
                  yields record without the RDW header (exclusive)
        - 'RDW+'  as 'RDW' but yields record including the RDW header.
                  Note: for segmented records rlen in the RDW header is the
                  length of the first segment and not of the whole record
        - 'BDW'   variable record blocked: input includes Block Descriptor
                  Words which are skipped; yields record without RDW header
        - 'BDW+'  same as 'BDW' but yields record including the RDW header
        - 'EXCL4' exclusive 4-byte length prefix, native byte order
        - ''      text file; yields lines with trailing whitespace stripped

    :param dumphdr: header text of record; if not empty each record is dumped
    :param numrec: maximum number of records to yield (0 = unlimited)
    :param skiprec: number of leading records to skip
    :param ecodec: EBCDIC codec for character interpretation when dumping
    :param debug: 1 - print RDW/BDW information

    Example usage::

        for rec in readrec(f, recform='RDW', dumphdr='my_records'):
            process(rec)
    """
    V = 1         # variable records
    VB = 2        # variable blocked includes variable
    WITH_RDW = 4  # return record with RDW header
    bu = None     # BytesIO object collecting segmented records
    recfm = 0
    block_rlen = 0  # bytes remaining in current block (BDW formats only)
    if recform.startswith('RDW'):
        recfm = V
    elif recform.startswith('BDW'):
        recfm = VB
    if recform.endswith('+'):
        recfm |= WITH_RDW
    if recfm & (V | VB):
        i = 0  # counting complete/logical records
        while i < skiprec:  # skipping records loop
            rdws = f.read(4)
            if len(rdws) < 4:
                return
            rdw.buffer = rdws  # use rdws as underlying buffer
            rlen = rdw.rlen
            if rlen > 0x7fff:
                dwtype = 'record'
                if recfm & VB and block_rlen == 0:
                    dwtype = 'block'
                raise BaseException(
                    'Invalid %s length %s exceeds 32k-1 in record %d' % (
                        dwtype, rlen, i + 1))
            if recfm & VB:
                if block_rlen > 4:  # reduce remaining block length by record len
                    block_rlen -= rlen
                else:  # need to consume BDW block header 4 bytes
                    block_rlen = rlen
                    if debug & 1:
                        dump(rdws[0:4], header='Block Descriptor Word')
                    continue  # need to read RDW
            if rdw.rlen > 4:  # it's a record
                record = f.seek(rdw.rlen - 4, os.SEEK_CUR)  # skip record
                if rdw.seg in (SEGFIRST, SEGMIDDLE):
                    if debug & 1:
                        print('Skipping %s len(%04x) in logical record %d' % (
                            segmenttype(rdw.seg), rdw.rlen, i))
                    continue  # only count last or unsegmented records
            i += 1
            if debug & 1:
                print('Skipping %s len(%04x) in logical record %d' % (
                    segmenttype(rdw.seg), rdw.rlen, i))
        maxrec = skiprec + numrec
        while 1:
            rdws = f.read(4)
            if len(rdws) < 4:
                return
            rdw.buffer = rdws  # use rdws as underlying buffer
            rlen = rdw.rlen
            if rlen > 0x7fff:
                dwtype = 'record'
                if recfm & VB and block_rlen == 0:
                    dwtype = 'block'
                raise BaseException(
                    'Invalid %s length %s exceeds 32k-1 in record %d' % (
                        dwtype, rlen, i + 1))
            if recfm & VB:
                if block_rlen > 4:  # still records in block
                    block_rlen -= rlen
                else:  # need to consume BDW block header 4 bytes
                    block_rlen = rlen
                    if debug & 1:
                        dump(rdws[0:4], header='Block Descriptor Word')
                    continue  # need to read RDW
            if rlen < 5:  # empty record
                yield b''
            if debug & 1:
                print('Reading %s len(%04x) in logical record %d' % (
                    segmenttype(rdw.seg), rdw.rlen, i + 1))
            if rdw.seg:  # copy any segmented record to buffer
                if rdw.seg == SEGFIRST:  # first segment
                    bu = BytesIO()
                    if recform.endswith('+'):  # record to include RDW
                        bu.write(rdws)
                bu.write(f.read(rlen - 4))  # append segment data
                if rdw.seg == SEGLAST:  # last segment
                    record = bu.getvalue()  # return collected segments
                    if dumphdr:
                        dump(record, header='\n%s: %d, total length %04X' % (
                            dumphdr, i + 1, len(record)), ecodec=ecodec)
                    yield record
                else:
                    continue  # do not count numrec for first/middle segment
            else:  # record is not segmented i.e. complete
                if recform.endswith('+'):  # record to include RDW
                    f.seek(-4, os.SEEK_CUR)  # rewind to record start
                else:
                    rlen -= 4
                record = f.read(rlen)
                if dumphdr:
                    rdwx = ' (%04X,%04X)' % (rdw.rlen, rdw.seg)
                    dump(record, header='\n%s: %d%s' % (dumphdr, i + 1, rdwx),
                         ecodec=ecodec)
                yield record
            i += 1
            if numrec and i > maxrec:
                return
            # while loop
    elif recform == 'EXCL4':
        for i in range(skiprec):
            e4s = f.read(4)
            if len(e4s) < 4:
                return
            excl4.buffer = e4s  # use e4s as underlying buffer
            if excl4.rlen > 4:  # record
                record = f.seek(excl4.rlen, os.SEEK_CUR)  # skip record
        i = skiprec  # i is total record count starting from 1
        maxrec = skiprec + numrec
        while 1:
            i += 1
            if numrec and i > maxrec:
                return
            e4s = f.read(4)
            if len(e4s) < 4:
                return
            excl4.buffer = e4s  # use e4s as underlying buffer
            rlen = excl4.rlen
            if rlen < 1:  # empty record
                yield b''
            else:
                record = f.read(rlen)
                if dumphdr:
                    dump(record, header='\n%s: %d' % (dumphdr, i),
                         ecodec=ecodec)
                yield record
            # while loop
    elif recform == '':  # textfile, read data is str
        for i in range(skiprec):
            record = f.readline()
            if record == '':  # end of file
                return
        i = skiprec  # i is total record count starting from 1
        maxrec = skiprec + numrec
        while 1:
            i += 1
            if numrec and i > maxrec:
                return  # all processed
            else:
                record = f.readline()
                if record == '':  # not b''
                    return  # end of file
                record = record.rstrip()  # remove trailing whitespace and newline
                if dumphdr:
                    dump(record, header='\n%s: %d' % (dumphdr, i),
                         ecodec=ecodec)
                yield record
            # while loop
    else:
        raise BaseException('Invalid recform %r specified' % recform)
print('Sequential dataset %s%s copied to local %s' % (dsn, ' binary' if binary else '' ,fname)) ftp.quit() # do not reuse ftp. # now the file is locally accessible if verbose&4: # dump records with open(fname,'rb') as f: if binary and not recform: MAXSIZE = 0x10000 # 32k saddr = 0 while 1: fc = f.read(MAXSIZE) rlen = len(fc) if rlen: dump(fc,startaddr=saddr,header=fname) saddr+=rlen if rlen < MAXSIZE: break if saddr == 0: print('File %s is empty' % fname) else: for record in readrec( f, recform=recform, numrec=numrec, skiprec=skiprec, dumphdr='Record'): pass # Copyright 2004-ThisYear Software AG # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
def pdsdir(pdsname, mmatch='', info=0, encoding='cp037', verbose=0):
    """pdsdir - generator function to read directory entries in a
    partitioned dataset (extended)

    :param pdsname: name of the partitioned dataset
    :param mmatch: matching pattern according to fnmatch module rules (optional)
    :param info: return member names (info=0) or Memberinfo tuple (info=1)
    :param encoding: EBCDIC codec used to decode member information
    :param verbose: >0 log matches, >1 dump matched entries, >2 dump records

    Example:

    >>> for name in pdsdir('mm.source', mmatch='COBL*'):
    ...     print(name)
    >>> for member in pdsdir('mm.source', mmatch='COBL*', info=1):
    ...     print(member.name, member.changed, member.size, member.id, member.alias)
    """
    mi = MemberInfo(encoding=encoding)
    mmatch = mmatch.upper()  # convert to upper case
    # "//'dsn'" path syntax opens a z/OS dataset - TODO confirm platform assumption
    with open("//'%s'" % pdsname, 'rb') as f:
        print('pdsname=%s opened' % pdsname)
        drl = Drlen()
        while 1:
            drlbuf = f.read(2)  # 2-byte length prefix of directory record
            if len(drlbuf) < 2:
                return
            drl.buffer = drlbuf
            restlen = drl.drlen - 2  # used bytes remaining after length field
            membuf = f.read(254)  # each dir record is 256 bytes (2 already read)
            mi.buffer = membuf
            mi.offset = 0
            if verbose > 2:
                dump(membuf)
            while restlen >= 12:  # 12 = basic member entry length
                if mi.ttr == b'\x00\x00\x00':  # last and empty entry in directory?
                    if verbose:
                        print('leaving pdsdir()')
                    return
                # low 5 bits of flag = number of user-data halfwords
                uinfolen = 2 * (mi.flag & 0x1f)
                restlen -= uinfolen + 12  # basic member entry len + user info
                if not mmatch or fnmatch(mi.name, mmatch):  # case insensitive match
                    if verbose:
                        print(mi.name, '\tfound')
                    if verbose > 1:
                        dump(membuf[mi.offset:mi.offset + 12 + uinfolen])
                    if info:
                        yield mi.getinfo()  # return Member named tuple
                    else:
                        yield mi.name  # return the member name
                elif verbose > 2:
                    print('%08s not matched' % mi.name)
                mi.offset += uinfolen + 12  # advance to next entry
            continue  # next directory record
def urbslog(ss):  # log non-public URBS data
    """Log non-public URBS status data for heartbeat elements.

    :param ss: URBS status element Datamap positioned on the current element;
        when its status is URBSSTHB the URBSH heartbeat data following the
        URBS element is decoded and written via adalog.debug().
    """
    if ss.urbsst == URBSSTHB:  # heartbeat
        hb = Urbsh(buffer=ss.buffer, offset=ss.offset + urb.URBSL)
        hb.ebcdic = ss.ebcdic  # copy architecture from URBS to URBSH
        hb.byteOrder = ss.byteOrder
        now = int(time.time())
        # STCK high word * 1.048576 gives seconds since 1900; sec1970
        # rebases to the Unix epoch
        ctm = int((hb.urbshctm >> 32) * 1.048576 - sec1970)
        ttm = int((hb.urbshttm >> 32) * 1.048576 - sec1970)
        # optional timestamps: (flag=1, converted seconds) when present else (0, 0)
        sdwf, sdw = (1, int((hb.urbshsdw >> 32) * 1.048576 - sec1970)) if hb.urbshsdw else (0, 0)
        sdrf, sdr = (1, int((hb.urbshsdr >> 32) * 1.048576 - sec1970)) if hb.urbshsdr else (0, 0)
        btmf, btm = (1, int((hb.urbshbtm >> 32) * 1.048576 - sec1970)) if hb.urbshbtm else (0, 0)
        btnf, btn = (1, int((hb.urbshbtn >> 32) * 1.048576 - sec1970)) if hb.urbshbtn else (0, 0)
        btof, bto = (1, int((hb.urbshbto >> 32) * 1.048576 - sec1970)) if hb.urbshbto else (0, 0)
        # build one optional report line per timestamp/counter
        slogtime = (
            ' %s heartbeat written to SLOG, %s ago\n' % (
                sstckd(hb.urbshsdw), intervalstr(now - sdw))) if sdwf else ''
        delogtime = (
            ' %s heartbeat delogged from SLOG, %s ago\n' % (
                sstckd(hb.urbshsdr), intervalstr(now - sdr))) if sdrf else ''
        infotime = (
            ' %s ETB Info time, %s ago\n' % (
                sstckd(hb.urbshbtm), intervalstr(now - btm))) if btmf else ''
        tinewest = (
            ' %s newest item entered Broker queue, %s ago\n' % (
                sstckd(hb.urbshbtn), intervalstr(now - btn))) if btnf else ''
        tioldest = (
            ' %s oldest item entered Broker queue, %s ago\n' % (
                sstckd(hb.urbshbto), intervalstr(now - bto))) if btof else ''
        cubyt = (' %d current number of bytes in queue\n' % (hb.urbshbcb, )
                 ) if hb.urbshbcb or (hb.urbshtyp == URBSHTYP_ETB) else ''
        cumsg = (' %d current number of messages in queue\n' % (
            hb.urbshbcm, )) if hb.urbshbcm or (hb.urbshtyp == URBSHTYP_ETB) else ''
        cuuow = (' %d current number of UOWs in queue\n' % (hb.urbshbcu, )
                 ) if hb.urbshbcu or (hb.urbshtyp == URBSHTYP_ETB) else ''
        adalog.debug('URBSH -- Heartbeat received at %s\n'
                     ' Destination %s\n'
                     ' %s Last messaging system commit, %s ago\n'
                     ' %s TA last processed in destination, %s ago\n'
                     '%s%s%s%s%s%s%s%s' % (
                         time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)), ss.urbsdnam,
                         sstckd(hb.urbshctm), intervalstr(now - ctm),
                         sstckd(hb.urbshttm), intervalstr(now - ttm),
                         slogtime, delogtime, infotime, tinewest, tioldest,
                         cubyt, cumsg, cuuow))
        dump(ss.buffer[ss.offset + urb.URBSL:ss.offset + urb.URBSL + URBSHL],
             header=' Reptor Heartbeat Data', prefix=' ',
             log=adalog.debug)  # ecodec=repli.ecodec)
    return
def process(self, buffer, length, status=0, substat=None):
    """ Process a message received from Reptor

    Detects the message architecture (ASCII/EBCDIC, byte order, URBH
    framing) on the first call, then walks the URB* elements contained in
    the buffer and dispatches each to its optional handler callback.

    :param buffer: buffer holding the replication message
    :param length: number of valid bytes in buffer
    :param status: uow status
    :param substat: may reference the subscription status class instance
        for processing subscription data
    """
    self.pcount += 1  # count number of process() calls
    if length < 32:  # minimum URB header is 32
        self.perrors += 1  # count processing errors
        log.error('Replication Record %d: too short' % self.pcount)
        dump(buffer[:length], log=log.error, ecodec=self.ecodec)
        return
    # the first record or buffer decides for the rest if it is
    # - messages from broker or ADARIS: always starts with URBH
    # - ASCII or EBCDIC format of the URBs
    # - other type is from ADARPE: URBs in EBCDIC and Network byte order
    if self.pcount == 1:
        if 1:
            # ADARIS binary output may have records starting with URBH and other URB types
            # intermixed
            # datamap.dataIsEbcdic=0 (default)
            # datamap.setNativeByteOrder() (default)
            if buffer[0:3] == b'\xE4\xD9\xC2':  # URBx in EBCDIC?
                # datamap.dataIsEbcdic = 1
                setURBebcdic(True)
                if buffer[3:4] == b'\xC8':  # EBCDIC 'H' -> URBH present
                    self.typ |= typURBH
                    if buffer[10:12] == b'\x00\x01':
                        # datamap.setNetworkByteOrder()
                        setURBnwbo()
                else:  # ADARPE type (no URBH)
                    # datamap.setNetworkByteOrder()
                    self.typ &= ~typURBH  # reset possible URBH type
                    setURBnwbo()
            elif buffer[0:3] == b'URB':  # ASCII URB* prefix
                if buffer[3:4] == b'H':
                    self.typ |= typURBH
                    # setURBebcdic(False) # default
                    if buffer[10:12] == b'\x00\x01':
                        # datamap.setNetworkByteOrder()
                        setURBnwbo()
                else:
                    self.typ &= ~typURBH  # reset possible URBH type
            else:
                self.perrors += 1  # count processing errors
                log.error('Replication Record % d: no URB* prefix detected' % self.pcount)
                dump(buffer, header='Buffer received', log=log.error, ecodec=self.ecodec)
                raise ReptorError('No URB* prefix detected but "%s"' % (buffer[:3], ))
    if __debug__:
        if self.logging & LOGallURB:  # any logging
            log.debug('URB in EBCDIC = %d, URBH messages = %d' % (
                datamap.dataIsEbcdic, self.typ & typURBH))
    if self.typ & typURBH:
        # message framed by a URBH header: validate it and start after URBHL
        hh.buffer = buffer
        hh.offset = 0
        if hh.urbheye != URBHEYE:
            self.perrors += 1  # count processing errors
            log.error('Replication Record % d: no URBH prefix detected' % self.pcount)
            dump(buffer[:length], log=log.error, ecodec=self.ecodec)
            return
        if hh.urbhvers != URBHVER1:
            log.error('Record %d: incompatible URBH version %s received / expected %s:' %
                      (self.pcount, hh.urbhvers, URBHVER1))
            dump(buffer[:length], log=log.error, ecodec=self.ecodec)
            return
        total_length = hh.urbhlent
        if __debug__:
            if self.logging & LOGURBH:
                log.debug(
                    '\n\nURBH %s msg %d total-len %d rpid %d/%d %s\n' % (
                        stck.sstckd(hh.urbhtime), hh.urbhmsnr, hh.urbhlent,
                        hh.urbhrpid, hh.urbhrpni, hh.urbhname))
        if self.handUrbh:
            self.handUrbh(hh, substat)
        offset = URBHL
        rest_length = total_length - URBHL
    else:
        offset = 0
        rest_length = length
    # walk the URB* elements; nn peeks at the eyecatcher of the next element
    while rest_length > 4:
        nn.buffer = buffer
        nn.offset = offset
        if nn.eye == URBDEYE:  # data element
            dd.buffer = buffer
            dd.offset = offset
            if __debug__:
                if self.logging & LOGURBD:
                    log.debug('URBD(%d) %s' % (dd.urbddsnr, urbd_type[dd.urbdtyp]))
                    # NOTE(review): LOGURBDD dump assumed nested under LOGURBD
                    # (detail logging implies element logging) - confirm
                    if self.logging & LOGURBDD:
                        datalen = dd.urbdlend
                        dump(dd.buffer[offset + URBDL:offset + URBDL + datalen],
                             header=None, prefix=' ', log=log.debug,
                             ecodec=self.ecodec)
            if self.handUrbd:
                self.handUrbd(dd, substat)
            rest_length -= dd.urbdlen
            offset += dd.urbdlen
        elif nn.eye == URBEEYE:  # end of transaction
            ee.buffer = buffer
            ee.offset = offset
            if __debug__:
                if self.logging & LOGURBE:
                    log.debug('URBE -- end of transaction %d for subscription %s' %
                              (ee.urbetsnr, ee.urbesnam))
            rest_length -= URBEL
            offset += URBEL
            if self.handUrbe:
                self.handUrbe(ee, substat)
        elif nn.eye == URBREYE:  # record element
            rr.buffer = buffer
            rr.offset = offset
            if __debug__:
                if self.logging & LOGURBH:
                    log.debug('URBR -- %s record fnr %d isn %d' %
                              (urbr_type[rr.urbrtyp], rr.urbrfnr, rr.urbrisn))
                    if rr.urbrrsp:  # response code present: log its text
                        rspTxt = ''
                        rspTxt = adaerror.rsptext(rr.urbrrsp, rr.urbrsubc, erri=rr.urbrerrc)
                        log.debug(rspTxt)
            if self.handUrbr:
                self.handUrbr(rr, substat)
            rest_length -= URBRL
            offset += URBRL
        elif nn.eye == URBTEYE:  # transaction element
            tt.buffer = buffer
            tt.offset = offset
            if __debug__:
                if self.logging & LOGURBT:
                    if tt.urbtarc > b'\x00':
                        encstr = 'arc=%d, acode=%s, wcode=%s' % (
                            ord(tt.urbtarc), ecscodec.getcodec(tt.urbtacod),
                            ecscodec.getcodec(tt.urbtwcod))
                    else:
                        encstr = ''
                    log.debug('URBT -- transaction %d with %d records for subscription %s\n'
                              ' guid %s\n'
                              ' ET %s dbid %d/%d\n'
                              ' RP %s rpid %d/%d %s' %
                              (tt.urbttsnr, tt.urbtrcnt, tt.urbtsnam,
                               binascii.hexlify(tt.urbtguid),
                               stck.sstckd(tt.urbtttim), tt.urbtdbid, tt.urbtnuci,
                               stck.sstckd(tt.urbtptim), tt.urbtrpid, tt.urbtrpni,
                               encstr))
                    if tt.urbtrsnd == URBTRSNY:
                        log.debug(' -- possible double delivery --')
                    if tt.urbtinst == URBTINSY:
                        log.debug(' -- initial state --')
                    if tt.urbtcont == URBTCONY:
                        log.debug(' -- to be continued with next message --')
            if self.handUrbt:
                self.handUrbt(tt, substat)
            rest_length -= URBTL
            offset += URBTL
        elif nn.eye == URBCEYE:  # transaction continuation element
            cc.buffer = buffer
            cc.offset = offset
            if __debug__:
                if self.logging & LOGURBC:
                    log.debug('URBC -- transaction %d continued record %d dseq %d subs %s\n'
                              % (cc.urbctsnr, cc.urbcrsnr, cc.urbcdsnr, cc.urbcsnam))
                    if cc.urbccont == URBTCONY:
                        log.debug(' -- to be continued with next message --')
            if self.handUrbc:
                self.handUrbc(cc, substat)
            rest_length -= URBCL
            offset += URBCL
        elif nn.eye == URBSEYE:  # status element
            ss.buffer = buffer
            ss.offset = offset
            if __debug__:
                if self.logging & LOGURBS:
                    subcTxt = ''
                    if ss.urbsrsp > 0:
                        subcTxt = ' -' + str(ss.urbssubc)
                        if ss.urbsrsp == REPTOR_RSP:
                            if 0 < ss.urbssubc < len(texts_reptor_subcodes):
                                subcTxt += '- ' + texts_reptor_subcodes[ss.urbssubc]
                        else:
                            if ss.urbsrsp in adaerror.rspdict:
                                subcTxt += ' - ' + adaerror.rspdict[ss.urbsrsp]
                    log.debug('URBS %s -- %s\n'
                              ' subs %s dest %s init %s rsp %d%s %s\n'
                              ' ET %s dbid %d fnr %d\n'
                              ' RP %s tsnr %d rtok %r' %
                              (stck.sstckd(ss.urbstime),
                               urbs_status.get(ss.urbsst, ss.urbsst),
                               ss.urbssnam, ss.urbsdnam, ss.urbsinam,
                               ss.urbsrsp, subcTxt, ss.urbserri,
                               stck.sstckd(ss.urbsttim), ss.urbsdbid, ss.urbsfnr,
                               stck.sstckd(ss.urbsptim), ss.urbstsnr, ss.urbsrtok))
                    # NOTE(review): status-data dumping assumed nested under
                    # LOGURBS (all output goes to log.debug) - confirm
                    datalen = ss.urbslend
                    if datalen > 0:
                        if ss.urbsst == URBSSTAU:  # online utility
                            ssu.buffer = buffer
                            ssu.offset = offset + URBSL
                            ssu.dprint()
                        elif xtra:
                            xtra.urbslog(ss)  # non-public URBS data (heartbeat)
                        else:
                            dump(ss.buffer[offset + URBSL:offset + URBSL + datalen],
                                 header=' Reptor Status/Response Data',
                                 prefix=' ', log=log.debug, ecodec=self.ecodec)
            if self.handUrbs:
                self.handUrbs(ss, substat)
            rest_length -= ss.urbslen
            offset += ss.urbslen
        elif nn.eye == URBIEYE:  # initial-state request element
            ii.buffer = buffer
            ii.offset = offset
            if __debug__:
                if self.logging & LOGURBI:
                    if ii.urbirt != URBIRTST and ii.urbirt != URBIRTIS:
                        rtypeText = 'Unknown request type ' + ii.urbirt
                    else:
                        rtypeText = urbi_rtext[ii.urbirt]
                    log.debug('URBI %s -- token %s\n'
                              ' subs %s dest %s rdest %s\n'
                              ' init %s dbid %d fnr %d\n' %
                              (rtypeText, ii.urbirtok, ii.urbisnam, ii.urbidnam,
                               ii.urbirnam, ii.urbiinam, ii.urbidbid, ii.urbifnr))
                    datalen = ii.urbilend
                    if datalen > 0:
                        dump(ii.buffer[offset + URBIL:offset + URBIL + datalen],
                             header=' Initial-state selection', prefix=' ',
                             log=log.debug)
            if self.handUrbi:
                self.handUrbi(ii, substat)
            rest_length -= ii.urbilen
            offset += ii.urbilen
        elif nn.eye == URBUEYE:  # ADARPE extract element
            uu.buffer = buffer
            uu.offset = offset
            if __debug__:
                if self.logging & LOGURBU:
                    log.debug('URBU -- ADARPE %s extract from %s (local time)\n'
                              % (uu.urbuname, uu.urbudist))
            if self.handUrbu:
                self.handUrbu(uu, substat)
            rest_length -= URBUL
            offset += URBUL
        elif nn.eye == URBFEYE:  # global format field table (GFFT)
            ff.buffer = buffer
            ff.offset = offset
            if __debug__:
                if self.logging & LOGURBF:
                    # ff.urbffnam[: ff.urbffnml]
                    log.debug('URBF -- gfid=%s db=%d, fnr=%d\n'
                              ' fnam=%s, items=%d\n'
                              ' created=%s with %s\n%s' % (
                                  ff.urbfgfid, ff.urbfdbid, ff.urbffnr,
                                  ff.urbffnam, ff.urbfcntg,
                                  stck.sstckd(ff.urbftim1)[0:19],
                                  urbfflag_str(ff.urbfflag),
                                  ' processed=%s\n' % stck.sstckd(ff.urbftim2)[0:19]
                                  if ff.urbftim2 else ''))
                    datalen = ff.urbflend
                    if datalen > 0:
                        # dump each URBG field entry of the format table
                        gg.buffer = buffer
                        gg.offset = offset + URBFL
                        for i in range(ff.urbfcntg):
                            gg.dprint()
                            gg.offset += URBGL
            rest_length -= ff.urbflen
            offset += ff.urbflen
            if self.handUrbf:
                self.handUrbf(ff, substat)
        else:
            log.error('Unknown Structure %s' % nn.eye)
            dump(buffer[offset:offset + rest_length], log=log.error,
                 ecodec=self.ecodec)
            break
def try_all_formats():
    """Demo: one Datamap exercising every supported field format.

    Assigns representative (mostly extreme) values to each field type,
    verifies a few round trips with asserts, prints/dumps the map and
    finally provokes a DatamapError by assigning to an undefined field.
    Sets the module-global ``cm`` and returns the Datamap.
    """
    global cm
    p = Datamap(
        'test_all_formats p',
        String('str8', 8),
        Unicode('uni4', 4),  # unicode 4 chars = 8 bytes
        Utf8('utf8', 8),
        Bytes('byt4', 4),
        Char('cha1'),
        Int1('int1'),
        Uint1('uin1'),
        Int2('int2'),
        Uint2('uin2'),
        Int4('int4'),
        Uint4('uin4', opt=T_STCK),  # Uint4 STCK display as time
        Int8('int8'),
        Uint8('uin8', opt=T_STCK),  # Uint8 STCK display as timestamp
        Float('flo4'),
        Double('dou8'),
        Packed('pac4', 4),
        Unpacked('unp1', 1),
        Unpacked('unp8', 8),
        Unpacked('datetime', 14, dt='DATETIME'),    # Python datetime object
        Unpacked('timestamp', 20, dt='TIMESTAMP'),  # Python datetime object
        Unpacked('date', 8, dt='DATE'),             # Python date object
        Unpacked('time', 6, dt='TIME'),             # Python time object
        Packed('natdate', 4, dt='NATDATE'),         # Python date object
        Packed('nattime', 7, dt='NATTIME'),         # Python datetime object
        Uint4('unixtime', dt='UNIXTIME'),           # Python datetime object
        Int8('xtimestamp', dt='XTIMESTAMP'),        # Python datetime object
    )
    cm = p
    sz = p.getsize()  # return size of datamap
    dtnow = datetime.now()
    print()
    print('--- Datamap=%s, size=%d ---\n' % (p.dmname, sz))
    print(' datetime now: %s' % dtnow)
    p.buffer = Abuf(sz)
    # set field values
    p.str8 = 'Maßkrüge'  # string
    p.uni4 = 'ßöäü'      # unicode
    p.utf8 = 'äßö'       # utf8
    p.byt4 = b'\xC1\xC4\xC1\x4B'  # non-ASCII bytes are displayed in HEX
    p.cha1 = '©'  # copyright character
    # p.cha1='c'
    # extreme values for the integer formats
    p.int1 = -128
    p.uin1 = 255
    p.int2 = -32768
    p.uin2 = 65535
    p.int4 = -0x80000000
    p.uin4 = 0xaee3efa4  # 1.1.1997
    p.int8 = -0x8000000000000000
    p.uin8 = 0xb1962f9305180000  # 1.1.1999
    p.flo4 = 2.0
    p.dou8 = 0.5
    p.pac4 = -1234567
    p.unp1 = 1
    p.unp8 = -12345678
    # date/time fields accept Python datetime/date/time objects
    p.datetime = dtnow
    p.date = dtnow.date()
    p.time = dtnow.time()
    p.nattime = dtnow
    p.natdate = dtnow.date()
    p.timestamp = dtnow
    p.unixtime = dtnow
    p.xtimestamp = dtnow
    print('-- print some numbers --')
    print('pac8', p.pac4)
    print('unp1', p.unp1)
    assert p.unp1 == 1
    print('unp8', p.unp8)
    assert p.unp8 == -12345678
    print('datetime', p.datetime)  # datetime() str() method
    print('--- Dumping datamap structure ---')
    dump(p.buffer)
    print('---Printing datamap structure with dprint() ---\n')
    p.dprint()  # print datamap
    print('---Printing datamap structure with lprint() ---\n')
    p.lprint(header=1)  # print datamap header
    p.lprint()  # print datamap
    p.lprint()  # print datamap
    try:
        # assigning an undefined field must raise DatamapError
        print('--- Assigning to undefined field xx ---')
        p.xx = 123
    except DatamapError as e:
        print('\t', e.value)
        dump(e.dmap.buffer)
    return p
def try_var_formats():
    """Demo: variable-length string fields (T_VAR1/T_VAR2/T_VAR4).

    Map ``p`` declares length-prefixed variable fields; maps p0/p1/p2
    provide fixed-layout buffers with the length prefixes and payloads
    (empty, small and large variants). ``p`` is then pointed at each
    buffer in turn and re-prepared to show the decoded values.
    Sets the module-global ``cm`` to the tuple of maps and returns them.
    """
    print("--- try_var_formats() ---")
    global cm
    p = Datamap(
        'test_var_formats',
        String('sv1', 0, opt=T_VAR1),   # 1-byte length prefix
        String('sv2', 0, opt=T_VAR2),   # 2-byte length prefix
        String('sv4', 0, opt=T_VAR4),   # 4-byte length prefix
        Unicode('uv1', 0, opt=T_VAR1),  # unicode 4 chars = 8 bytes
        Utf8('tv1', 0, opt=T_VAR1),
        # Bytes( 'bv1', 0, opt=T_VAR1),
    )
    p0 = Datamap(
        'test_var_formats0',
        # counts only: all variable fields empty (length prefix, no data)
        Uint1('iv1'),
        Uint2('iv2'),
        Uint4('iv4'),
        Uint1('ui1'),
        Uint1('ti1'),
        # Uint1( 'bi1'),
    )
    p1 = Datamap(
        'test_var_formats1',
        # small payloads: each length prefix followed by its data
        Uint1('iv1'), String('sv1', 3),
        Uint2('iv2'), String('sv2', 2),
        Uint4('iv4'), String('sv4', 4),
        Uint1('ui1'), Unicode('uv1', 4),  # unicode 4 chars = 8 bytes
        Uint1('ti1'), Utf8('tv1', 8),
        # Uint1( 'bi1'),
        # Bytes( 'bv1', 4),
    )
    p2 = Datamap(
        'test_var_formats2',
        # large payloads near the limits of each prefix size
        Uint1('iv1'), String('sv1', 254),
        Uint2('iv2'), String('sv2', 256),
        Uint4('iv4'), String('sv4', 64000),
        Uint1('ui1'), Unicode('uv1', 126),  # unicode 4 chars = 8 bytes
        Uint1('ti1'), Utf8('tv1', 8),
        # Uint1( 'bi1'),
        # Bytes( 'bv1', 4),
    )
    cm = (p, p0, p1, p2)
    p0.buffer = Abuf(p0.getsize())
    p1.buffer = Abuf(p1.getsize())
    p2.buffer = Abuf(p2.getsize())
    # p0: length prefixes include the prefix itself; no payload bytes
    p0.iv1 = 1
    p0.iv2 = 2
    p0.iv4 = 4
    p0.ui1 = 1
    p0.ti1 = 1
    # p1: small values; prefixes = payload length + prefix size
    p1.iv1 = 4
    p1.iv2 = 4
    p1.iv4 = 8
    p1.ui1 = 9
    p1.ti1 = 9
    p1.sv1 = 'abc'
    p1.sv2 = 'DE'
    p1.sv4 = 'fghi'
    p1.uv1 = 'JKLM'
    p1.tv1 = 'nopqrstu'
    # p2: large values
    p2.iv1 = 255
    p2.iv2 = 258
    p2.iv4 = 64004
    p2.ui1 = 253
    p2.ti1 = 9
    p2.sv1 = 'A' * 254
    p2.sv2 = 'b' * 256
    p2.sv4 = 'C' * 64000
    p2.uv1 = 'd' * 126
    p2.tv1 = 'E' * 8
    # reuse the variable map p over each prepared buffer
    p.buffer = p0.buffer
    dump(p.buffer)
    p.dprint()
    p.buffer = p1.buffer
    dump(p.buffer)
    p.prepare()   # recompute offsets from the length prefixes
    p.dprint()
    p.buffer = p2.buffer
    dump(p.buffer)
    p.prepare()
    p.dprint()
    print(p.items())
    return p, p0, p1, p2
def call(self):
    """Issue the Broker call defined by the current control block fields.

    Traces buffers and call parameters before/after the call depending on
    the bits set in self.trace (1=dump before, 2=dump after, 4=summary).

    :raises BrokerTimeOut: on wait/transport timeout error codes
    :raises BrokerError: on any other non-zero Broker error_code
    :raises InterfaceError: when the stub returns a non-zero interface code
    """
    if self.trace & 1:
        print('Before Broker call')
        dump(self.buffer, header='ETBCB')
        dump(self.receive_buffer, header='Receive Buffer')
        # print(repr(self.send_buffer), len(self.send_buffer), self.send_length)
        dump(self.send_buffer, header='Send Buffer')
        dump(self.errtext_buffer, header='Error Text Buffer')
        # self.error_code=''
    if self.trace & 4:
        print('\n%s == EXX %s%s%s%s' % (
            datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'),
            function_str(self.function),
            ' option=' + option_str(self.option) if self.option else '',
            ' wait=' + self.wait.strip('\x00 ')
            if self.function == FCT_RECEIVE and self.wait > 8 * ' ' else '',
            ' user=%s token=%s Service=%s/%s/%s' % (
                self.user_id, self.token, self.server_class,
                self.server_name, self.service)
            if self.function == FCT_LOGON else ''))
    if 0:  # self.function==FCT_SEND:  (disabled extra send-buffer trace)
        print(repr(self.send_buffer), len(self.send_buffer), self.send_length)
        dump(self.send_buffer[0:self.send_length], header=' send_buffer',
             prefix=' ')
    # the actual Broker stub call
    i = etblnk.broker(self.buffer, self.send_buffer, self.receive_buffer,
                      self.errtext_buffer)
    if self.trace & 2:
        print('After Broker call')
        dump(self.buffer, header='ETBCB')
        # dump(self.receive_buffer, header='Receive Buffer')
        if self.return_length > 0:
            dump(self.receive_buffer[0:self.return_length],
                 header=' receive_buffer', prefix=' ')
        # print(repr(self.send_buffer), len(self.send_buffer), self.send_length)
        dump(self.send_buffer, header='Send Buffer')
        dump(self.errtext_buffer, header='Error Text Buffer')
    if self.trace & 4:
        print(' conv_id=%-16s conv_stat=%s return_length=%d' % (
            self.conv_id.strip('\x00 '), convstat_str(self.conv_stat),
            self.return_length))
        print(' uowID=%-16s uowStatus=%-9s commitTime=%s' % (
            self.uowID.strip('\x00 '), uowStatus_str(self.uowStatus),
            pptime(self.commitTime)))
    # error_code is an 8-char digit string; string compare detects non-zero
    if self.error_code > '00000000':
        if self.error_code in (
                '00740074',   # Wait timeout
                '02150373'):  # Transport timeout (new with single conv?)
            raise BrokerTimeOut(acierror.geterror(self.error_code), self)
        raise BrokerError(acierror.geterror(self.error_code), self)
    if i != 0:
        raise InterfaceError(acierror.geterror('0020%04d' % i), self)