def _logfieldcontent(noderecord):
    for key, value in noderecord.items():
        if key not in ["BOTSID", "BOTSIDnr"]:
            logger.debug(' "%(key)s" : "%(value)s"', {"key": key, "value": value})
def initfromfile(self):
    logger.debug('Read edi file from: "%(in)s".', {'in': self.filehandler})
    # TODO - to delete
    """
    self.root = readdata_bin(filename=self.ta_info["filename"])
    """
    self.root = self.filehandler.read_data(name=self.ta_info["filename"])
def writeall(self): if self.root is None: raise OutMessageError( _("No outgoing message")) # then there is nothing to write... logger.debug('Start writing to file "%(filename)s".', self.ta_info) writedata_pickled(self.ta_info["filename"], self.root) logger.debug('End writing to file "%(filename)s".', self.ta_info) self.ta_info["envelope"] = "db" self.ta_info["merge"] = False
def writeall(self): if self.root is None: raise OutMessageError( _("No outgoing message")) # then there is nothing to write... logger.debug('Start writing to "%(out)s".', {'out': self._outstream}) self._outstream = self._edi_storage.opendata_bin( filename=self.ta_info["filename"], mode="wb") self._outstream.write(self.root) self._outstream.close() logger.debug('End writing to "%(out)s".', {'out': self._outstream}) self.ta_info["envelope"] = "raw" self.ta_info["merge"] = False
def _readcontent_edifile(self): """ open the edi file. """ logger.debug('Read edi file "%(filename)s".', self.ta_info) # TODO - to delete """ self.filehandler = opendata( filename=self.ta_info["filename"], mode="rb", charset=self.ta_info["charset"], errors=self.ta_info["checkcharsetin"], ) """ self.filehandler.current_key = self.ta_info["filename"]
def _initwrite(self):
    # TODO - to delete
    """
    self._outstream = opendata(
        self.ta_info["filename"],
        "wb",
        charset=self.ta_info["charset"],
        errors=self.ta_info["checkcharsetout"],
    )
    """
    self._outstream = self._edi_storage.opendata(
        filename=self.ta_info["filename"],
        mode="wb",
        charset=self.ta_info["charset"],
        check_charset_mode=self.ta_info["checkcharsetout"])
    logger.debug('Start writing to: "%(out)s".', {'out': self._outstream})
def _readcontent_edifile(self): """ read content of edi file to memory. """ logger.debug('Read edi from: "%(in)s".', {'in': self.ta_info["filename"]}) # TODO - to delete """ self.rawinput = readdata( filename=self.ta_info["filename"], charset=self.ta_info["charset"], errors=self.ta_info["checkcharsetin"], ) """ self.rawinput = self._edi_storage.readdata( filename=self.ta_info["filename"], charset=self.ta_info["charset"], check_charset_mode=self.ta_info["checkcharsetin"])
def handle_out_message(out_translated, ta_translated):
    if out_translated.ta_info["statust"] == TransactionStatus.DONE:
        # mappingscript explicitly indicated the message should be discarded
        logger.debug(_("No output file because mappingscript explicitly indicated this."))
        out_translated.ta_info["filename"] = ""
        out_translated.ta_info["status"] = TranslationStatus.DISCARD
    else:
        logger.debug(
            _('Start writing output file editype "%(editype)s" messagetype "%(messagetype)s".'),
            out_translated.ta_info,
        )
        out_translated.writeall()  # write result of translation.
        out_translated.ta_info["filesize"] = os.path.getsize(
            abspathdata(out_translated.ta_info["filename"]))  # get filesize
    ta_translated.update(**out_translated.ta_info)  # update outmessage transaction with ta_info; statust = TransactionStatus.OK
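
# A hedged sketch of the discard mechanism handled above: a mapping script (living in
# usersys/mappings, not in this module) can set statust on its out-message to
# TransactionStatus.DONE so that no output file is written. The condition used here
# is a hypothetical example.
def _example_mappingscript_main(inn, out):
    if inn.ta_info.get("testindicator") == "1":  # hypothetical: discard test interchanges
        out.ta_info["statust"] = TransactionStatus.DONE  # handle_out_message() will discard
        return
    # ...normal mapping of inn to out...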
def _write(self, node_instance):
    templatefile = abspath(self.__class__.__name__, self.ta_info["template"])
    try:
        logger.debug('Start writing to file "%(filename)s".', self.ta_info)
        loader = self.template.TemplateLoader(auto_reload=False)
        tmpl = loader.load(templatefile)
    except:
        txt = txtexc()
        raise OutMessageError(
            _('While templating "%(editype)s.%(messagetype)s", error:\n%(txt)s'),
            {
                "editype": self.ta_info["editype"],
                "messagetype": self.ta_info["messagetype"],
                "txt": txt,
            },
        )
    try:
        self._outstream.write_mode = Writer.WRITE_BINARY_MODE
        if self.ta_info["has_structure"]:  # new way of working
            if self.ta_info["print_as_row"]:
                node_instance.collectlines(self.ta_info["print_as_row"])
            stream = tmpl.generate(node=node_instance)
        else:
            stream = tmpl.generate(data=self.data)
        stream.render(method="xhtml", encoding=self.ta_info["charset"], out=self._outstream)
    except:
        txt = txtexc()
        raise OutMessageError(
            _('While templating "%(editype)s.%(messagetype)s", error:\n%(txt)s'),
            {
                "editype": self.ta_info["editype"],
                "messagetype": self.ta_info["messagetype"],
                "txt": txt,
            },
        )
    finally:
        self._outstream.close()
    logger.debug(_('End writing to file "%(filename)s".'), self.ta_info)
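
# The TemplateLoader / generate / render calls above appear to follow the Genshi
# templating API. A minimal, self-contained usage sketch with a hypothetical inline
# template (assumption: genshi is the templating library in use):
def _example_genshi_render():
    from genshi.template import MarkupTemplate
    tmpl = MarkupTemplate('<p xmlns:py="http://genshi.edgewall.org/">Hello ${name}</p>')
    # generate() fills the template; render() serialises it to an encoded string
    return tmpl.generate(name="world").render(method="xhtml", encoding="utf-8")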
def initfromfile(self): logger.debug('Read edi file "%(filename)s".', self.ta_info) filename = abspathdata(self.ta_info["filename"]) if self.ta_info["messagetype"] == "mailbag": # the messagetype is not know. # bots reads file usersys/grammars/xml/mailbag.py, and uses 'mailbagsearch' to determine the messagetype # mailbagsearch is a list, containing python dicts. Dict consist of 'xpath', 'messagetype' and (optionally) 'content'. # 'xpath' is a xpath to use on xml-file (using elementtree xpath functionality) # if found, and 'content' in the dict; if 'content' is equal to value found by xpath-search, then set messagetype. # if found, and no 'content' in the dict; set messagetype. try: module, grammarname = botsimport("grammars", "xml", "mailbag") mailbagsearch = getattr(module, "mailbagsearch") except AttributeError: logger.error( "Missing mailbagsearch in mailbag definitions for xml.") raise except BotsImportError: logger.error( "Missing mailbag definitions for xml, should be there.") raise parser = ET.XMLParser() try: extra_character_entity = getattr(module, "extra_character_entity") for key, value in extra_character_entity.items(): parser.entity[key] = value except AttributeError: pass # there is no extra_character_entity in the mailbag definitions, is TransactionStatus.OK. etree = ( ET.ElementTree() ) # ElementTree: lexes, parses, makes etree; etree is quite similar to bots-node trees but conversion is needed etreeroot = etree.parse(filename, parser) for item in mailbagsearch: if "xpath" not in item or "messagetype" not in item: raise InMessageError( _("Invalid search parameters in xml mailbag.")) found = etree.find(item["xpath"]) if found is not None: if "content" in item and found.text != item["content"]: continue self.ta_info["messagetype"] = item["messagetype"] break else: raise InMessageError( _("Could not find right xml messagetype for mailbag.")) self.messagegrammarread(typeofgrammarfile="grammars") else: self.messagegrammarread(typeofgrammarfile="grammars") parser = ET.XMLParser() for key, value in self.ta_info["extra_character_entity"].items(): parser.entity[key] = value etree = ( ET.ElementTree() ) # ElementTree: lexes, parses, makes etree; etree is quite similar to bots-node trees but conversion is needed etreeroot = etree.parse(filename, parser) self._handle_empty(etreeroot) self.stackinit() self.root = self._etree2botstree( etreeroot) # convert etree to bots-nodes-tree self.checkmessage(self.root, self.defmessage) self.ta_info.update(self.root.queries)
def initfromfile(self): logger.debug('Read edi file "%(filename)s".', self.ta_info) self.root = readdata_pickled(filename=self.ta_info["filename"])
def _translate_one_file(row, routedict, endstatus, userscript, scriptname):
    """
    - read, lex, parse, make tree of nodes.
    - split up files into messages (using 'nextmessage' of grammar)
    - get mappingscript, start mappingscript.
    - write the results of translation (no enveloping yet)
    """
    try:
        ta_fromfile = OldTransaction(row["idta"])
        ta_parsed = ta_fromfile.copyta(status=PARSED)
        if row["filesize"] > config.get(["settings", "maxfilesizeincoming"]):
            ta_parsed.update(filesize=row["filesize"])
            raise FileTooLargeError(
                _('File size of %(filesize)s is too big; option "maxfilesizeincoming" in bots.ini is %(maxfilesizeincoming)s.'),
                {
                    "filesize": row["filesize"],
                    "maxfilesizeincoming": config.get(["settings", "maxfilesizeincoming"]),
                },
            )
        logger.debug(
            _('Start translating file "%(filename)s" editype "%(editype)s" messagetype "%(messagetype)s".'),
            row,
        )
        # read whole edi-file: read, parse and make into an inmessage-object.
        # Message is represented as a tree (inmessage.root is the root of the tree).
        edifile = inmessage.parse_edi_file(
            frompartner=row["frompartner"],
            topartner=row["topartner"],
            filename=row["filename"],
            edi_storage=row["edi_storage"],
            messagetype=row["messagetype"],
            testindicator=row["testindicator"],
            editype=row["editype"],
            charset=row["charset"],
            alt=row["alt"],
            fromchannel=row["fromchannel"],
            frommail=row["frommail"],
            tomail=row["tomail"],
            idroute=routedict["idroute"],
            command=routedict["command"],
        )
        edifile.checkforerrorlist()  # no exception if infile has been lexed and parsed OK, else raises an error

        # parse & passthrough; file is parsed, partners are known, no mapping, does confirm.
        if int(routedict["translateind"]) == TranslationStatus.DISCARD:
            # partners should be queried from ISA level!
            raise GotoException("dummy")

        # edifile.ta_info contains info: QUERIES, charset etc
        for inn_splitup in edifile.nextmessage():
            # for each message in parsed edifile (one message might get translated multiple times via 'alt')
            try:
                ta_splitup = ta_parsed.copyta(status=SPLITUP, **inn_splitup.ta_info)  # copy db-ta from PARSED
                # inn_splitup.ta_info contains parameters from inmessage.parse_edi_file():
                # syntax-information, parse-information
                # for confirmations in userscript; the idta of incoming file
                inn_splitup.ta_info["idta_fromfile"] = ta_fromfile.idta
                # for confirmations in userscript; the idta of 'confirming message'
                inn_splitup.ta_info["idta"] = ta_splitup.idta
                number_of_loops_with_same_alt = 0
                while True:
                    # more than one translation can be done via 'alt'; there is an explicit break if no more translation needs to be done.

                    # find/lookup the translation************************
                    tscript, toeditype, tomessagetype = (
                        row["tscript"],
                        routedict["toeditype"],
                        routedict["tomessagetype"],
                    )
                    # TODO tscript value in row is added for mock, routedict however exists but is also used for mock
                    """
                    inmessage.lookup_translation(
                        fromeditype=inn_splitup.ta_info["editype"],
                        frommessagetype=inn_splitup.ta_info["messagetype"],
                        frompartner=inn_splitup.ta_info["frompartner"],
                        topartner=inn_splitup.ta_info["topartner"],
                        alt=inn_splitup.ta_info["alt"],
                    )
                    """
                    if not tscript:
                        # no translation found in translate table; check if a translation can be found via user script
                        if userscript and hasattr(userscript, "gettranslation"):
                            tscript, toeditype, tomessagetype = runscript(
                                userscript,
                                scriptname,
                                "gettranslation",
                                idroute=routedict["idroute"],
                                message=inn_splitup,
                            )
                        if not tscript:
                            raise TranslationNotFoundError(
                                _('Translation not found for editype "%(editype)s", messagetype "%(messagetype)s", frompartner "%(frompartner)s", topartner "%(topartner)s", alt "%(alt)s".'),
                                inn_splitup.ta_info,
                            )
                    # store name of mapping script for reporting (used for display in GUI).
                    inn_splitup.ta_info["divtext"] = tscript

                    # initialize new out-object*************************
                    # make ta for translated message (new out-ta); explicitly erase mail-addresses
                    ta_translated = ta_splitup.copyta(status=endstatus, frommail="", tomail="", cc="")
                    filename_translated = str(ta_translated.idta)
                    out_translated = outmessage.outmessage_init(
                        editype=toeditype,
                        messagetype=tomessagetype,
                        filename=filename_translated,
                        edi_storage=row["edi_storage"],
                        reference=unique("messagecounter"),
                        statust=TransactionStatus.OK,
                        divtext=tscript,
                    )  # make outmessage object

                    # run mapping script************************
                    logger.debug(
                        _('Mappingscript "%(tscript)s" translates messagetype "%(messagetype)s" to messagetype "%(tomessagetype)s".'),
                        {
                            "tscript": tscript,
                            "messagetype": inn_splitup.ta_info["messagetype"],
                            "tomessagetype": out_translated.ta_info["messagetype"],
                        },
                    )
                    translationscript, scriptfilename = botsimport(
                        "mappings", inn_splitup.ta_info["editype"], tscript)  # get the mappingscript
                    alt_from_previous_run = inn_splitup.ta_info["alt"]  # needed to check for infinite loop
                    doalttranslation = runscript(
                        translationscript,
                        scriptfilename,
                        "main",
                        inn=inn_splitup,
                        out=out_translated,
                    )
                    logger.debug(_('Mappingscript "%(tscript)s" finished.'), {"tscript": tscript})
                    # ~ if 'topartner' not in out_translated.ta_info:  # out_translated does not contain values from ta......
                    # ~     # 20140516: disabled this. Suspected since long that it does not actually do something. Tested this.
                    # ~     out_translated.ta_info['topartner'] = inn_splitup.ta_info['topartner']

                    # manipulate botskey after mapping script:
                    if "botskey" in inn_splitup.ta_info:
                        inn_splitup.ta_info["reference"] = inn_splitup.ta_info["botskey"]
                    if "botskey" in out_translated.ta_info:
                        out_translated.ta_info["reference"] = out_translated.ta_info["botskey"]

                    # check the value received from the mappingscript to determine what to do
                    # in this while-loop. Handling of chained translations.
                    if doalttranslation is None:
                        # translation(s) are done; handle out-message
                        handle_out_message(out_translated, ta_translated)
                        break  # break out of while loop
                    elif isinstance(doalttranslation, dict):
                        # some extended cases; a dict is returned that contains 'instructions' for
                        # some type of chained translation
                        if "type" not in doalttranslation or "alt" not in doalttranslation:
                            raise BotsError(
                                _('Mappingscript returned "%(alt)s". This dict should have "type" and "alt".'),
                                {"alt": doalttranslation},
                            )
                        if alt_from_previous_run == doalttranslation["alt"]:
                            number_of_loops_with_same_alt += 1
                        else:
                            number_of_loops_with_same_alt = 0
                        if doalttranslation["type"] == "out_as_inn":
                            # do chained translation: use the out-object as inn-object, new out-object
                            # use case: detected error in incoming file; use out-object to generate warning email
                            copy_out_message = copy.deepcopy(out_translated)
                            handle_out_message(copy_out_message, ta_translated)
                            inn_splitup = out_translated  # out-object is now inn-object
                            # get the alt-value for the next chained translation
                            inn_splitup.ta_info["alt"] = doalttranslation["alt"]
                            if "frompartner" not in inn_splitup.ta_info:
                                inn_splitup.ta_info["frompartner"] = ""
                            if "topartner" not in inn_splitup.ta_info:
                                inn_splitup.ta_info["topartner"] = ""
                            inn_splitup.ta_info.pop("statust")
                        elif doalttranslation["type"] == "no_check_on_infinite_loop":
                            # do chained translation: allow many loops with the same alt-value.
                            # mapping script will have to handle this correctly.
                            number_of_loops_with_same_alt = 0
                            handle_out_message(out_translated, ta_translated)
                            # get the alt-value for the next chained translation
                            inn_splitup.ta_info["alt"] = doalttranslation["alt"]
                        else:  # there is nothing else
                            raise BotsError(
                                _('Mappingscript returned dict with an unknown "type": "%(doalttranslation)s".'),
                                {"doalttranslation": doalttranslation},
                            )
                    else:  # note: this includes alt '' (empty string)
                        if alt_from_previous_run == doalttranslation:
                            number_of_loops_with_same_alt += 1
                        else:
                            number_of_loops_with_same_alt = 0
                        # do normal chained translation: same inn-object, new out-object
                        handle_out_message(out_translated, ta_translated)
                        # get the alt-value for the next chained translation
                        inn_splitup.ta_info["alt"] = doalttranslation
                    if number_of_loops_with_same_alt > 10:
                        raise BotsError(
                            _('Mappingscript returns same alt value over and over again (infinite loop?). Alt: "%(doalttranslation)s".'),
                            {"doalttranslation": doalttranslation},
                        )
                # end of while-loop **********************************************************************************
            # exceptions file_out-level: exception in mappingscript or writing of out-file
            except:
                # two ways to handle errors in mapping script or in writing outgoing message:
                # 1. do process other messages in file/interchange (default in bots 3.*)
                # 2. one error in file/interchange -> drop all results (as in bots 2.*)
                if inn_splitup.ta_info.get("no_results_if_any_error_in_translation_edifile", False):
                    raise
                txt = txtexc()
                # update db. inn_splitup.ta_info could be changed by mappingscript. Is this useful?
                ta_splitup.update(statust=TransactionStatus.ERROR, errortext=txt, **inn_splitup.ta_info)
                ta_splitup.deletechildren()
            else:
                # update db. inn_splitup.ta_info could be changed by mappingscript. Is this useful?
                ta_splitup.update(statust=TransactionStatus.DONE, **inn_splitup.ta_info)
    # exceptions file_in-level
    except GotoException:
        # edi-file is OK, file is passed-through after parsing.
        ta_parsed.update(statust=TransactionStatus.DONE, filesize=row["filesize"], **edifile.ta_info)  # update with info from eg queries
        ta_parsed.copyta(status=MERGED, statust=TransactionStatus.OK)  # original file goes straight to MERGED
        edifile.handleconfirm(ta_fromfile, routedict, error=False)
        logger.debug(_('Parse & passthrough for input file "%(filename)s".'), row)
    except FileTooLargeError as msg:
        ta_parsed.update(statust=TransactionStatus.ERROR, errortext=str(msg))
        ta_parsed.deletechildren()
        logger.debug(
            'Error in translating input file "%(filename)s":\n%(msg)s',
            {"filename": row["filename"], "msg": msg},
        )
    except:
        txt = txtexc()
        ta_parsed.update(statust=TransactionStatus.ERROR, errortext=txt, **edifile.ta_info)
        ta_parsed.deletechildren()
        edifile.handleconfirm(ta_fromfile, routedict, error=True)
        logger.debug(
            'Error in translating input file "%(filename)s":\n%(msg)s',
            {"filename": row["filename"], "msg": txt},
        )
    else:
        edifile.handleconfirm(ta_fromfile, routedict, error=False)
        ta_parsed.update(statust=TransactionStatus.DONE, filesize=row["filesize"], **edifile.ta_info)
        logger.debug(_('Translated input file "%(filename)s".'), row)
    finally:
        ta_fromfile.update(statust=TransactionStatus.DONE)
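
# A hedged sketch of the values a mapping script 'main' can return to steer the
# chained-translation while-loop in _translate_one_file above; the alt values
# ('rerun', 'warn', 'next') are hypothetical examples:
def _example_chained_mappingscript_main(inn, out):
    # ...mapping of inn to out...
    # return None (or fall off the end): translation is finished, out-message is written.
    # return 'rerun': normal chained translation; same inn-object, new out-object,
    #                 inn.ta_info['alt'] is set to 'rerun'.
    # return {'type': 'out_as_inn', 'alt': 'warn'}: use the out-object as the next
    #                 inn-object (e.g. to generate a warning email).
    # return {'type': 'no_check_on_infinite_loop', 'alt': 'next'}: chained translation
    #                 without the guard that stops after 10 loops with the same alt value.
    return None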
def _initwrite(self): logger.debug('Start writing to "%(out)s".', {'out': self._outstream}) self._outstream = self._edi_storage.opendata_bin(filename=self.ta_info["filename"], mode="wb")
def initfromfile(self): """ initialisation from an excel file. file is first converted to csv using python module xlrd """ try: self.xlrd = botsbaseimport("xlrd") except ImportError: raise ImportError( _('Dependency failure: editype "excel" requires python library "xlrd".' )) import csv as csvlib try: import StringIO except: import io as StringIO self.messagegrammarread(typeofgrammarfile="grammars") self.ta_info["charset"] = self.defmessage.syntax[ "charset"] # always use charset of edi file. if self.ta_info["escape"]: doublequote = False else: doublequote = True logger.debug('Read edi file "%(filename)s".', self.ta_info) # xlrd reads excel file; python's csv modules write this to file-like # StringIO (as utf-8); read StringIO as self.rawinput; decode this # (utf-8->str) infilename = abspathdata(self.ta_info["filename"]) try: xlsdata = self.read_xls(infilename) except: txt = txtexc() logger.error( _("Excel extraction failed, may not be an Excel file? Error:\n%(txt)s" ), {"txt": txt}, ) raise InMessageError( _("Excel extraction failed, may not be an Excel file? Error:\n%(txt)s" ), {"txt": txt}, ) rawinputfile = StringIO.StringIO() csvout = csvlib.writer( rawinputfile, quotechar=self.ta_info["quote_char"], delimiter=self.ta_info["field_sep"], doublequote=doublequote, escapechar=self.ta_info["escape"], ) csvout.writerows(map(self.utf8ize, xlsdata)) rawinputfile.seek(0) self.rawinput = rawinputfile.read() rawinputfile.close() self.rawinput = self.rawinput.decode("utf-8") # start lexing and parsing as csv self._lex() if hasattr(self, "rawinput"): del self.rawinput self.root = node.Node() # make root Node None. self.iternext_lex_record = iter(self.lex_records) leftover = self._parse(structure_level=self.defmessage.structure, inode=self.root) if leftover: raise InMessageError( _('[A52]: Found non-valid data at end of excel file: "%(leftover)s".' ), {"leftover": leftover}, ) del self.lex_records self.checkmessage(self.root, self.defmessage)
def _readcontent_edifile(self): """ read content of edifact file in memory. is read as binary. In _sniff determine charset; then decode according to charset """ logger.debug('Read edi file from: "%(in)s".', {'in': self.filehandler}) self.rawinput = self.filehandler.read_binary()
def handleconfirm(self, ta_fromfile, routedict, error):
    """ done at end of edifact file handling.
        generates CONTRL messages (or not)
    """
    # for fatal errors there is no decent node tree
    if self.errorfatal:
        return
    # check if there are any 'send-edifact-CONTRL' confirmrules.
    confirmtype = "send-edifact-CONTRL"
    if not globalcheckconfirmrules(confirmtype):
        return
    editype = "edifact"  # self.__class__.__name__
    AcknowledgeCode = "7" if not error else "4"
    for nodeunb in self.getloop({"BOTSID": "UNB"}):
        sender = nodeunb.get({"BOTSID": "UNB", "S002.0004": None})
        receiver = nodeunb.get({"BOTSID": "UNB", "S003.0010": None})
        nr_message_to_confirm = 0
        messages_not_confirm = []
        for nodeunh in nodeunb.getloop({"BOTSID": "UNB"}, {"BOTSID": "UNH"}):
            messagetype = nodeunh.queries["messagetype"]
            # no CONTRL for CONTRL or APERAK messages; check if a CONTRL should be sent via the confirmrules
            if messagetype[:6] in ["CONTRL", "APERAK"] or not checkconfirmrules(
                confirmtype,
                idroute=self.ta_info["idroute"],
                idchannel=self.ta_info["fromchannel"],
                frompartner=sender,
                topartner=receiver,
                messagetype=messagetype,
            ):
                messages_not_confirm.append(nodeunh)
            else:
                nr_message_to_confirm += 1
        if not nr_message_to_confirm:
            continue
        # remove messages not to be confirmed from the tree (destructive, but this is end of file processing anyway).
        for message_not_confirm in messages_not_confirm:
            nodeunb.children.remove(message_not_confirm)
        # check if there is a user mappingscript
        tscript, toeditype, tomessagetype = lookup_translation(
            fromeditype=editype,
            frommessagetype="CONTRL",
            frompartner=receiver,
            topartner=sender,
            alt="",
        )
        if not tscript:
            tomessagetype = "CONTRL22UNEAN002"  # default messagetype for CONTRL
            translationscript = None
        else:
            translationscript, scriptfilename = botsimport(
                "mappings", editype, tscript)  # import the mappingscript
        # generate CONTRL-message. One received interchange -> one CONTRL-message
        reference = str(unique("messagecounter"))
        ta_confirmation = ta_fromfile.copyta(status=TRANSLATED)
        filename = str(ta_confirmation.idta)
        out = outmessage.outmessage_init(
            editype=editype,
            messagetype=tomessagetype,
            filename=filename,
            reference=reference,
            statust=TransactionStatus.OK,
        )  # make outmessage object
        out.ta_info["frompartner"] = receiver  # reverse!
        out.ta_info["topartner"] = sender  # reverse!
        if translationscript and hasattr(translationscript, "main"):
            runscript(
                translationscript,
                scriptfilename,
                "main",
                inn=self,
                out=out,
                routedict=routedict,
                ta_fromfile=ta_fromfile,
            )
        else:
            # default mapping script for CONTRL
            # write UCI for UNB (envelope)
            out.put({
                "BOTSID": "UNH",
                "0062": reference,
                "S009.0065": "CONTRL",
                "S009.0052": "2",
                "S009.0054": "2",
                "S009.0051": "UN",
                "S009.0057": "EAN002",
            })
            out.put({"BOTSID": "UNH"}, {"BOTSID": "UCI", "0083": AcknowledgeCode})
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "0020": nodeunb.get({"BOTSID": "UNB", "0020": None}),
            })
            out.put({"BOTSID": "UNH"}, {"BOTSID": "UCI", "S002.0004": sender})  # not reverse!
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S002.0007": nodeunb.get({"BOTSID": "UNB", "S002.0007": None}),
            })
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S002.0008": nodeunb.get({"BOTSID": "UNB", "S002.0008": None}),
            })
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S002.0042": nodeunb.get({"BOTSID": "UNB", "S002.0042": None}),
            })
            out.put({"BOTSID": "UNH"}, {"BOTSID": "UCI", "S003.0010": receiver})  # not reverse!
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S003.0007": nodeunb.get({"BOTSID": "UNB", "S003.0007": None}),
            })
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S003.0014": nodeunb.get({"BOTSID": "UNB", "S003.0014": None}),
            })
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UCI",
                "S003.0046": nodeunb.get({"BOTSID": "UNB", "S003.0046": None}),
            })
            # write UCM for each UNH (message)
            for nodeunh in nodeunb.getloop({"BOTSID": "UNB"}, {"BOTSID": "UNH"}):
                lou = out.putloop({"BOTSID": "UNH"}, {"BOTSID": "UCM"})
                lou.put({"BOTSID": "UCM", "0083": AcknowledgeCode})
                lou.put({
                    "BOTSID": "UCM",
                    "0062": nodeunh.get({"BOTSID": "UNH", "0062": None}),
                })
                lou.put({
                    "BOTSID": "UCM",
                    "S009.0065": nodeunh.get({"BOTSID": "UNH", "S009.0065": None}),
                })
                lou.put({
                    "BOTSID": "UCM",
                    "S009.0052": nodeunh.get({"BOTSID": "UNH", "S009.0052": None}),
                })
                lou.put({
                    "BOTSID": "UCM",
                    "S009.0054": nodeunh.get({"BOTSID": "UNH", "S009.0054": None}),
                })
                lou.put({
                    "BOTSID": "UCM",
                    "S009.0051": nodeunh.get({"BOTSID": "UNH", "S009.0051": None}),
                })
                lou.put({
                    "BOTSID": "UCM",
                    "S009.0057": nodeunh.get({"BOTSID": "UNH", "S009.0057": None}),
                })
            # last line (counts the segments produced in out-message)
            out.put({"BOTSID": "UNH"}, {
                "BOTSID": "UNT",
                "0074": out.getcount() + 1,
                "0062": reference,
            })
            # try to run the user mapping script function 'change' (after the default
            # mapping); the 'change' function receives the tree as written by the default
            # mapping and can change that tree.
            if translationscript and hasattr(translationscript, "change"):
                runscript(
                    translationscript,
                    scriptfilename,
                    "change",
                    inn=self,
                    out=out,
                    routedict=routedict,
                    ta_fromfile=ta_fromfile,
                )
        # write tomessage (result of translation)
        out.writeall()
        logger.debug(
            'Send edifact confirmation (CONTRL) route "%(route)s" fromchannel "%(fromchannel)s" frompartner "%(frompartner)s" topartner "%(topartner)s".',
            {
                "route": self.ta_info["idroute"],
                "fromchannel": self.ta_info["fromchannel"],
                "frompartner": receiver,
                "topartner": sender,
            },
        )
        # this info is used in transform.py to update the ta.....ugly...
        self.ta_info.update(
            confirmtype=confirmtype,
            confirmed=True,
            confirmasked=True,
            confirmidta=ta_confirmation.idta,
        )
        ta_confirmation.update(**out.ta_info)  # update ta for confirmation
def _closewrite(self):
    logger.debug('End writing to: "%(out)s".', {'out': self._outstream})
    self._outstream.close()
def _logmessagecontent(self, node_instance):
    logger.debug('Record "%(BOTSID)s":', node_instance.record)
    self._logfieldcontent(node_instance.record)  # handle fields of this record
    for child in node_instance.children:
        self._logmessagecontent(child)