def parse(self):
    """Parse the XML held in ``self.config`` and return the target.

    Returns the ``GetXmlAttrs`` instance; when no config text is
    present the parser is never fed and the target comes back empty.
    """
    attr_target = GetXmlAttrs()
    xml_parser = XMLParser(target=attr_target)
    if self.config:
        xml_parser.feed(self.config)
    return attr_target
def load(file, env):
    """Load the XML configuration in *file* into *env* via CXMLParser.

    Returns the environment object held by the parser target after the
    whole document has been parsed.
    """
    cXMLParser = CXMLParser(env)
    parser = XMLParser(target=cXMLParser)
    # Context manager closes the handle even if read() raises (the
    # original leaked it on error); 'r' because the file is only ever
    # read — the original's 'r+' requested write access it never used.
    with open(file, 'r') as f:
        read_data = f.read()
    parser.feed(read_data)
    parser.close()
    return cXMLParser.env
def getQueryResult(query, detailedLog=True):
    """Fetch *query* over HTTP, optionally log the raw XML answer, and
    return the result built by the ProcessCSQueryResult target."""
    handler = ProcessCSQueryResult()
    xml_parser = XMLParser(target=handler)
    answer_xml = urllib.urlopen(query).read()
    if detailedLog:
        log_CSQuery(answer_xml)
    xml_parser.feed(answer_xml)
    return xml_parser.close()
def getQueryResult(query, detailedLog=True):
    """Run *query* against the CS service and return the parsed answer.

    When *detailedLog* is true the raw XML response is logged first.
    """
    response_xml = urllib.urlopen(query).read()
    if detailedLog:
        log_CSQuery(response_xml)
    result_parser = XMLParser(target=ProcessCSQueryResult())
    result_parser.feed(response_xml)
    return result_parser.close()
def Parse(self, data):
    """Parse *data* as a CryXmlB binary XML buffer.

    Populates ``self._attributes`` / ``self._child_indices`` /
    ``self._nodes`` from the header's table counts, then walks the tree
    starting at node 0.  If the buffer actually contains plain-text XML
    (signature begins with ``<``) it is fed to ``self.target`` instead
    and ``_StandardXmlFile`` is raised so the caller can take the
    standard-XML code path.

    Raises:
        ValueError: buffer shorter than one CryXMLBHeader.
        ParseError: signature is neither b"CryXmlB" nor plain XML.
        _StandardXmlFile: buffer was ordinary XML (already parsed into
            self.target by the time this propagates).
    """
    if len(data) < sizeof(CryXMLBHeader):
        raise ValueError(
            "File is not a binary XML file (file size is too small).")
    self._data = data
    # Overlay the ctypes header structure on the start of the buffer.
    self._header = CryXMLBHeader.from_buffer(data, 0)
    # TODO: actually do header validation - see references
    if self._header.signature != b"CryXmlB":
        if self._header.signature.startswith(b"<"):
            # try parsing as a normal xml file
            parser = XMLParser(target=self.target)
            parser.feed(self._data)
            raise _StandardXmlFile()
        raise ParseError("Invalid CryXmlB Signature")
    # Materialise the three fixed-size tables described by the header.
    self._attributes = [
        self._read_attribute(i)
        for i in range(self._header.attributes_count)
    ]
    self._child_indices = [
        self._read_child_index(i)
        for i in range(self._header.child_table_count)
    ]
    self._nodes = [
        self._read_node(i) for i in range(self._header.node_count)
    ]
    root_node = self._read_node(0)
    # Node 0 is expected to be the tree root (no parent).
    assert root_node.parent_index == CRYXML_NO_PARENT
    self._iter_parse_nodes(root_node)
def get_max_depth(exampleXml):
    """Return the maximum element nesting depth of *exampleXml* as
    computed by the MaxDepth parser target."""
    depth_tracker = MaxDepth()
    depth_parser = XMLParser(target=depth_tracker)
    depth_parser.feed(exampleXml)
    return depth_parser.close()
def build(self, root=None):
    """Render this builder and its children into *root*.

    When called without *root* this node is the tree root: a fresh
    TreeBuilder is created and the final HTML string is returned.
    Nested calls receive the shared *root* and return nothing.

    Children that are HTMLBuilder instances recurse; plain children
    whose index is in ``self._formatted`` are parsed as HTML fragments
    through a TreeProxy, falling back to raw text on any parse error.
    """
    if root is None:
        was_root = True
        root = TreeBuilder()
    else:
        was_root = False
    root.start(self.tagname(), self.attrs())
    for i, child in enumerate(self.children):
        if isinstance(child, HTMLBuilder):
            child.build(root=root)
        else:
            if i in self._formatted:
                try:
                    # Parse the fragment into the shared tree via a
                    # proxy; html=True is a legacy XMLParser flag —
                    # NOTE(review): removed in newer Python 3, confirm
                    # the runtime version supports it.
                    proxy = TreeProxy(root)
                    parser = XMLParser(html=True, target=proxy)
                    parser.feed(child)
                    proxy.cleanup()
                except Exception as e:
                    # Fall back to emitting the child as plain text.
                    print("Bad formatting", e)
                    root.data(str(child))
            else:
                root.data(str(child))
    root.end(self.tagname())
    if was_root:
        root = root.close()
        return str(tostring(root, method="html").decode('utf-8'))
def get_ebelge_users():
    """Parse the stored user-list XML file and return the result
    produced by the EbelgeUsers parser target."""
    user_list_path = frappe.get_site_path(
        "private", "files", "KullaniciListesiXml", "newUserPkList.xml")
    user_parser = XMLParser(target=EbelgeUsers())
    user_parser.feed(frappe.read_file(user_list_path))
    return user_parser.close()
def parse(self, fIn, oHolder):
    """Feed *fIn* line by line into the card-set holder state parser.

    Raises IOError (chained to the ParseError) when the input is not
    well-formed XML.
    """
    oStateParser = XMLParser(target=self._cState(oHolder))
    try:
        for sCurLine in fIn:
            oStateParser.feed(sCurLine)
    except ParseError as oExp:
        raise IOError('Not an valid XML file') from oExp
def parse(self, fIn, oHolder):
    """Parse XML file into the card set holder.

    Feeds *fIn* line by line so large files never need to be held in
    memory whole.

    Raises:
        IOError: the input is not well-formed XML.
    """
    oParser = XMLParser(target=self._cState(oHolder))
    try:
        for sLine in fIn:
            oParser.feed(sLine)
    # ``except E as e`` replaces the legacy comma form, which is a
    # syntax error on Python 3 (works on 2.6+ as well).
    except ParseError as oExp:
        raise IOError('Not an XML file: %s' % oExp)
def new_parsetree_from_xml(xml):
    """Build a zim ParseTree from the XML string *xml*."""
    # For some reason this does not work with cElementTree.XMLBuilder ...
    from xml.etree.ElementTree import XMLParser
    from zim.formats import ParseTree
    tree_builder = XMLParser()
    tree_builder.feed(xml)
    return ParseTree(tree_builder.close())
def HTMLOfENML(text, resources=None):
    """Convert an ENML note body *text* to an HTML byte string.

    Args:
        text: ENML document as a string.
        resources: optional mapping handed to HTMLCreatorTarget;
            defaults to a fresh empty dict.

    Returns:
        HTML serialization (utf8-encoded) of the converted tree.
    """
    # Fresh dict per call: the original's mutable default argument
    # would be shared across every invocation and could leak state.
    if resources is None:
        resources = {}
    target = HTMLCreatorTarget(resources)
    parser = XMLParser(target=target)
    parser.feed(text)
    parser.close()
    return tostring(target.root, encoding='utf8', method='html')
def mm2otl(*arg, **kwarg):
    """Convert a FreeMind .mm file to OTL via the Outline parser target.

    The file name is taken from ``arg[0][0]`` (first element of the
    first positional argument).
    """
    fname = arg[0][0]
    # Context manager fixes the leaked handle (the original never
    # closed the file) and avoids shadowing the builtin ``file``.
    with codecs.open(fname, 'r', encoding='utf-8') as handle:
        filelines = handle.readlines()
    outline = Outline()
    parser = XMLParser(target=outline, encoding='utf-8')
    # Only the first line is fed — assumes the whole FreeMind document
    # sits on one line; TODO confirm against real .mm files.
    parser.feed(filelines[0].encode('utf-8'))
    parser.close()
def runTest(self): '''Test OldParseTreeBuilder class''' # - Test \n before and after h / p / pre # - Test break line into lines input = '''\ <?xml version='1.0' encoding='utf-8'?> <zim-tree> foo<h level="1">bar</h>baz dus<pre>ja</pre>hmm <h level="2">foo </h>bar dus ja <emphasis>hmm dus ja </emphasis>grrr <strong>foo bar </strong> <strike></strike><emphasis> </emphasis>. </zim-tree>''' wanted = '''\ <?xml version='1.0' encoding='utf-8'?> <zim-tree> foo <h level="1">bar</h> baz dus <pre>ja </pre>hmm <h level="2">foo</h> bar dus ja <emphasis>hmm</emphasis> <emphasis>dus ja</emphasis> grrr <strong>foo</strong> <strong>bar</strong> . </zim-tree>''' from xml.etree.ElementTree import XMLParser builder = XMLParser(target=OldParseTreeBuilder()) builder.feed(input) root = builder.close() tree = ParseTree(root) self.assertEqual(tree.tostring(), wanted)
def main(name):
    """Parse the XML file *name* and hand its sections to M.run."""
    stream_parser = XMLParser()
    with open(name, "r") as handle:
        for record in handle:
            stream_parser.feed(record)
    xml = XMLTree(stream_parser.close(), False)
    M.run(xml.get("files"), xml.get("structure"), xml.get("pages"))
def fromStream(cls, source):
    """Parse a WebDAV XML document from the file-like *source*.

    Reads in 64 KiB chunks so arbitrarily large bodies stream through
    the parser without being buffered whole.

    Raises:
        ValueError: the stream is not well-formed XML.
    """
    parser = XMLParser(target=WebDAVContentHandler())
    try:
        while True:
            data = source.read(65536)
            if not data:
                break
            parser.feed(data)
    # ``except E as e`` replaces the legacy comma form, which is a
    # syntax error on Python 3 (works on 2.6+ as well).
    except XMLParseError as e:
        raise ValueError(e)
def __init__(self):
    """Read and parse the XML configuration in ``self.config_file``."""
    # ``with`` closes the handle even if read() raises; the original
    # leaked it on error.
    with open(self.config_file) as f:
        xml_src = f.read()
    parser = XMLParser()
    parser.feed(xml_src)
    self.config_tree = parser.close()
    self.parse()
def kplist_parse(plist):
    """Parse a kernel-style property list.

    Returns the parsed object, or None when anything goes wrong
    (deliberate best-effort behaviour; the error is printed).
    """
    try:
        builder = _KPlistBuilder()
        kp_parser = XMLParser(target=builder)
        kp_parser.feed(plist)
        return kp_parser.close()
    except Exception as err:
        print(err)
        return None
def read(filename):
    """ Read an atlas from a XML file.

    Returns whatever the TreeBuilder target produces on close().
    """
    builder = TreeBuilder(filename)
    parser = XMLParser(target=builder)
    # ``with`` fixes the leaked handle from open(filename).read(),
    # which was only released by garbage collection.
    with open(filename) as handle:
        parser.feed(handle.read())
    return parser.close()
def find_depth(chaine):
    """Print the maximum nesting depth of the XML string *chaine*,
    counted relative to the root element (hence the -1)."""
    target = MaxDepth()
    parser = XMLParser(target=target)
    # Removed an unused inline sample document that was never fed to
    # the parser; only *chaine* is actually parsed.
    parser.feed(chaine)
    print(parser.close() - 1)
def load(self, path):
    """Stream the corpus file at *path* through the InterTASS XML
    parser, adding each completed tweet document to the corpus."""
    tass_target = CorpusInterTASSXMLParser()
    stream_parser = XMLParser(target=tass_target)
    with open(path, encoding=self.__encoding) as corpus_file:
        for corpus_line in corpus_file:
            stream_parser.feed(corpus_line)
            # The target flags full_doc once a whole tweet element
            # has been consumed.
            if tass_target.full_doc:
                self.__add_document(tass_target.doc)
def get_tree(xml_string):
    """
    Get the xml tree associated to an xml string

    :param xml_string: XML document as a string
    :return: tree object produced by the MyParser target
    """
    tree_parser = XMLParser(target=MyParser())
    tree_parser.feed(xml_string)
    return tree_parser.close()
def parse_xml_string(data):
    """Count occurrences of each XML tag in *data*.

    Args:
        data (str): XML document as a string.

    Returns:
        dict: ``{'error': bool, 'message': str, 'data': dict}`` where
        ``data`` maps tag name -> number of occurrences.  ``error`` is
        True with a diagnostic message when *data* is malformed or
        empty.
    """

    class ElementCounter:
        """Parser target that tallies how often each start tag occurs."""

        def __init__(self):
            # Instance attribute: the original used a class-level
            # defaultdict, which would be shared between instances.
            # defaultdict(int) replaces defaultdict(lambda: 0).
            self.element_count = defaultdict(int)

        def start(self, tag, attrib):
            self.element_count[tag] += 1

        def end(self, tag):
            pass

        def data(self, data):
            pass

        def close(self):
            pass

    result = {'error': True, 'data': {}, 'message': 'Unstructured XML'}
    target = ElementCounter()
    parser = XMLParser(target=target)
    try:
        parser.feed(data)
    except ParseError:
        result.update({'error': True, 'message': 'Unstructured data'})
        return result
    except Exception as e:
        result.update({'error': True, 'message': '{}'.format(e)})
        return result
    try:
        # close() raises ParseError for empty / truncated documents.
        parser.close()
    except ParseError:
        result.update({'error': True, 'message': 'Empty XML'})
        return result
    except Exception as e:
        result.update({'error': True, 'message': '{}'.format(e)})
        return result
    result.update({
        'error': False,
        'message': 'xml parse successful',
        'data': dict(target.element_count),
    })
    return result
def from_xml(xml):
    """
    Deserialize from a XML string.

    :param xml: XML document as a string
    :rtype: object tree built by the XMLHandler target
    """
    xml_handler = XMLHandler()
    sax_parser = XMLParser(target=xml_handler)
    sax_parser.feed(xml)
    sax_parser.close()
    return xml_handler.root
def main():
    """Parse an XML file named in argv[1] (stdin otherwise); argv[2],
    when present, is forwarded to the _Parser_xml target."""
    if 1 < len(sys.argv):
        # .xml file path in $1 argument, else use /dev/stdin
        # ``with`` fixes the handle leaked by open(path).read().
        with open(sys.argv[1]) as handle:
            text = handle.read()
    else:
        text = "\n".join(sys.stdin.readlines())
    ntg = sys.argv[2] if 2 < len(sys.argv) else None
    parser = XMLParser(target=_Parser_xml(ntg))
    # Strip the first default-namespace declaration so tags arrive
    # unqualified at the target.
    text = re.sub('\\sxmlns="[^"]+"', '', text, count=1)
    parser.feed(text)
    parser.close()
def __init__(self, config_file=None):
    """Parse the XML configuration.

    Args:
        config_file: optional path overriding ``self.config_file``.
    """
    if config_file:
        self.config_file = config_file
    # ``with`` closes the handle even if read() raises; the original
    # leaked it on error.
    with open(self.config_file) as f:
        xml_src = f.read()
    parser = XMLParser()
    parser.feed(xml_src)
    self.config_tree = parser.close()
    self.parse()
def __init__(self, conf):
    """Configure logging and parse the uwsgi XML file named in *conf*."""
    logging.basicConfig(level=conf.debuglevel)
    self.log = logging
    # ``with`` replaces the manual open/close and survives exceptions.
    with open(conf.uwsgifile) as f:
        xml_src = f.read()
    parser = XMLParser()
    parser.feed(xml_src)
    self.config_tree = parser.close()
    self.parse()
def main():
    """Read a count then that many XML lines from stdin (Python 2)
    and print the nesting depth relative to the root element."""
    line_count = input()
    doc_lines = [raw_input() for _ in xrange(line_count)]
    depth_parser = XMLParser(target=MaxDepth())
    depth_parser.feed("\n".join(doc_lines))
    print (depth_parser.close() - 1)
def main():
    """Parse conf['source_file'] with CustomParser and log the totals
    collected in the module-level ``stats`` dict."""
    init()
    log.info("Parsing '%s'..." % os.path.basename(conf['source_file']))
    stopwatch_set()
    target = CustomParser()
    parser = XMLParser(target=target)
    # ``with`` fixes the handle leaked by open(...).read().
    with open(conf['source_file']) as source:
        parser.feed(source.read())
    log.info('')
    totals = 'Total: posts: {post}; pages: {page}; comments: {comment}'
    log.info(totals.format(**stats))
    log.info('Elapsed time: %s s' % stopwatch_get())
def parse(self, source=None, parser=None):
    """Merge screen configs chunk by chunk into an element tree.

    Args:
        source: unused (kept for interface compatibility).
        parser: optional pre-configured parser; a TreeBuilder-backed
            XMLParser is created when omitted.

    Returns:
        The parsed root element, or None when no data is available or
        parsing fails (best-effort behaviour preserved).
    """
    try:
        if not parser:
            parser = XMLParser(target=TreeBuilder())
        while True:
            data = self.mergeScreenConfigs()
            if not data:
                break
            parser.feed(data)
        return parser.close()
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt; still best-effort -> None.
        return None
def ProcessCorpus():
    """Re-serialize every corpus XML file and feed it through a fresh
    DataRecorder, stored in the module-level ``dataRecorder``."""
    print("Gathering Files")
    global dataRecorder
    # Loop variable renamed so it no longer shadows the builtin ``file``.
    for corpus_path in glob(r'E:/Documents/UNE/HUMS_301.2/Corpus/Corpus_XML\*.xml'):
        tree = etree.parse(corpus_path)
        root = tree.getroot()
        xmlString = etree.tostring(root, encoding="UTF-8", method='xml')
        dataRecorder = DataRecorder()
        parser = XMLParser(target=dataRecorder)
        parser.feed(xmlString)
        parser.close()
    # Fixed typos in the completion message ("procssing compelete").
    print('Corpus processing complete!')
def __init__(self, xml_encode):
    """Run *xml_encode* through a MaxDepth parse and keep the raw string."""
    super().__init__()
    parser = XMLParser(target=MaxDepth())
    # feed() returns None; the original bound it to an unused local.
    parser.feed(xml_encode)
    parser.close()
    # NOTE(review): resul_property is presumably set by the base
    # class's __init__ — confirm, it is not assigned in this method.
    print(self.resul_property)
    self.xml_encode = xml_encode
def parse_reqdata(self, get_data, post_data):
    """ parse query params and POST data into dictionary """
    # NOTE: Python 2 module (print statements; ``except E as (name)``
    # parses only under the Python 2 grammar).
    print 'debuggery: runing parse_reqdata...'
    data_dict = {}
    parser = XMLParser()
    try:
        parser.feed(post_data)
        root = parser.close()
        # The executions/execution element carries all run metadata.
        execElement = root.find("executions/execution")
        if not execElement:
            raise BleepParserError(
                "Did not find executions/execution element in post data")
        # Attributes of the execution element.
        data_dict['execution_id'] = execElement.attrib["id"]
        data_dict['execution_href'] = execElement.attrib["href"]
        data_dict['execution_status'] = execElement.attrib["status"]
        # Child elements: run details ...
        data_dict['execution_user'] = execElement.find("user").text
        data_dict['execution_date_started'] = execElement.find(
            "date-started").text
        data_dict['execution_date_ended'] = execElement.find(
            "date-ended").text
        data_dict['execution_description'] = execElement.find(
            "description").text
        # ... and the job the execution belongs to.
        data_dict['execution_job_name'] = execElement.find("job/name").text
        data_dict['execution_job_group'] = execElement.find(
            "job/group").text
        data_dict['execution_job_project'] = execElement.find(
            "job/project").text
        data_dict['execution_job_description'] = execElement.find(
            "job/description").text
    except KeyError as (keyerr):
        print "Oops! missing key error: " + str(keyerr)
    except AttributeError as (atterr):
        print "Oops! missing attribute error" + str(atterr)
    except Exception as (parserr):
        raise BleepParserError(
            "Unexpected error when parsing post data. " +
            "Cause: " + str(parserr))
    # merge the query_params data
    for k, v in self.parse_getparams(get_data).iteritems():
        data_dict[k] = v
    print 'debuggery: parsed !'
    # return the dictionary data
    return data_dict
def _parse_xml(self, xml_data):
    """ Parse the xml into a python dictionary

    Returns an XmlDictConfig built from the document root.
    """
    parser = XMLParser()
    # feed() returns None; the original bound it to an unused ``tree``.
    parser.feed(xml_data)
    root = parser.close()
    return XmlDictConfig(root)
def _filter_hogs_and_genes(self, file_object):
    """Collect from an orthoxml file all data required to build a Ham
    object based on this filter object.

    Args:
        file_object: file object of the orthoxml to parse.

    Returns:
        tuple: (set of gene unique ids, set of top level hog ids).
    """
    collector = parsers.FilterOrthoXMLParser(self)
    filter_parser = XMLParser(target=collector)
    for raw_line in file_object:
        filter_parser.feed(raw_line)
    return set(collector.geneUniqueId), set(collector.hogsId)
def read_transaction(self):
    """Coroutine: consume transaction lines until an 'end' or the next
    XML declaration.

    Each line read is fed to a fresh parser and also interpreted by
    read_instruction(); dispatch on the instruction key:
      * 'cancel' -- log and continue reading;
      * 'end'    -- close the parser and return the parsed tree;
      * 'start'  -- protocol error (start given mid-transaction);
      * other    -- unrecognised-instruction error.
    The loop condition ends the transaction once a line beginning with
    '<?' (a new XML declaration) has been processed.
    """
    parser = XMLParser()
    line = ""
    while not line.startswith('<?'):
        line = yield from self.readline()
        parser.feed(line)
        key, attrs = yield from self.read_instruction(line)
        if key == 'cancel':
            self.logger.info('Transaction cancelled')
        elif key == 'end':
            self.logger.info('Transaction complete')
            obj = parser.close()
            self.logger.debug(pretty_xml_str(obj))
            return obj
        elif key == 'start':
            self.report_error("Start instruction given mid-transaction")
        else:
            self.report_error("Unrecognised instruction during transaction: key = {}, attrs = {}".format(key, attrs))
def _build_hogs_and_genes(self, file_object, filter_object):
    """Parse an orthoxml file and collect everything required to build
    this Ham object, honouring the given parser filter.

    Args:
        file_object: file object of the orthoxml to parse.
        filter_object: ParserFilter used by OrthoXMLParser.

    Returns:
        tuple: (top level hogs, dict of unique id -> Gene,
        dict of external id -> Gene).
    """
    orthoxml_target = parsers.OrthoXMLParser(self, filterObject=filter_object)
    stream_parser = XMLParser(target=orthoxml_target)
    for record in file_object:
        stream_parser.feed(record)
    return (orthoxml_target.toplevel_hogs,
            orthoxml_target.extant_gene_map,
            orthoxml_target.external_id_mapper)
def load_plist(self, src):
    """Stream an XBPS plist from *src*, indexing every package entry.

    Reads in 16 KiB chunks so large repodata files are never held in
    memory whole.
    """
    def rat_iter(pkg_dict, key):
        # Sentinel key marks alternatives metadata, not a package.
        if key == '_XBPS_ALTERNATIVES_':
            return
        # (Removed a needless ``nonlocal self`` — self is only read,
        # never rebound, so the closure already sees it.  Parameter
        # renamed from ``dict`` to stop shadowing the builtin.)
        rat = self.rat_from_xbps(pkg_dict, repo=self)
        if rat:
            self.index(rat)

    parser = XMLParser(target=XbpsPlistParser(iterator=rat_iter))
    while True:
        data = src.read(16 * 1024)
        if not data:
            break
        parser.feed(data)
    parser.close()
def _unimplement_path_by_id(self,id,spec="9606"): self.pars["cmd"] = "search" self.pars["q"] = id content = self.tnf_obj.ask(self.pars) parser = XMLParser() parser.feed(content) elem = parser.close() # print content print elem.tag print elem.attrib for path in elem: print "hear "+path.tag+" "+path.attrib.keys() return
def main():
    """Parse conf['source_file'] with CustomParser and log per-key
    totals from the module-level ``stats`` dict."""
    init()
    log.info("Parsing '%s'..." % os.path.basename(conf['source_file']))
    stopwatch_set()
    target = CustomParser()
    parser = XMLParser(target=target)
    # Context managers fix the leaked handles; codecs.open keeps the
    # explicit utf-8 decoding on Python 3 as before.
    if PY2:
        with open(conf['source_file']) as source:
            text = source.read()
    else:
        with codecs.open(conf['source_file'], encoding='utf-8') as source:
            text = source.read()
    parser.feed(text)
    log.info('')
    # str.join replaces the original quadratic ``+=`` concatenation;
    # output is identical: "key: value\n" per stats entry.
    total = ''.join('%s: %s\n' % (key, value) for key, value in stats.items())
    log.info(total)
    log.info('Elapsed time: %s s' % stopwatch_get())
def fromScrollBoxToSigPlus(self, repertoireScroll, repertoireSigPlus):
    """Convert a ScrollBox gallery directory into SigPlus format.

    Parses the ScrollBox descriptor XML, writes a pipe-delimited
    SigPlus descriptor ("name|title|comment" per image), copies each
    image across and restores its original timestamp.  Prints an
    abort message when the descriptor file is missing.
    (Python 2 module: print statement, u'' literals.)
    """
    galerieEnCours = None
    fichierScrollBox = os.path.join(repertoireScroll, self.fichierScrollBox)
    if os.path.exists(repertoireScroll) and os.path.exists(fichierScrollBox):
        if not os.path.exists(repertoireSigPlus):
            # create the directory with the same permissions as the source directory
            os.mkdir(repertoireSigPlus, os.stat(repertoireScroll).st_mode)
        # parse the descriptor file
        handler = ImageParser()
        parser = XMLParser(target=handler)
        # NOTE(review): ficToParse is never closed — handle leaks
        # until GC; confirm whether that matters here.
        ficToParse = codecs.open(fichierScrollBox, "r", "utf-8")
        donneesXML = u'%s' % (ficToParse.read())
        parser.feed(donneesXML)
        galerieEnCours = parser.close()
        ficSigPlus = open(os.path.join(repertoireSigPlus, self.fichierDescrSigPlus), 'w')
        for image in galerieEnCours.images:
            ficSigPlus.write(u'%s|%s|%s\n' % (image.nomFic, image.titre, image.comment))
            shutil.copy(os.path.join(repertoireScroll, image.nomFic), repertoireSigPlus)
            # restore the image's original mtime/atime from its date
            msImage = time.mktime(image.date.timetuple())
            os.utime(os.path.join(repertoireSigPlus, image.nomFic), (msImage, msImage))
        ficSigPlus.close()
    else:
        print u'le fichier %s n\'a pu être trouvé, abandon\n' % (fichierScrollBox)
def parse_reqdata(self, get_data, post_data):
    """ parse query params and POST data into dictionary """
    # NOTE: Python 2 module (print statements; ``except E as (name)``
    # parses only under the Python 2 grammar).
    print 'debuggery: runing parse_reqdata...'
    data_dict = {}
    parser = XMLParser()
    try:
        parser.feed(post_data)
        root = parser.close()
        # The executions/execution element carries all run metadata.
        execElement = root.find("executions/execution")
        if not execElement:
            raise BleepParserError("Did not find executions/execution element in post data")
        # Attributes of the execution element.
        data_dict['execution_id'] = execElement.attrib["id"]
        data_dict['execution_href'] = execElement.attrib["href"]
        data_dict['execution_status'] = execElement.attrib["status"]
        # Child elements: run details ...
        data_dict['execution_user'] = execElement.find("user").text
        data_dict['execution_date_started'] = execElement.find("date-started").text
        data_dict['execution_date_ended'] = execElement.find("date-ended").text
        data_dict['execution_description'] = execElement.find("description").text
        # ... and the job the execution belongs to.
        data_dict['execution_job_name'] = execElement.find("job/name").text
        data_dict['execution_job_group'] = execElement.find("job/group").text
        data_dict['execution_job_project'] = execElement.find("job/project").text
        data_dict['execution_job_description'] = execElement.find("job/description").text
    except KeyError as (keyerr):
        print "Oops! missing key error: " + str(keyerr)
    except AttributeError as (atterr):
        print "Oops! missing attribute error" + str(atterr)
    except Exception as (parserr):
        raise BleepParserError("Unexpected error when parsing post data. " + "Cause: " + str(parserr))
    # merge the query_params data
    for k, v in self.parse_getparams(get_data).iteritems():
        data_dict[k] = v
    print 'debuggery: parsed !'
    # return the dictionary data
    return data_dict
def get_tournaments(self, **kwargs): """ Returns set of tournaments from Challonge Keywords: state: One of { all, pending, in_progress, ended } type: One of { single_elimination, double_elimination, round_robin, swiss } created_after: YYYY-MM-DD created_before: YYYY-MM-DD subdomain: String """ if kwargs.has_key('state') and kwargs['state'] not in ['all', 'pending', 'in_progress', 'ended']: raise Exception("Invalid state parameter") if kwargs.has_key('type') and kwargs['type'] not in ['single_elimination', 'double_elimination', 'round_robin', 'swiss']: raise Exception("Invalid type parameter") response = self._call("tournaments", **kwargs) print "got", response target = TournamentConstructor() parser = XMLParser(target=target) parser.feed(response) return parser.close()
def parse_reqdata(self, get_data, post_data): """ parse query params and POST data into dictionary """ print 'debuggery: runing parse_reqdata...' data_dict = {} parser = XMLParser() tree = parser.feed(post_data) root = parser.close() data_dict = XmlDictConfig(root) # merge the query_params data for k,v in self.parse_getparams(get_data).iteritems(): data_dict[k] = v print 'debuggery: parsed !' # return the dictionary data return data_dict
import xml.etree.ElementTree as etree
from xml.etree.ElementTree import XMLParser


class MaxDepth:
    """XMLParser target that tracks the maximum element nesting depth."""

    def __init__(self):
        # Instance state: the original used class attributes, which
        # would be shared between every MaxDepth instance.
        self.maxDepth = 0
        self.depth = 0

    def start(self, tag, attrib):
        # One level deeper per opening tag; remember the high-water mark.
        self.depth += 1
        if self.depth > self.maxDepth:
            self.maxDepth = self.depth

    def end(self, tag):
        self.depth -= 1

    def data(self, data):
        pass

    def close(self):
        # Report depth relative to the root element (root not counted).
        if self.maxDepth > 0:
            return self.maxDepth - 1
        return self.maxDepth


if __name__ == "__main__":
    # Guard keeps the stdin-reading driver from running on import.
    S = ""
    for _ in range(int(input())):
        S += input()
    target = MaxDepth()
    parser = XMLParser(target=target)
    parser.feed(S)
    print(parser.close())
    # --- tail of a parser-target class whose header sits above this
    # chunk; self.maxDepth and the mangled __news_count counter are
    # presumably initialised there — TODO confirm.
    depth = 0

    @property
    def news_count(self):
        # Number of <item> elements seen so far.
        return self.__news_count

    def start(self, tag, attrib):
        # Called for each opening tag.
        if tag == 'item':
            self.__news_count += 1
        self.depth += 1
        if self.depth > self.maxDepth:
            self.maxDepth = self.depth

    def end(self, tag):
        self.depth -= 1

    def close(self):
        # Called when all data has been parsed.
        return self.maxDepth


# Module-level driver: count <item> elements in the lentaru.rss dump.
# NOTE(review): ``rss`` is never closed and feed()'s None return is
# bound to an unused name — confirm whether cleanup matters here.
target = MaxDepth()
parser = XMLParser(target=target)
rss = open('lentaru.rss')
a = parser.feed(rss.read())
print(target.news_count)
parser.close()
    # --- tail of an XMLParser target class (header above this chunk;
    # instantiated below as Parser()).  Emits one comma-separated line
    # per <tr>, reading the first three <td> cells.
    line = 0
    row = 0
    in_td = False

    def start(self, tag, attrib):
        if tag == 'tr':
            # New table row: restart the cell counter.
            self.row = 0
        if tag == 'td':
            self.row += 1
            self.in_td = True

    def end(self, tag):
        if tag == 'tr':
            # Row finished: terminate the output line.
            print()
        if tag == 'td':
            self.in_td = False

    def data(self, data):
        if self.in_td:
            if self.row == 1:
                print("%s," % data.strip(), end='')
            if self.row == 2:
                # Collapse internal whitespace in cell 2 into commas.
                print("%s," % ','.join(data.strip().split()), end='')
            if self.row == 3:
                print("%s" % data.strip(), end='')

    def close(self):
        pass


# Module-level driver: read raw bytes from stdin and parse them.
reader = io.open(sys.stdin.fileno(), 'rb', 0)
xml = reader.readall()
reader.close()
parser = XMLParser(target=Parser())
parser.feed(xml)
parser.close()
        # --- continuation of a start() handler whose ``def`` line is
        # outside this chunk (Python 2 Outline parser-target class).
        self.depth += 1
        self.current_tag = tag
        # print the indented heading
        if tag == 'node' and self.depth > 1:
            #if 'tab' in attrib['TEXT']:
            #import pdb; pdb.set_trace()
            print (self.depth-2)*self.indent + attrib['TEXT']

    def end(self, tag):
        # Called for each closing tag.
        self.depth -= 1
        self.current_tag = None

    def data(self, data):
        # Body text lives in <p> nodes; emit it as a ':'-prefixed line.
        # NOTE(review): bodyindent is computed but unused below —
        # likely meant for the commented-out textwrap path; confirm.
        if self.current_tag == 'p':
            bodyline = data.rstrip('\r\n')
            bodyindent = (self.depth-5)*self.indent + ": "
            #textlines = textwrap.wrap(bodytext, width=77-len(bodyindent), break_on_hyphens=False)
            #for line in textlines:
            print bodyindent + bodyline

    def close(self):
        # Called when all data has been parsed.
        pass


# Module-level driver (Python 2): feed only the first line of the
# FreeMind file — assumes the whole map is on one line; TODO confirm.
outline = Outline()
parser = XMLParser(target=outline, encoding='utf-8')
fname = sys.argv[1]
file = codecs.open(fname, 'r', encoding='utf-8')
filelines = file.readlines(); print "filelines", type(filelines[0]), filelines[0]
parser.feed(filelines[0].encode('utf-8'))
parser.close()
    # --- tail of the Outline parser-target class (Python 2); the
    # class header and the depth/indent attributes it reads are in a
    # previous chunk — TODO confirm their initial values.
    current_tag = None

    def start(self, tag, attrib):
        # Called for each opening tag.
        self.depth += 1
        self.current_tag = tag
        # print the indented heading
        if tag == 'node' and self.depth > 1:
            #if 'tab' in attrib['TEXT']:
            #import pdb; pdb.set_trace()
            print (self.depth-2)*self.indent + attrib['TEXT']

    def end(self, tag):
        # Called for each closing tag.
        self.depth -= 1
        self.current_tag = None

    def data(self, data):
        # Body text lives in <p> nodes; emit it as a ':'-prefixed line.
        if self.current_tag == 'p':
            bodyline = data.rstrip('\r\n')
            bodyindent = (self.depth-5)*self.indent + ": "
            #textlines = textwrap.wrap(bodytext, width=77-len(bodyindent), break_on_hyphens=False)
            #for line in textlines:
            print bodyindent + bodyline

    def close(self):
        # Called when all data has been parsed.
        pass


# Module-level driver: feed the whole file at once (unlike the variant
# elsewhere that feeds only the first line).
outline = Outline()
parser = XMLParser(target=outline)
fname = sys.argv[1]
filelines = open(fname).readlines()
parser.feed(''.join(filelines))
parser.close()
class XMPPComponent(asyncore.dispatcher_with_send):
    """Asyncore-based XMPP component bridging stored accounts to a
    transport.

    The instance is its own XMLParser target: the streaming start /
    end / data callbacks at the bottom build ElementTree stanzas from
    the socket and dispatch completed depth-2 stanzas through
    ``mapping`` (tag -> bound handler, collected in __init__ from
    methods carrying a ``callback`` attribute set by @On).

    NOTE(review): accounts/mapping/lock are class-level attributes and
    therefore shared across instances — confirm a single component
    instance is intended.
    """
    parser = None          # incremental XMLParser, target=self
    root = None            # stream root element (<stream:stream>)
    current = None         # element currently being built
    depth = 0              # current nesting depth in the stream
    accounts = dict()      # bare JID -> Account
    config = None
    database = None
    mapping = dict()       # stanza tag -> handler method
    lock = threading.Lock()

    def __init__(self, config):
        asyncore.dispatcher_with_send.__init__(self)
        self.config = config
        self.database = get_database(self.config.database)
        self.parser = XMLParser(target=self)
        # Collect every @On-decorated method: the decorator is assumed
        # to set a ``callback`` attribute naming the stanza tag —
        # TODO confirm against the @On implementation.
        members = inspect.getmembers(self, predicate=inspect.ismethod)
        for m in members:
            if hasattr(m[1], "callback"):
                fn = m[1]
                fname = m[0]
                self.mapping[fn.callback] = getattr(self, fname)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((self.config.xmpp_host, self.config.xmpp_port))

    def shutdown(self):
        # Propagate shutdown to every managed account.
        for account in self.accounts.values():
            account.shutdown()

    def update(self):
        # Pick up accounts added to the database since startup.
        for jid, number, password in self.database.read_accounts():
            if jid not in self.accounts:
                logger.info("Found new account: %s" % jid)
                number = number.encode("UTF-8")
                password = password.encode("UTF-8")
                self.accounts[jid] = Account(jid, number, password, self, self.config)

    ############################
    # Jabber message callbacks #
    ############################

    @On(handshake_tag)
    def streamReady(self, _):
        # Handshake accepted: spin up all stored accounts plus the
        # HTTP file-serving side channel.
        for jid, number, password in self.database.read_accounts():
            number = number.encode("UTF-8")
            password = password.encode("UTF-8")
            self.accounts[jid] = Account(jid, number, password, self, self.config)
        self.httpserver = HttpServer(
            self.config.http_bind,
            self.config.http_port,
            serve_file(self.accounts, self.config)
        )

    @On("stream:error")
    def streamError(self, msg):
        raise Exception(msg)

    @On(presence_tag)
    def handlePresence(self, message):
        # Route presence to the account matching the sender's bare JID.
        jabberFrom = Jid(message.get("from")).bare
        if jabberFrom in self.accounts:
            self.accounts[jabberFrom].incomingXMPPPresence(message)

    @On(iq_tag)
    def handleIq(self, msg):
        logger.debug("Unhandled Iq Stanza: %s" % ET.tostring(msg))

    @On(message_tag)
    def handleMessage(self, message):
        # Route messages to the account matching the sender's bare JID.
        jabberFrom = Jid(message.get("from")).bare
        if jabberFrom in self.accounts:
            self.accounts[jabberFrom].incomingXMPPMessage(message)

    ######################
    # Asyncore callbacks #
    ######################

    def handle_connect(self):
        # Open the component stream as soon as the TCP connect lands.
        self.write("<?xml version='1.0' encoding='UTF-8'?>")
        self.write("<stream:stream to='%s' "\
            "xmlns:stream='http://etherx.jabber.org/streams' "\
            "xmlns='jabber:component:accept'>" % self.config.transport_domain
        )

    def write(self, buf):
        logger.debug("to server: %s" % buf)
        self.send(buf)

    def handle_read(self):
        # Incremental parse: whatever bytes arrive are fed straight to
        # the XML parser, which fires start/end/data below.
        buf = self.recv(4096)
        logger.debug("from server: %s" % buf)
        self.parser.feed(buf)

    def handle_close(self):
        # TODO: reconnect here
        pass

    #######################
    # XMLParser callbacks #
    #######################

    def start(self, tag, attrib):
        if self.current is None:
            self.current = ET.Element(tag, attrib)
        else:
            tmp = ET.SubElement(self.current, tag, attrib)
            # Remember the parent so end() can walk back up the tree.
            tmp.parent = self.current
            self.current = tmp
        if self.root is None:
            self.root = self.current
        self.depth += 1
        if self.depth == 1:
            # Opening <stream:stream>: answer with the component
            # handshake — SHA1 over stream id + shared secret.
            pw = self.config.transport_password
            handshake = ET.Element("handshake")
            handshake.text = hashlib.sha1(attrib['id'] + pw).hexdigest()
            handshake = ET.tostring(handshake)
            self.write(handshake)

    def end(self, tag):
        if self.depth == 2:
            # A complete top-level stanza: dispatch it, then drop it
            # from the tree so memory stays bounded over the stream.
            if tag in self.mapping:
                self.mapping[tag](self.current)
            try:
                self.root.remove(self.current)
            except:
                pass
        self.current = self.current.parent
        self.depth -= 1

    def data(self, data):
        # Accumulate character data on the element being built.
        if self.current.text is None:
            self.current.text = data
        else:
            self.current.text += data