def run(self, profile):
    """Collect FileZilla server credentials from its XML config files.

    profile: mapping with an 'APPDATA' key locating the user profile dir.
    Returns a list of {'Host','Port','Login'[, 'Password']} dicts, or None
    when no FileZilla directory exists.
    """
    base = os.path.join(profile['APPDATA'], 'FileZilla')
    if not os.path.exists(base):
        return

    results = []
    for name in ('sitemanager.xml', 'recentservers.xml', 'filezilla.xml'):
        candidate = os.path.join(base, name)
        if not os.path.exists(candidate):
            continue
        tree = ElementTree(file=candidate)
        # Site-manager files use Servers/Server; fall back to the
        # recent-servers layout when that list is empty.
        servers = tree.findall('Servers/Server') or tree.findall('RecentServers/Server')
        for entry in servers:
            host, port = entry.find('Host'), entry.find('Port')
            login, password = entry.find('User'), entry.find('Pass')
            # Explicit "is None" checks: childless elements are falsy, so
            # all((host, port, login)) would misbehave.
            if host is None or port is None or login is None:
                continue
            creds = {'Host': host.text, 'Port': port.text, 'Login': login.text}
            if password is not None:
                if password.attrib.get('encoding') == 'base64':
                    creds['Password'] = base64.b64decode(password.text)
                else:
                    creds['Password'] = password.text
            results.append(creds)
    return results
def _split_configuration(self, projectfile, temp_dir):
    """Split a ScanTailor project file into one sub-project per CPU core.

    projectfile: pathlib-style path of the ``.ScanTailor`` project.
    temp_dir: pathlib directory receiving the ``<stem>-<idx>.ScanTailor`` parts.
    Returns the list of written part files.

    NOTE(review): Python 2 only — relies on ``unicode``, ``xrange`` and
    ``Element.getchildren()`` (removed in Python 3.9).
    """
    num_pieces = multiprocessing.cpu_count()
    tree = ET(file=unicode(projectfile))
    num_files = len(tree.findall('./files/file'))
    splitfiles = []
    # Round up so every file lands in exactly one piece.
    files_per_job = int(math.ceil(float(num_files)/num_pieces))
    for idx in xrange(num_pieces):
        # Re-parse for each piece so removals do not accumulate across pieces.
        tree = ET(file=unicode(projectfile))
        root = tree.getroot()
        start = idx*files_per_job
        end = start + files_per_job
        if end > num_files:
            end = None  # slice to the end for the final, possibly short, piece
        # Keep only this piece's slice in each per-file section of the project.
        for elem in ('files', 'images', 'pages', 'file-name-disambiguation'):
            elem_root = root.find(elem)
            to_keep = elem_root.getchildren()[start:end]
            to_remove = [x for x in elem_root.getchildren() if not x in to_keep]
            for node in to_remove:
                elem_root.remove(node)
        out_file = temp_dir / "{0}-{1}.ScanTailor".format(projectfile.stem, idx)
        tree.write(unicode(out_file))
        splitfiles.append(out_file)
    return splitfiles
def extractImages(html):
    """Rewrite <img> tags in an HTML mail body to use CID references.

    Returns ``[index, html]`` where ``index`` maps each original image src
    to a CRC32-derived content-id and ``html`` is the rewritten document.
    When ``html`` is None, returns ``[], html`` unchanged.

    NOTE(review): Python 2 only — ``binascii.crc32`` is fed a ``str`` and
    the tree is serialized into a ``StringIO``; both fail on Python 3.
    """
    if html is None:
        return [], html
    tree = ElementTree()
    tree.parse(StringIO(html))
    imagetags = tree.findall(".//img")
    images = []
    for tag in imagetags:
        image = tag.get('src')
        path, name = os.path.split(image)
        if image not in images:
            images.append(image)
        # Bare file name doubles as the accessibility text.
        tag.set('alt', name)
        tag.set('title', name)
    #index files for multipart storage
    index = {}
    for image in images:
        path, name = os.path.split(image)
        # Content-id derived from the CRC32 checksum of the file name.
        index[image] = '0x%08x' % binascii.crc32(name)
    #update html email image tags
    for tag in imagetags:
        image = tag.get('src')
        tag.set('src', "cid:%s" % index[image])
    html = StringIO()
    tree.write(html)
    html.write("\n")
    return [index, html.getvalue()]
def updatebq(self):
    """Rebuild the GreekStreamTV Enigma2 bouquet file from the GSXML list.

    Reads every <iptv> entry from GSXML, formats one #SERVICE line per
    channel (sorted by channel name) and rewrites the GSBQ bouquet file,
    then runs the plugin's stream.xml cleanup command.
    """
    # xml.etree.cElementTree was removed in Python 3.9; the plain module is
    # behavior-identical.
    from xml.etree.ElementTree import ElementTree
    tree = ElementTree()
    tree.parse(GSXML)
    tvlist = []
    for iptv in tree.findall('iptv'):
        name = iptv.findtext('name').title()
        # <type> packs "protocol:serviceType:bufferSize:epgId".
        protocol, serviceType, bufferSize, epgId = iptv.findtext(
            'type').split(':')
        uri = iptv.findtext('uri')
        # BUGFIX: was "protocol in 'livestreamer'", a substring test that
        # also matched e.g. "live" or "stream"; an exact match is intended.
        if protocol == 'livestreamer':
            # Route through the local livestreamer proxy; colons in the URI
            # must be escaped to keep the service reference parseable.
            uri = 'http://localhost:88/' + uri
            uri = uri.replace(':', '%3a')
        service = '#SERVICE {s}:0:1:{e}:{e}:0:0:0:0:0:{u}:{n}\n'.format(
            s=serviceType, e=epgId, u=uri, n=name)
        tvlist.append((name, service))
    tvlist = sorted(tvlist, key=lambda channel: channel[0])
    with open(GSBQ, 'w') as f:
        f.write('#NAME GreekStreamTV\n')
        for name, service in tvlist:
            f.write(service)
    com = 'cat /usr/lib/enigma2/python/Plugins/Satdreamgr/UpdateBouquet/stream.xml ; rm /usr/lib/enigma2/python/Plugins/Satdreamgr/UpdateBouquet/stream.xml'
    # BUGFIX: os.popen is lazy — consume the pipe so the shell command
    # actually completes before returning.
    os.popen(com).read()
    # NOTE(review): returning the ``list`` builtin looks accidental but is
    # kept for interface compatibility.
    return list
def _split_configuration(self, projectfile, temp_dir):
    """Split a ScanTailor project file into one sub-project per CPU core.

    projectfile: path string of the ``.ScanTailor`` project.
    temp_dir: directory string receiving the ``<stem>-<idx>.ScanTailor`` parts.
    Returns the list of written part paths.

    NOTE(review): Python 2 only — uses ``xrange`` and
    ``Element.getchildren()`` (removed in Python 3.9).
    """
    num_pieces = multiprocessing.cpu_count()
    tree = ET(file=projectfile)
    num_files = len(tree.findall('./files/file'))
    splitfiles = []
    # Round up so every file lands in exactly one piece.
    files_per_job = int(math.ceil(float(num_files)/num_pieces))
    for idx in xrange(num_pieces):
        # Re-parse for each piece so removals do not accumulate across pieces.
        tree = ET(file=projectfile)
        root = tree.getroot()
        start = idx*files_per_job
        end = start + files_per_job
        if end > num_files:
            end = None  # slice to the end for the final, possibly short, piece
        # Keep only this piece's slice in each per-file section of the project.
        for elem in ('files', 'images', 'pages', 'file-name-disambiguation'):
            elem_root = root.find(elem)
            to_keep = elem_root.getchildren()[start:end]
            to_remove = [x for x in elem_root.getchildren() if not x in to_keep]
            for node in to_remove:
                elem_root.remove(node)
        out_file = os.path.join(temp_dir, "{0}-{1}.ScanTailor".format(
            os.path.splitext(os.path.basename(
                projectfile))[0], idx))
        tree.write(out_file)
        splitfiles.append(out_file)
    return splitfiles
def terminology_identifiers(self):
    """Return the VSAB attribute of every TerminologyIdentifiers element
    in the openEHR terminology file."""
    qualified_tag = "{http://openehr.org/Terminology.xsd}TerminologyIdentifiers"
    with open(OPENEHR_TERMINOLOGY_FILEPATH) as handle:
        document_root = ET().parse(handle)
        identifiers = []
        for node in document_root.findall(qualified_tag):
            identifiers.append(node.attrib['VSAB'])
    return identifiers
def run(self, software_name=None):
    """Recover Oracle SQL Developer connection data from the user profile.

    Locates connections.xml either directly under the SQL Developer
    APPDATA directory or inside a system*/o.jdeveloper.db.connection*
    subdirectory, then decodes each stored connection reference.
    Returns a list of per-connection dicts, or None when nothing is found.
    The passphrase recovered from the profile is kept on self._passphrase
    for self.decrypt to use.
    """
    path = os.path.join(constant.profile['APPDATA'], u'SQL Developer')
    if os.path.exists(path):
        self._passphrase = self.get_passphrase(path)
        if self._passphrase:
            print_debug(
                'INFO', u'Passphrase found: {passphrase}'.format(
                    passphrase=self._passphrase))
            xml_name = u'connections.xml'
            xml_file = None
            if os.path.exists(os.path.join(path, xml_name)):
                xml_file = os.path.join(path, xml_name)
            else:
                # Newer releases keep the file under
                # system*/o.jdeveloper.db.connection*/connections.xml.
                for p in os.listdir(path):
                    if p.startswith('system'):
                        new_directory = os.path.join(path, p)
                        for pp in os.listdir(new_directory):
                            if pp.startswith(
                                    u'o.jdeveloper.db.connection'):
                                if os.path.exists(
                                        os.path.join(
                                            new_directory, pp, xml_name)):
                                    xml_file = os.path.join(
                                        new_directory, pp, xml_name)
                                    # NOTE(review): only exits the inner
                                    # loop; a later system* directory can
                                    # still overwrite xml_file.
                                    break
            if xml_file:
                # addrType -> output key (some values are masked in this
                # source copy).
                renamed_value = {
                    'sid': 'SID',
                    'port': 'Port',
                    'hostname': 'Host',
                    'user': '******',
                    'password': '******',
                    'ConnName': 'Name',
                    'customUrl': 'URL',
                    'SavePassword': '******',
                    'driver': 'Driver'
                }
                tree = ElementTree(file=xml_file)
                pwd_found = []
                for e in tree.findall('Reference'):
                    values = {}
                    for ee in e.findall('RefAddresses/StringRefAddr'):
                        if ee.attrib[
                                'addrType'] in renamed_value and ee.find(
                                'Contents').text is not None:
                            name = renamed_value[ee.attrib['addrType']]
                            value = ee.find(
                                'Contents'
                            ).text if name != 'Password' else self.decrypt(
                                ee.find('Contents').text)
                            values[name] = value
                    pwd_found.append(values)
                return pwd_found
def all_playlists(filename):
    """Map each playlist name in the Rhythmbox XML file *filename* to the
    list of files it contains (resolved via _playlist_files)."""
    playlists = {}
    for node in ElementTree(file=filename).findall("playlist"):
        playlists[node.get('name')] = _playlist_files(node)
    return playlists
def one_playlist(filename, playlistname):
    """Return the files of the named playlist in the Rhythmbox XML file.

    Raises ValueError when no playlist carries that name.
    """
    tree = ElementTree(file=filename)
    # Older etree versions lack attribute predicates in the path spec, so
    # filter on the name attribute manually.
    for pl in tree.findall("playlist"):
        if pl.get('name') != playlistname:
            continue
        return _playlist_files(pl)
    raise ValueError('Playlist not found: ' + playlistname)
def dowaitcount(self, original_request, collection, count, label=""):
    """Poll *collection* with a Depth:1 PROPFIND until it holds *count*
    child resources (the collection's own href is excluded).

    collection: (uri, user, pswd) tuple; empty user/pswd fall back to the
    session defaults.  Returns (True, None) once the count matches.  On
    timeout returns (False, diagnostics) in debug mode, otherwise
    (False, last observed child count).
    """
    hrefs = []
    for _ignore in range(self.manager.server_info.waitcount):
        req = request(self.manager)
        req.method = "PROPFIND"
        req.host = original_request.host
        req.port = original_request.port
        req.ruris.append(collection[0])
        req.ruri = collection[0]
        req.headers["Depth"] = "1"
        if len(collection[1]):
            req.user = collection[1]
        if len(collection[2]):
            req.pswd = collection[2]
        req.data = data(self.manager)
        req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
</D:prop>
</D:propfind>
"""
        req.data.content_type = "text/xml"
        result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s %d" % (label, "WAITCOUNT", count))
        hrefs = []
        if result and (response is not None) and (response.status == 207) and (respdata is not None):
            tree = ElementTree(file=StringIO(respdata))
            for response in tree.findall("{DAV:}response"):
                href = response.findall("{DAV:}href")[0]
                # Skip the collection itself; trailing slashes are ignored.
                if href.text.rstrip("/") != collection[0].rstrip("/"):
                    hrefs.append(href.text)
            if len(hrefs) == count:
                return True, None
        # Busy-wait before the next poll attempt.
        delay = self.manager.server_info.waitdelay
        starttime = time.time()
        while (time.time() < starttime + delay):
            pass
    if self.manager.debug and hrefs:
        # Get the content of each resource
        rdata = ""
        for href in hrefs:
            result, respdata = self.doget(req, (href, collection[1], collection[2],), label)
            test = "unknown"
            if respdata.startswith("BEGIN:VCALENDAR"):
                # Map the calendar UID back to the test that created it.
                uid = respdata.find("UID:")
                if uid != -1:
                    uid = respdata[uid + 4:uid + respdata[uid:].find("\r\n")]
                    test = self.uidmaps.get(uid, "unknown")
            rdata += "\n\nhref: {h}\ntest: {t}\n\n{r}\n".format(h=href, t=test, r=respdata)
        return False, rdata
    else:
        return False, len(hrefs)
def dowaitcount(self, original_request, collection, count, label=""):
    """Poll *collection* with a Depth:1 PROPFIND until it holds *count*
    child resources (the collection's own href is excluded).

    collection: (uri, user, pswd) tuple; empty user/pswd fall back to the
    session defaults.  Returns (True, None) once the count matches.  On
    timeout returns (False, diagnostics) in debug mode, otherwise
    (False, last observed child count).
    """
    hrefs = []
    for _ignore in range(self.manager.server_info.waitcount):
        req = request(self.manager)
        req.method = "PROPFIND"
        req.host = original_request.host
        req.port = original_request.port
        req.ruris.append(collection[0])
        req.ruri = collection[0]
        req.headers["Depth"] = "1"
        if len(collection[1]):
            req.user = collection[1]
        if len(collection[2]):
            req.pswd = collection[2]
        req.data = data(self.manager)
        req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
</D:prop>
</D:propfind>
"""
        req.data.content_type = "text/xml"
        result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s %d" % (label, "WAITCOUNT", count))
        hrefs = []
        if result and (response is not None) and (response.status == 207) and (respdata is not None):
            tree = ElementTree(file=StringIO(respdata))
            for response in tree.findall("{DAV:}response"):
                href = response.findall("{DAV:}href")[0]
                # Skip the collection itself; trailing slashes are ignored.
                if href.text.rstrip("/") != collection[0].rstrip("/"):
                    hrefs.append(href.text)
            if len(hrefs) == count:
                return True, None
        # Busy-wait before the next poll attempt.
        delay = self.manager.server_info.waitdelay
        starttime = time.time()
        while (time.time() < starttime + delay):
            pass
    if self.manager.debug and hrefs:
        # Get the content of each resource
        rdata = ""
        for href in hrefs:
            result, respdata = self.doget(req, (href, collection[1], collection[2],), label)
            test = "unknown"
            if respdata.startswith("BEGIN:VCALENDAR"):
                # Map the calendar UID back to the test that created it.
                uid = respdata.find("UID:")
                if uid != -1:
                    uid = respdata[uid + 4:uid + respdata[uid:].find("\r\n")]
                    test = self.uidmaps.get(uid, "unknown")
            rdata += "\n\nhref: {h}\ntest: {t}\n\n{r}\n".format(h=href, t=test, r=respdata)
        return False, rdata
    else:
        return False, len(hrefs)
def parseStreamListAthanmenu(self):
    """Parse every <Choice> entry of self.xml into {'Contnt','name'} dicts."""
    document = ElementTree()
    document.parse(self.xml)
    entries = []
    for choice in document.findall('Choice'):
        entries.append({
            'Contnt': str(choice.findtext('Contnt')),
            'name': str(choice.findtext('name')),
        })
    return entries
def parseListAthanAyames(self):
    """Parse every <ayames> entry of self.xml into
    {'name','date1','date2'} dicts."""
    document = ElementTree()
    document.parse(self.xml)
    entries = []
    for item in document.findall('ayames'):
        entries.append({
            'name': str(item.findtext('name')),
            'date1': str(item.findtext('date1')),
            'date2': str(item.findtext('date2')),
        })
    return entries
def parseListAthanFlash(self):
    """Parse every <flash> entry of self.xml into
    {'name','url','urimg'} dicts."""
    document = ElementTree()
    document.parse(self.xml)
    entries = []
    for item in document.findall('flash'):
        entries.append({
            'name': str(item.findtext('name')),
            'url': str(item.findtext('url')),
            'urimg': str(item.findtext('urimg')),
        })
    return entries
def parseStreamListAthanmenu_1(self):
    """Parse every <Contry> entry of self.xml into {'name','url','Id'} dicts."""
    document = ElementTree()
    document.parse(self.xml)
    entries = []
    for item in document.findall('Contry'):
        entries.append({
            'name': str(item.findtext('name')),
            'url': str(item.findtext('url')),
            'Id': str(item.findtext('Id')),
        })
    return entries
def parseStreamList(self):
    """Build a name-sorted channel list from the <iptv> entries in self.xml.

    Each channel dict carries a title-cased name plus icon, type and the
    uri passed through self.parseStreamURI.
    """
    document = ElementTree()
    document.parse(self.xml)
    channels = []
    for node in document.findall('iptv'):
        channels.append({
            'name': str(node.findtext('name')).title(),
            'icon': str(node.findtext('icon')),
            'type': str(node.findtext('type')),
            'uri': self.parseStreamURI(str(node.findtext('uri'))),
        })
    channels.sort(key=lambda item: item['name'])
    return channels
def __init__(self):
    """Load the Rhythmbox XML database and prepare empty lookup tables.

    NOTE(review): Python 2 only (print statement).  ``database`` is a
    module-level path defined elsewhere in the file.
    """
    self._path = os.path.dirname(os.path.abspath(__file__))
    print "Loading Rhythmbox database...",
    tree = ElementTree()
    tree.parse(database)
    self._entries = tree.findall("entry")
    # Lookup tables populated by load_rhythmbox_db().
    self.songs = {}
    self.artists = {}
    self.albums = {}
    self.relationships = {}
    self.load_rhythmbox_db()
def parseStreamList(self):
    """Return one {'name','icon','type','uri'} dict per <iptv> entry of
    self.xml, with the uri passed through self.parseStreamURI."""
    document = ElementTree()
    document.parse(self.xml)
    channels = []
    for node in document.findall('iptv'):
        channel = {
            'name': str(node.findtext('name')),
            'icon': str(node.findtext('icon')),
            'type': str(node.findtext('type')),
            'uri': self.parseStreamURI(str(node.findtext('uri'))),
        }
        channels.append(channel)
    return channels
def parseStreamList(self):
    """Return one {"name","icon","type","uri"} dict per <iptv> entry of
    self.xml, with the uri passed through self.parseStreamURI."""
    document = ElementTree()
    document.parse(self.xml)
    result = []
    for entry in document.findall("iptv"):
        result.append({
            "name": str(entry.findtext("name")),
            "icon": str(entry.findtext("icon")),
            "type": str(entry.findtext("type")),
            "uri": self.parseStreamURI(str(entry.findtext("uri"))),
        })
    return result
def parseStreamList(self):
    """Build a name-sorted channel list from the <iptv> entries in self.xml
    (title-cased name; uri passed through self.parseStreamURI)."""
    document = ElementTree()
    document.parse(self.xml)
    channels = []
    for entry in document.findall("iptv"):
        channels.append({
            "name": str(entry.findtext("name")).title(),
            "icon": str(entry.findtext("icon")),
            "type": str(entry.findtext("type")),
            "uri": self.parseStreamURI(str(entry.findtext("uri"))),
        })
    channels.sort(key=lambda item: item["name"])
    return channels
def parseStreamDB(filename):
    """Read every <iptv> element from *filename* into a list of
    {'name','icon','type','uri'} dicts (all values stringified)."""
    document = ElementTree()
    document.parse(filename)
    records = []
    for node in document.findall('iptv'):
        records.append({
            'name': str(node.findtext('name')),
            'icon': str(node.findtext('icon')),
            'type': str(node.findtext('type')),
            'uri': str(node.findtext('uri')),
        })
    return records
def parseStreamDB(filename):
    """Load the <iptv> channel records stored in *filename*.

    Returns a list of {'name','icon','type','uri'} dicts, values coerced
    to str (missing children become the string 'None').
    """
    tree = ElementTree()
    tree.parse(filename)
    channels = []
    for entry in tree.findall('iptv'):
        fields = {key: str(entry.findtext(key))
                  for key in ('name', 'icon', 'type', 'uri')}
        channels.append(fields)
    return channels
def parseStreamList(self):
    """Collect the <iptv> entries of self.xml as {'name','icon','type','uri'}
    dicts; the uri is resolved through self.parseStreamURI."""
    tree = ElementTree()
    tree.parse(self.xml)
    items = []
    for entry in tree.findall('iptv'):
        raw_uri = str(entry.findtext('uri'))
        items.append({
            'name': str(entry.findtext('name')),
            'icon': str(entry.findtext('icon')),
            'type': str(entry.findtext('type')),
            'uri': self.parseStreamURI(raw_uri),
        })
    return items
def parseStreamList(self):
    """Return the <iptv> channels of self.xml sorted by their
    title-cased name; the uri is resolved through self.parseStreamURI."""
    tree = ElementTree()
    tree.parse(self.xml)
    items = []
    for entry in tree.findall('iptv'):
        items.append({
            'name': str(entry.findtext('name')).title(),
            'icon': str(entry.findtext('icon')),
            'type': str(entry.findtext('type')),
            'uri': self.parseStreamURI(str(entry.findtext('uri'))),
        })
    items.sort(key=lambda item: item['name'])
    return items
def run(self):
    """Harvest FileZilla credentials from every user home directory.

    Scans sitemanager/recentservers/filezilla XML files under both
    ~/.filezilla and ~/.config/filezilla (via the homes helper) and
    returns a list of {'Host','Port','Login'[, 'Password']} dicts.
    """
    candidates = [
        os.path.join(directory, filename)
        for directory in ('.filezilla', '.config/filezilla')
        for filename in ('sitemanager.xml', 'recentservers.xml', 'filezilla.xml')
    ]
    pwd_found = []
    for xml_file in homes.get(file=candidates):
        if not os.path.exists(xml_file):
            continue
        tree = ElementTree(file=xml_file)
        # Site-manager layout first; fall back to the recent-servers layout.
        servers = tree.findall('Servers/Server') or tree.findall('RecentServers/Server')
        for server in servers:
            host = server.find('Host')
            port = server.find('Port')
            login = server.find('User')
            password = server.find('Pass')
            # Childless elements are falsy, hence the explicit None checks.
            if host is None or port is None or login is None:
                continue
            values = {
                'Host': host.text,
                'Port': port.text,
                'Login': login.text,
            }
            if password is not None:
                if password.attrib.get('encoding') == 'base64':
                    values['Password'] = base64.b64decode(password.text)
                else:
                    values['Password'] = password.text
            pwd_found.append(values)
    return pwd_found
def run(self):
    """Harvest FileZilla credentials from %APPDATA%/FileZilla.

    Returns a list of {'Host','Port','Login'[, 'Password']} dicts, or None
    when FileZilla has no profile directory.
    """
    path = os.path.join(constant.profile['APPDATA'], u'FileZilla')
    if os.path.exists(path):
        pwd_found = []
        for file in [u'sitemanager.xml', u'recentservers.xml', u'filezilla.xml']:
            xml_file = os.path.join(path, file)
            if os.path.exists(xml_file):
                tree = ElementTree(file=xml_file)
                if tree.findall('Servers/Server'):
                    servers = tree.findall('Servers/Server')
                else:
                    servers = tree.findall('RecentServers/Server')
                for server in servers:
                    host = server.find('Host')
                    port = server.find('Port')
                    login = server.find('User')
                    password = server.find('Pass')
                    # Explicit None checks: ElementTree elements without
                    # children are falsy, so all((host, port, login)) and
                    # plain truth tests misbehave.
                    if host is not None and port is not None and login is not None:
                        values = {
                            'Host': host.text,
                            'Port': port.text,
                            'Login': login.text,
                        }
                        # BUGFIX: was "if password:", which is False for a
                        # childless <Pass> element, so passwords were never
                        # extracted; compare against None instead (as the
                        # host/port/login checks above already do).
                        if password is not None:
                            if 'encoding' in password.attrib and password.attrib['encoding'] == 'base64':
                                values['Password'] = base64.b64decode(password.text)
                            else:
                                values['Password'] = password.text
                        if values:
                            pwd_found.append(values)
        return pwd_found
def parseStreamListAthan(self):
    """Parse every <Choice> element of self.xml into a dict of its
    child-element texts (prayer times, location and misc settings).

    Missing children appear as the string 'None' (str(None)), matching
    the other parsers in this file.
    """
    # One entry per child element; BUGFIX: the original dict literal
    # listed 'sunrise' twice.  The unused counter ``im`` and the local
    # variable shadowing the ``next`` builtin are gone as well.
    fields = ('Contnt', 'Contr', 'fajr', 'sunrise', 'dhuhr', 'asr',
              'maghrib', 'isha', 'qiyam', 'url', 'Id', 'Lati', 'Longit',
              'hijri', 'clac', 'next', 'bled', 'haiaa', 'WeatheId')
    tree = ElementTree()
    tree.parse(self.xml)
    athan_list = []
    for choice in tree.findall('Choice'):
        athan_list.append({name: str(choice.findtext(name)) for name in fields})
    return athan_list
def run(self):
    """Harvest FileZilla credentials from every user home directory.

    Looks for the three FileZilla XML config files under both
    ~/.filezilla and ~/.config/filezilla (via the homes helper) and
    returns a list of {'Host','Port','Login'[, 'Password']} dicts.
    """
    names = ('sitemanager.xml', 'recentservers.xml', 'filezilla.xml')
    paths = [os.path.join(d, f)
             for d in ('.filezilla', '.config/filezilla')
             for f in names]
    found = []
    for xml_file in homes.get(file=paths):
        if not os.path.exists(xml_file):
            continue
        tree = ElementTree(file=xml_file)
        # Prefer the site-manager layout, else the recent-servers one.
        servers = tree.findall('Servers/Server') or tree.findall('RecentServers/Server')
        for server in servers:
            host = server.find('Host')
            port = server.find('Port')
            login = server.find('User')
            password = server.find('Pass')
            # Childless elements are falsy, hence explicit None checks.
            if host is None or port is None or login is None:
                continue
            entry = {
                'Host': host.text,
                'Port': port.text,
                'Login': login.text,
            }
            if password is not None:
                if password.attrib.get('encoding') == 'base64':
                    entry['Password'] = base64.b64decode(password.text)
                else:
                    entry['Password'] = password.text
            found.append(entry)
    return found
def dofindcontains(self, original_request, collection, match, label=""):
    """Depth:1 PROPFIND on *collection*, then GET each child resource and
    return the href of the first one whose body contains *match*.

    collection: (uri, user, pswd) tuple.  Returns the matching href, None
    when nothing matches, "" when the multistatus body cannot be parsed,
    or (False, message) on a malformed DAV:response.
    """
    hresult = ""
    uri = collection[0]
    req = request(self.manager)
    req.method = "PROPFIND"
    req.host = original_request.host
    req.port = original_request.port
    req.ruris.append(uri)
    req.ruri = uri
    req.headers["Depth"] = "1"
    # Fall back to session credentials when the tuple entries are empty.
    if len(collection[1]):
        req.user = collection[1]
    if len(collection[2]):
        req.pswd = collection[2]
    req.data = data(self.manager)
    req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
</D:prop>
</D:propfind>
"""
    req.data.content_type = "text/xml"
    result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s" % (label, "FINDNEW"))
    if result and (response is not None) and (response.status == 207) and (respdata is not None):
        try:
            tree = ElementTree(file=StringIO(respdata))
        except Exception:
            return hresult
        request_uri = req.getURI(self.manager.server_info)
        for response in tree.findall("{DAV:}response"):
            # Get href for this response
            href = response.findall("{DAV:}href")
            if len(href) != 1:
                return False, "        Wrong number of DAV:href elements\n"
            href = href[0].text
            # Skip the collection itself; GET every child until one matches.
            if href != request_uri:
                _ignore_result, respdata = self.doget(req, (href, collection[1], collection[2],), label)
                if respdata.find(match) != -1:
                    break
        else:
            # Loop finished without a break: no child contained *match*.
            href = None
        return href
def dofindcontains(self, original_request, collection, match, label=""):
    """Depth:1 PROPFIND on *collection*, then GET each child resource and
    return the href of the first one whose body contains *match*.

    collection: (uri, user, pswd) tuple.  Returns the matching href, None
    when nothing matches, "" when the multistatus body cannot be parsed,
    or (False, message) on a malformed DAV:response.
    """
    hresult = ""
    uri = collection[0]
    req = request(self.manager)
    req.method = "PROPFIND"
    req.host = original_request.host
    req.port = original_request.port
    req.ruris.append(uri)
    req.ruri = uri
    req.headers["Depth"] = "1"
    # Fall back to session credentials when the tuple entries are empty.
    if len(collection[1]):
        req.user = collection[1]
    if len(collection[2]):
        req.pswd = collection[2]
    req.data = data(self.manager)
    req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
</D:prop>
</D:propfind>
"""
    req.data.content_type = "text/xml"
    result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s" % (label, "FINDNEW"))
    if result and (response is not None) and (response.status == 207) and (respdata is not None):
        try:
            tree = ElementTree(file=StringIO(respdata))
        except Exception:
            return hresult
        request_uri = req.getURI(self.manager.server_info)
        for response in tree.findall("{DAV:}response"):
            # Get href for this response
            href = response.findall("{DAV:}href")
            if len(href) != 1:
                return False, "        Wrong number of DAV:href elements\n"
            href = href[0].text
            # Skip the collection itself; GET every child until one matches.
            if href != request_uri:
                _ignore_result, respdata = self.doget(req, (href, collection[1], collection[2],), label)
                if respdata.find(match) != -1:
                    break
        else:
            # Loop finished without a break: no child contained *match*.
            href = None
        return href
def parseTrackData(self, spotquery, spotify_metadata):
    """Parse a Spotify XML metadata response into a list of track dicts.

    Each dict carries the public track URL (built from the href's track
    id), the originating query fields from *spotquery*, and every tag
    listed in self.tags_wanted.
    """
    document = XML()
    document.parse(spotify_metadata)
    parsed = []
    for track in document.findall('sp:track', NS):
        track_id = track.get('href').split(':')[2]
        info = {
            'URL': SPOTIFY_OUTPUT_URL % track_id,
            'query': spotquery['query'],
            'locations': spotquery['locations'],
        }
        for tag, xpath in self.tags_wanted.items():
            info[tag] = track.findtext(xpath, None, NS)
        parsed.append(info)
    return parsed
def get_list(self, filename):
    """Refill the global driver_list with one [name, ID, firmware, driver,
    date] entry per <line> element of *filename*."""
    divpng = LoadPixmap(cached=True, path=resolveFilename(
        SCOPE_SKIN_IMAGE, 'skin_default/DEInfo/div-h-extended.png'))
    # Reset the shared list in place so existing references stay valid.
    del driver_list[:]
    tree = ElementTree()
    tree.parse(filename)
    for channel in tree.findall('line'):
        entry = [
            channel.findtext('name'),
            channel.findtext('ID'),
            channel.findtext('firmware'),
            channel.findtext('driver'),
            channel.findtext('date'),
        ]
        driver_list.append(entry)
def parse_input_xml(path):
    """Parse a SWIG pre-processed XML file.

    Returns the <include> element whose module name is 'swiglal_preproc',
    i.e. the root of the preprocessing interface parse tree.

    Raises SyntaxError (xml.etree ParseError) with the offending path in
    the message when the file cannot be parsed, and RuntimeError when no
    preprocessing root is present.
    """
    tree = ElementTree()
    try:
        tree.parse(path)
    except SyntaxError as exc:
        # BUGFIX: the format arguments were swapped, producing messages
        # like "could not parse XML input from '<error>': <path>".
        exc.args = ("could not parse XML input from '{}': {}".format(
            path, str(exc)), )
        raise
    # find root of preprocessing interface parse tree
    for incl in tree.findall('include'):
        if get_swig_attr(incl.find('module'), 'name') == 'swiglal_preproc':
            return incl
    raise RuntimeError(
        "could not find root of preprocessing interface parse tree",
    )
def run(self):
    """Extract DbVisualizer connection credentials from each user's
    ~/.dbvis/config70/dbvis.xml.

    Returns a list of dicts with Name/Login/masked-password/Driver plus
    Host/Port/SID pulled from the UrlVariables section.

    NOTE(review): uses ``Element.getchildren()``, removed in Python 3.9.
    """
    pwd_found = []
    for home in homes.get(directory=u'.dbvis'):
        path = os.path.join(home, u'config70', u'dbvis.xml')
        if os.path.exists(path):
            tree = ElementTree(file=path)
            # XML element -> output key; Password goes through self.decrypt.
            elements = {
                'Alias': 'Name',
                'Userid': 'Login',
                'Password': '******',
                'UrlVariables//Driver': 'Driver'
            }
            for e in tree.findall('Databases/Database'):
                values = {}
                for elem in elements:
                    # Best effort: a missing element or failing decrypt
                    # just leaves the field out.
                    try:
                        if elem != "Password":
                            values[elements[elem]] = e.find(elem).text
                        else:
                            values[elements[elem]] = self.decrypt(
                                e.find(elem).text)
                    except Exception:
                        pass
                try:
                    elem = e.find('UrlVariables')
                    for ee in elem.getchildren():
                        for ele in ee.getchildren():
                            if 'Server' == ele.attrib['UrlVariableName']:
                                values['Host'] = str(ele.text)
                            if 'Port' == ele.attrib['UrlVariableName']:
                                values['Port'] = str(ele.text)
                            if 'SID' == ele.attrib['UrlVariableName']:
                                values['SID'] = str(ele.text)
                except Exception:
                    pass
                if values:
                    pwd_found.append(values)
    return pwd_found
def extractProperty(self, propertyname, respdata):
    """Pull the value of *propertyname* from a WebDAV multistatus body.

    Only propstats whose DAV:status is a 2xx are considered.  Returns the
    property text (or concatenated serialized children for non-leaf
    properties), None when absent/unparseable, or (False, message) for a
    malformed DAV:prop.

    NOTE(review): uses ``Element.getchildren()``, removed in Python 3.9.
    """
    try:
        tree = ElementTree(file=StringIO(respdata))
    except Exception:
        return None
    for response in tree.findall("{DAV:}response"):
        # Get all property status
        propstatus = response.findall("{DAV:}propstat")
        for props in propstatus:
            # Determine status for this propstat
            status = props.findall("{DAV:}status")
            if len(status) == 1:
                statustxt = status[0].text
                status = False
                # Only the leading "2" of the status code matters here.
                if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                    status = (statustxt[9] == "2")
            else:
                status = False
            if not status:
                continue
            # Get properties for this propstat
            prop = props.findall("{DAV:}prop")
            if len(prop) != 1:
                return False, "        Wrong number of DAV:prop elements\n"
            for child in prop[0].getchildren():
                fqname = child.tag
                if len(child):
                    # Copy sub-element data as text into one long string and strip leading/trailing space
                    value = ""
                    for p in child.getchildren():
                        temp = tostring(p)
                        temp = temp.strip()
                        value += temp
                else:
                    value = child.text
                if fqname == propertyname:
                    return value
    return None
def run(self, profile):
    """Extract DbVisualizer connection credentials from dbvis.xml.

    profile: mapping with a 'USERPROFILE' key locating the user home.
    Returns a de-duplicated list of credential dicts, or None when no
    dbvis.xml exists.
    """
    path = os.path.join(profile['USERPROFILE'], '.dbvis', 'config70', 'dbvis.xml')
    if not os.path.isfile(path):
        return

    tree = ElementTree(file=path)
    # XML element -> output key; Password goes through self.decrypt.
    elements = {
        'Alias': 'Name',
        'Userid': 'Login',
        'Password': '******',
        'UrlVariables//Driver': 'Driver'
    }
    pwd_found = []
    seen = set()
    for e in tree.findall('Databases/Database'):
        values = {}
        for elem in elements:
            # Best effort: a missing element or failing decrypt simply
            # leaves the field out.
            try:
                if elem != "Password":
                    values[elements[elem]] = e.find(elem).text
                else:
                    values[elements[elem]] = self.decrypt(e.find(elem).text)
            except Exception:
                pass
        try:
            # Direct iteration replaces getchildren(), which was removed
            # in Python 3.9; a missing UrlVariables raises and is ignored.
            for variable_group in e.find('UrlVariables'):
                for variable in variable_group:
                    var_name = variable.attrib['UrlVariableName']
                    if var_name in ('Server', 'Port', 'SID'):
                        key = 'Host' if var_name == 'Server' else var_name
                        values[key] = str(variable.text)
        except Exception:
            pass
        if values:
            # BUGFIX: the original did pwd_found.add(list(values.values()))
            # on a set, which raises TypeError (lists are unhashable).
            # De-duplicate via a hashable key and keep dicts in the result,
            # matching the sibling dbvis collectors in this file.
            key = tuple(sorted(values.items()))
            if key not in seen:
                seen.add(key)
                pwd_found.append(values)
    return pwd_found
def extractProperty(self, propertyname, respdata):
    """Pull the value of *propertyname* from a WebDAV multistatus body.

    Only propstats whose DAV:status is a 2xx are considered.  Returns the
    property text (or concatenated serialized children for non-leaf
    properties), None when absent/unparseable, or (False, message) for a
    malformed DAV:prop.

    NOTE(review): uses ``Element.getchildren()``, removed in Python 3.9.
    """
    try:
        tree = ElementTree(file=StringIO(respdata))
    except Exception:
        return None
    for response in tree.findall("{DAV:}response"):
        # Get all property status
        propstatus = response.findall("{DAV:}propstat")
        for props in propstatus:
            # Determine status for this propstat
            status = props.findall("{DAV:}status")
            if len(status) == 1:
                statustxt = status[0].text
                status = False
                # Only the leading "2" of the status code matters here.
                if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                    status = (statustxt[9] == "2")
            else:
                status = False
            if not status:
                continue
            # Get properties for this propstat
            prop = props.findall("{DAV:}prop")
            if len(prop) != 1:
                return False, "        Wrong number of DAV:prop elements\n"
            for child in prop[0].getchildren():
                fqname = child.tag
                if len(child):
                    # Copy sub-element data as text into one long string and strip leading/trailing space
                    value = ""
                    for p in child.getchildren():
                        temp = tostring(p)
                        temp = temp.strip()
                        value += temp
                else:
                    value = child.text
                if fqname == propertyname:
                    return value
    return None
def extractElements(self, elementpath, parent, respdata):
    """Return the text of every element matched by *elementpath* in the
    XML body *respdata*.

    When *parent* is given, the search is rooted at that node (resolved
    via nodeForPath); otherwise the leading path component must match the
    document root.  Returns a list of text values, a bare string for a
    root-only path, or None on any parse/lookup failure.
    """
    try:
        tree = ElementTree()
        tree.parse(StringIO(respdata))
    except:
        return None
    if parent:
        tree_root = nodeForPath(tree.getroot(), parent)
        if not tree_root:
            return None
        tree_root = tree_root[0]
        # Handle absolute root element
        if elementpath[0] == '/':
            elementpath = elementpath[1:]
        root_path, child_path = xmlPathSplit(elementpath)
        if child_path:
            # The first component must name the resolved parent node.
            if tree_root.tag != root_path:
                return None
            e = tree_root.findall(child_path)
        else:
            e = (tree_root,)
    else:
        # Strip off the top-level item
        if elementpath[0] == '/':
            elementpath = elementpath[1:]
            splits = elementpath.split('/', 1)
            root = splits[0]
            if tree.getroot().tag != root:
                return None
            elif len(splits) == 1:
                # Path addressed only the root element: return its text.
                return tree.getroot().text
            else:
                elementpath = splits[1]
        e = tree.findall(elementpath)
    if e is not None:
        return [item.text for item in e]
    else:
        return None
def extractElements(self, elementpath, parent, respdata):
    """Return the text of every element matched by *elementpath* in the
    XML body *respdata*.

    When *parent* is given, the search is rooted at that node (resolved
    via nodeForPath); otherwise the leading path component must match the
    document root.  Returns a list of text values, a bare string for a
    root-only path, or None on any parse/lookup failure.
    """
    try:
        tree = ElementTree()
        tree.parse(StringIO(respdata))
    except:
        return None
    if parent:
        tree_root = nodeForPath(tree.getroot(), parent)
        if not tree_root:
            return None
        tree_root = tree_root[0]
        # Handle absolute root element
        if elementpath[0] == '/':
            elementpath = elementpath[1:]
        root_path, child_path = xmlPathSplit(elementpath)
        if child_path:
            # The first component must name the resolved parent node.
            if tree_root.tag != root_path:
                return None
            e = tree_root.findall(child_path)
        else:
            e = (tree_root,)
    else:
        # Strip off the top-level item
        if elementpath[0] == '/':
            elementpath = elementpath[1:]
            splits = elementpath.split('/', 1)
            root = splits[0]
            if tree.getroot().tag != root:
                return None
            elif len(splits) == 1:
                # Path addressed only the root element: return its text.
                return tree.getroot().text
            else:
                elementpath = splits[1]
        e = tree.findall(elementpath)
    if e is not None:
        return [item.text for item in e]
    else:
        return None
def run(self):
    """Extract DbVisualizer connection credentials from each user's
    ~/.dbvis/config70/dbvis.xml.

    Returns a list of dicts with Name/Login/masked-password/Driver plus
    Host/Port/SID pulled from the UrlVariables section.

    NOTE(review): uses ``Element.getchildren()``, removed in Python 3.9.
    """
    pwd_found = []
    for home in homes.get(directory=u'.dbvis'):
        path = os.path.join(home, u'config70', u'dbvis.xml')
        if os.path.exists(path):
            tree = ElementTree(file=path)
            # XML element -> output key; Password goes through self.decrypt.
            elements = {'Alias': 'Name',
                        'Userid': 'Login',
                        'Password': '******',
                        'UrlVariables//Driver': 'Driver'}
            for e in tree.findall('Databases/Database'):
                values = {}
                for elem in elements:
                    # Best effort: a missing element or failing decrypt
                    # just leaves the field out.
                    try:
                        if elem != "Password":
                            values[elements[elem]] = e.find(elem).text
                        else:
                            values[elements[elem]] = self.decrypt(e.find(elem).text)
                    except Exception:
                        pass
                try:
                    elem = e.find('UrlVariables')
                    for ee in elem.getchildren():
                        for ele in ee.getchildren():
                            if 'Server' == ele.attrib['UrlVariableName']:
                                values['Host'] = str(ele.text)
                            if 'Port' == ele.attrib['UrlVariableName']:
                                values['Port'] = str(ele.text)
                            if 'SID' == ele.attrib['UrlVariableName']:
                                values['SID'] = str(ele.text)
                except Exception:
                    pass
                if values:
                    pwd_found.append(values)
    return pwd_found
def run(self):
    """Recover Oracle SQL Developer connection data from the user profile.

    Locates connections.xml either directly under the SQL Developer
    APPDATA directory or inside a system*/o.jdeveloper.db.connection*
    subdirectory, then decodes each stored connection reference.
    Returns a list of per-connection dicts, or None when nothing is found.
    The passphrase recovered from the profile is kept on self._passphrase
    for self.decrypt to use.
    """
    path = os.path.join(constant.profile['APPDATA'], u'SQL Developer')
    if os.path.exists(path):
        self._passphrase = self.get_passphrase(path)
        if self._passphrase:
            self.debug(u'Passphrase found: {passphrase}'.format(passphrase=self._passphrase))
            xml_name = u'connections.xml'
            xml_file = None
            if os.path.exists(os.path.join(path, xml_name)):
                xml_file = os.path.join(path, xml_name)
            else:
                # Newer releases keep the file under
                # system*/o.jdeveloper.db.connection*/connections.xml.
                for p in os.listdir(path):
                    if p.startswith('system'):
                        new_directory = os.path.join(path, p)
                        for pp in os.listdir(new_directory):
                            if pp.startswith(u'o.jdeveloper.db.connection'):
                                if os.path.exists(os.path.join(new_directory, pp, xml_name)):
                                    xml_file = os.path.join(new_directory, pp, xml_name)
                                    # NOTE(review): only exits the inner
                                    # loop; a later system* directory can
                                    # still overwrite xml_file.
                                    break
            if xml_file:
                # addrType -> output key (some values are masked in this
                # source copy).
                renamed_value = {'sid': 'SID',
                                 'port': 'Port',
                                 'hostname': 'Host',
                                 'user': '******',
                                 'password': '******',
                                 'ConnName': 'Name',
                                 'customUrl': 'URL',
                                 'SavePassword': '******',
                                 'driver': 'Driver'}
                tree = ElementTree(file=xml_file)
                pwd_found = []
                for e in tree.findall('Reference'):
                    values = {}
                    for ee in e.findall('RefAddresses/StringRefAddr'):
                        if ee.attrib['addrType'] in renamed_value and ee.find('Contents').text is not None:
                            name = renamed_value[ee.attrib['addrType']]
                            value = ee.find('Contents').text if name != 'Password' else self.decrypt(
                                ee.find('Contents').text)
                            values[name] = value
                    pwd_found.append(values)
                return pwd_found
def updatebq(self):
    """Rebuild the GreekStreamTV Enigma2 bouquet file from the GSXML list.

    Reads every <iptv> entry from GSXML, formats one #SERVICE line per
    channel (sorted by channel name) and rewrites the GSBQ bouquet file.
    """
    # xml.etree.cElementTree was removed in Python 3.9; the plain module is
    # behavior-identical.
    from xml.etree.ElementTree import ElementTree
    tree = ElementTree()
    tree.parse(GSXML)
    tvlist = []
    for iptv in tree.findall("iptv"):
        name = iptv.findtext("name").title()
        # <type> packs "protocol:serviceType:bufferSize:epgId".
        (protocol, serviceType, bufferSize, epgId) = iptv.findtext("type").split(":")
        uri = iptv.findtext("uri")
        # BUGFIX: was "protocol in 'livestreamer'", a substring test that
        # also matched e.g. "live" or "stream"; an exact match is intended.
        if protocol == "livestreamer":
            # Route through the local livestreamer proxy; colons in the URI
            # must be escaped to keep the service reference parseable.
            uri = "http://localhost:88/" + uri
            uri = uri.replace(":", "%3a")
        service = "#SERVICE {s}:0:1:{e}:{e}:0:0:0:0:0:{u}:{n}\n".format(
            s=serviceType, e=epgId, u=uri, n=name)
        tvlist.append((name, service))
    tvlist = sorted(tvlist, key=lambda channel: channel[0])  # sort by name
    with open(GSBQ, "w") as f:
        f.write("#NAME GreekStreamTV\n")
        for (name, service) in tvlist:
            f.write(service)
def dowaitcount(self, collection, count, label=""):
    """Poll *collection* with a Depth:1 PROPFIND until it holds *count*
    child resources.

    collection: (uri, user, pswd) tuple; empty user/pswd fall back to the
    session defaults.  Returns None once the count matches, otherwise the
    last observed child count after waitcount attempts.
    """
    for _ignore in range(self.manager.server_info.waitcount):
        req = request(self.manager)
        req.method = "PROPFIND"
        req.ruris.append(collection[0])
        req.ruri = collection[0]
        req.headers["Depth"] = "1"
        if len(collection[1]):
            req.user = collection[1]
        if len(collection[2]):
            req.pswd = collection[2]
        req.data = data()
        req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
</D:prop>
</D:propfind>
"""
        req.data.content_type = "text/xml"
        result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s %d" % (label, "WAITCOUNT", count))
        ctr = 0
        if result and (response is not None) and (response.status == 207) and (respdata is not None):
            tree = ElementTree(file=StringIO(respdata))
            # Count multistatus responses; the first one is the collection
            # itself, hence the ctr - 1 comparisons below.
            for response in tree.findall("{DAV:}response"):
                ctr += 1
            if ctr - 1 == count:
                return None
        # Busy-wait before the next poll attempt.
        delay = self.manager.server_info.waitdelay
        starttime = time.time()
        while (time.time() < starttime + delay):
            pass
    else:
        # for/else: every attempt was exhausted without a matching count.
        return ctr - 1
class GraphMLReader(GraphML):
    """Read a GraphML document.  Produces NetworkX graph objects."""

    def __init__(self, node_type=str, edge_key_type=int):
        # node_type: callable used to cast node ids (e.g. str, int).
        # edge_key_type: callable used to cast edge ids into multigraph keys.
        try:
            import xml.etree.ElementTree
        except ImportError:
            msg = 'GraphML reader requires xml.elementtree.ElementTree'
            raise ImportError(msg)
        self.node_type = node_type
        self.edge_key_type = edge_key_type
        self.multigraph = False  # assume multigraph and test for multiedges
        self.edge_ids = {}  # dict mapping (u,v) tuples to id edge attributes

    def __call__(self, path=None, string=None):
        """Yield one NetworkX graph per <graph> element found in the
        document given either as a file `path` or an XML `string`."""
        if path is not None:
            self.xml = ElementTree(file=path)
        elif string is not None:
            self.xml = fromstring(string)
        else:
            raise ValueError("Must specify either 'path' or 'string' as kwarg")
        (keys, defaults) = self.find_graphml_keys(self.xml)
        for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML):
            yield self.make_graph(g, keys, defaults)

    def make_graph(self, graph_xml, graphml_keys, defaults, G=None):
        """Build a graph from one <graph> element.

        `G` is passed in when recursing into yEd group nodes so their
        children are added to the parent graph.
        """
        # set default graph type
        edgedefault = graph_xml.get("edgedefault", None)
        if G is None:
            if edgedefault == 'directed':
                G = nx.MultiDiGraph()
            else:
                G = nx.MultiGraph()
        # set defaults for graph attributes
        G.graph['node_default'] = {}
        G.graph['edge_default'] = {}
        for key_id, value in defaults.items():
            key_for = graphml_keys[key_id]['for']
            name = graphml_keys[key_id]['name']
            python_type = graphml_keys[key_id]['type']
            if key_for == 'node':
                G.graph['node_default'].update({name: python_type(value)})
            if key_for == 'edge':
                G.graph['edge_default'].update({name: python_type(value)})
        # hyperedges are not supported
        hyperedge = graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML)
        if hyperedge is not None:
            raise nx.NetworkXError("GraphML reader doesn't support hyperedges")
        # add nodes
        for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML):
            self.add_node(G, node_xml, graphml_keys, defaults)
        # add edges
        for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML):
            self.add_edge(G, edge_xml, graphml_keys)
        # add graph data
        data = self.decode_data_elements(graphml_keys, graph_xml)
        G.graph.update(data)

        # switch to Graph or DiGraph if no parallel edges were found.
        if not self.multigraph:
            if G.is_directed():
                G = nx.DiGraph(G)
            else:
                G = nx.Graph(G)
            # add explicit edge "id" from file as attribute in NX graph.
            nx.set_edge_attributes(G, values=self.edge_ids, name='id')
        return G

    def add_node(self, G, node_xml, graphml_keys, defaults):
        """Add a node to the graph.
        """
        # warn on finding unsupported ports tag
        ports = node_xml.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # find the node by id and cast it to the appropriate type
        node_id = self.node_type(node_xml.get("id"))
        # get data/attributes for node
        data = self.decode_data_elements(graphml_keys, node_xml)
        G.add_node(node_id, **data)
        # get child nodes: a yEd "group" node contains a nested <graph>.
        if node_xml.attrib.get('yfiles.foldertype') == 'group':
            graph_xml = node_xml.find("{%s}graph" % self.NS_GRAPHML)
            self.make_graph(graph_xml, graphml_keys, defaults, G)

    def add_edge(self, G, edge_element, graphml_keys):
        """Add an edge to the graph.
        """
        # warn on finding unsupported ports tag
        ports = edge_element.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")

        # raise error if we find mixed directed and undirected edges
        directed = edge_element.get("directed")
        if G.is_directed() and directed == 'false':
            msg = "directed=false edge found in directed graph."
            raise nx.NetworkXError(msg)
        if (not G.is_directed()) and directed == 'true':
            msg = "directed=true edge found in undirected graph."
            raise nx.NetworkXError(msg)

        source = self.node_type(edge_element.get("source"))
        target = self.node_type(edge_element.get("target"))
        data = self.decode_data_elements(graphml_keys, edge_element)
        # GraphML stores edge ids as an attribute -
        # NetworkX uses them as keys in multigraphs too if no key
        # attribute is specified
        edge_id = edge_element.get("id")
        if edge_id:
            # self.edge_ids is used by `make_graph` method for non-multigraphs
            self.edge_ids[source, target] = edge_id
            try:
                edge_id = self.edge_key_type(edge_id)
            except ValueError:  # Could not convert.
                pass
        else:
            edge_id = data.get('key')

        if G.has_edge(source, target):
            # mark this as a multigraph
            self.multigraph = True

        # Use add_edges_from to avoid error with add_edge when `'key' in data`
        G.add_edges_from([(source, target, edge_id, data)])

    def decode_data_elements(self, graphml_keys, obj_xml):
        """Use the key information to decode the data XML if present."""
        data = {}
        for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML):
            key = data_element.get("key")
            try:
                data_name = graphml_keys[key]['name']
                data_type = graphml_keys[key]['type']
            except KeyError:
                raise nx.NetworkXError("Bad GraphML data: no key %s" % key)
            text = data_element.text
            # assume anything with subelements is a yfiles extension
            if text is not None and len(list(data_element)) == 0:
                if data_type == bool:
                    # Ignore cases.
                    # http://docs.oracle.com/javase/6/docs/api/java/lang/
                    # Boolean.html#parseBoolean%28java.lang.String%29
                    data[data_name] = self.convert_bool[text.lower()]
                else:
                    data[data_name] = data_type(text)
            elif len(list(data_element)) > 0:
                # Assume yfiles as subelements, try to extract node_label
                node_label = None
                for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']:
                    pref = "{%s}%s/{%s}" % (self.NS_Y, node_type, self.NS_Y)
                    geometry = data_element.find("%sGeometry" % pref)
                    if geometry is not None:
                        data['x'] = geometry.get('x')
                        data['y'] = geometry.get('y')
                    if node_label is None:
                        node_label = data_element.find("%sNodeLabel" % pref)
                if node_label is not None:
                    data['label'] = node_label.text

                # check all the different types of edges available in yEd.
                for e in ['PolyLineEdge', 'SplineEdge', 'QuadCurveEdge',
                          'BezierEdge', 'ArcEdge']:
                    pref = "{%s}%s/{%s}" % (self.NS_Y, e, self.NS_Y)
                    edge_label = data_element.find("%sEdgeLabel" % pref)
                    if edge_label is not None:
                        break
                if edge_label is not None:
                    data['label'] = edge_label.text
        return data

    def find_graphml_keys(self, graph_element):
        """Extracts all the keys and key defaults from the xml.
        """
        graphml_keys = {}
        graphml_key_defaults = {}
        for k in graph_element.findall("{%s}key" % self.NS_GRAPHML):
            attr_id = k.get("id")
            attr_type = k.get('attr.type')
            attr_name = k.get("attr.name")
            yfiles_type = k.get("yfiles.type")
            if yfiles_type is not None:
                # yFiles extension keys carry their type in yfiles.type.
                attr_name = yfiles_type
                attr_type = 'yfiles'
            if attr_type is None:
                attr_type = "string"
                warnings.warn("No key type for id %s. Using string" % attr_id)
            if attr_name is None:
                raise nx.NetworkXError("Unknown key for id %s." % attr_id)
            graphml_keys[attr_id] = {"name": attr_name,
                                     "type": self.python_type[attr_type],
                                     "for": k.get("for")}
            # check for "default" subelement of key element
            default = k.find("{%s}default" % self.NS_GRAPHML)
            if default is not None:
                graphml_key_defaults[attr_id] = default.text
        return graphml_keys, graphml_key_defaults
class ServiceProvidersParserTest(unittest.TestCase):
    """Checks ServiceProvidersParser against the raw providers XML file."""

    def setUp(self):
        # Parse the XML directly as the "expected" view, and build the
        # parser under test as the "actual" view.
        self.tree = ElementTree(file=PROVIDERS_PATH)
        self.countries_from_xml = self.tree.findall('country')
        self.db = ServiceProvidersParser()
        self.countries_from_class = self.db.get_countries()

    def test_get_countries(self):
        # Every element returned must be a <country> element.
        for country in self.countries_from_class:
            self.assertEqual(country.tag, 'country')

    def test_get_country_idx_by_code(self):
        # Index lookup by country code must match enumeration order.
        for idx, country in enumerate(self.countries_from_class):
            country_code = country.attrib['code']
            country_idx = self.db.get_country_idx_by_code(country_code)
            self.assertEqual(idx, country_idx)

    def test_get_country_name_by_idx(self):
        # Name lookup must agree with the CountryCodeParser mapping.
        for idx, country in enumerate(self.countries_from_class):
            country_code = country.attrib['code']
            self.assertEqual(CountryCodeParser().get(country_code),
                             self.db.get_country_name_by_idx(idx))

    def test_get_providers(self):
        # All returned providers must be <provider> elements with a <gsm> child.
        for country_idx, country in enumerate(self.countries_from_class):
            providers = self.db.get_providers(country_idx)
            for provider in providers:
                self.assertEqual(provider.tag, 'provider')
                self.assertIsNotNone(provider.find('.//gsm'))

    def test_get_plans(self):
        # All returned plans must be <apn> elements.
        for country_idx, country in enumerate(self.countries_from_class):
            providers = self.db.get_providers(country_idx)
            for provider_idx, provider in enumerate(providers):
                plans = self.db.get_plans(country_idx, provider_idx)
                for plan in plans:
                    self.assertEqual(plan.tag, 'apn')

    def get_providers(self, country_xml):
        """Given a country element find all provider with a gsm tag."""
        # NOTE(review): `if provider.find('.//gsm'):` tests Element
        # truthiness, which is False for an element with no children even
        # when it exists — presumably `is not None` was intended; confirm.
        idx = 0
        for provider in country_xml.findall('provider'):
            if provider.find('.//gsm'):
                yield idx, provider
                idx = idx + 1

    def get_plans(self, provider_xml):
        """Given a provider element find all apn elements."""
        for idx, plan in enumerate(provider_xml.findall('.//apn')):
            yield idx, plan

    def test_get_some_specific_values(self):
        # Cross-check apn values between the raw XML walk (helpers above)
        # and the parser under test.
        for country in self.countries_from_xml:
            country_code = country.attrib['code']
            country_idx = self.db.get_country_idx_by_code(country_code)
            for provider_idx, provider in self.get_providers(country):
                plans_from_class = self.db.get_plans(country_idx, provider_idx)
                for plan_idx, plan in self.get_plans(provider):
                    plan_from_class = plans_from_class[plan_idx]
                    self.assertEqual(plan.attrib['value'],
                                     plan_from_class.attrib['value'])
class XMLTV:
    """Holds a filtered XMLTV tree and uploads its EPG data to VDR over
    SVDRP."""

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        # In-memory <tv> tree holding only the channels/programmes we keep.
        self._tree = ElementTree()
        self._tree._setroot(Element('tv'))
        # Channel ids that were actually found while parsing.
        self._loaded_channels = []

    def get_loaded_channels(self):
        return self._loaded_channels

    def parse_xmltv_file(self, filename, channel_list):
        """ Process given xmltv file and create xml tree for our channel_list """
        self.logger.debug("Start <%s> parsing>", filename)
        # Transparently support gzip-compressed XMLTV files.
        if filename.endswith('gz'):
            import gzip
            open_func = gzip.open
        else:
            open_func = open
        with open_func(filename) as fp:
            # Incremental parse: keep matching elements, clear the rest to
            # bound memory usage on large files.
            for event, elem in iterparse(fp):
                if elem.tag == 'channel':
                    if elem.attrib['id'] in channel_list:
                        self.logger.debug("Add <%s> channel element", elem.attrib['id'])
                        self._tree.getroot().append(elem)
                        if elem.attrib['id'] not in self._loaded_channels:
                            self._loaded_channels.append(elem.attrib['id'])
                    else:
                        elem.clear()
                elif elem.tag == 'programme':
                    if elem.attrib['channel'] in channel_list:
                        self._tree.getroot().append(elem)
                    else:
                        elem.clear()
        self.logger.debug('File parsing complete!')

    @classmethod
    def parse_date_tz(cls, date_str):
        """
        Parse date like this: '20130429073000 +0300'
        (python libs unable to process timezone info in +0300 format)
        Most code was taken from email.util package
        :return: parsed datetime object (normalised to UTC)
        """
        tz = date_str[-5:]           # trailing '+HHMM' offset
        date_str_notz = date_str[:-6]  # strip ' +HHMM'
        tz_offset = int(tz)
        # Convert a timezone offset into seconds ; -0500 -> -18000
        if tz_offset:
            if tz_offset < 0:
                tz_sign = -1
                tz_offset = -tz_offset
            else:
                tz_sign = 1
            tz_offset = tz_sign * ((tz_offset // 100) * 3600 + (tz_offset % 100) * 60)
        time = datetime.strptime(date_str_notz, xmltv.date_format_notz)
        # Subtract the offset to get UTC.
        delta = timedelta(seconds=tz_offset)
        time -= delta
        return time

    def parse_programme(self, elem):
        """ Convert programme element to dictionary """
        programme = xmltv.elem_to_programme(elem)
        # Add POSIX timestamps for the start/stop times (UTC).
        programme['start_timestamp'] = calendar.timegm(XMLTV.parse_date_tz(programme['start']).utctimetuple())
        programme['stop_timestamp'] = calendar.timegm(XMLTV.parse_date_tz(programme['stop']).utctimetuple())
        self.logger.debug("Programme: %s", programme)
        return programme

    def get_tv_schedule(self, channel_name=None):
        """ Get generator object for tv schedule of given channel_name or for all loaded channels """
        if channel_name is not None:
            filter_str = 'programme[@channel="%s"]' % channel_name
        else:
            filter_str = 'programme'
        for elem in self._tree.findall(filter_str):
            yield self.parse_programme(elem)

    def send_clear_channel_epg(self, channels_id, svdrp):
        """ Send to VDR clear channel EPG command for provided channels entries """
        for channel_entry in channels_id:
            self.logger.info('Clear EPG for channel %s (%s)', channel_entry['name'], channel_entry['id'])
            svdrp_response = svdrp.send_command('CLRE %s' % channel_entry['id'])
            self.logger.debug('SVDRP Response: %s', svdrp_response)

    def check_upload_result(self, upload_responses_list):
        """ Process received VDR response on EPG upload command """
        if len(upload_responses_list) != 1:
            self.logger.error('Invalid SVDRP Response: %s', upload_responses_list)
            return
        upload_response = upload_responses_list[0]
        # 250 is the SVDRP "ok" status for a completed PUTE upload.
        if upload_response.code == 250:
            self.logger.info('EPG uploaded successfully')
        else:
            self.logger.error('EPG uploaded unsuccessfully, response: %s', upload_response)

    def process_tv_schedule(self, channels_map, svdrp):
        """ Process XMLTV tree and upload EPG to VDR """
        timestamp_utc_now = get_timestamp_utc_now()
        svdrp.start_conversation()
        for channel_name in self.get_loaded_channels():
            epg_channels = channels_map[channel_name]
            self.logger.info("Load <%s> to %s", channel_name, epg_channels)
            current_channel_id = None
            self.send_clear_channel_epg(epg_channels, svdrp)
            self.logger.info('Start EPG upload')
            svdrp_response = svdrp.send_command('PUTE')
            self.logger.debug('SVDRP Response: %s', svdrp_response)
            for prg in self.get_tv_schedule(channel_name):
                if prg['stop_timestamp'] < timestamp_utc_now:
                    # skip old entry
                    continue
                for channel_entry in epg_channels:
                    vdr_channel_id = channel_entry['id']
                    vdr_channel_name = channel_entry['name']
                    if current_channel_id is None or current_channel_id != vdr_channel_id:
                        if current_channel_id is not None:
                            # finish previous channel entries
                            svdrp.send('c')
                        # start new channel
                        svdrp.send('C %s %s' % (vdr_channel_id, vdr_channel_name))
                        current_channel_id = vdr_channel_id
                    # start EPG entry (event id reuses the start timestamp)
                    svdrp.send('E %(event_id)s %(start_time)d %(duration)d' % {
                        'event_id': prg['start_timestamp'],
                        'start_time': prg['start_timestamp'],
                        'duration': prg['stop_timestamp'] - prg['start_timestamp']
                    })
                    if 'title' in prg:
                        svdrp.send('T %s' % prg['title'][0][0].replace('\\n', '|'))
                    if 'sub-title' in prg:
                        svdrp.send('S %s' % prg['sub-title'][0][0].replace('\\n', '|'))
                    if 'desc' in prg:
                        svdrp.send('D %s' % prg['desc'][0][0].replace('\\n', '|'))
                    # end entry
                    svdrp.send('e')
            else:
                # for/else: runs when the schedule loop completes normally —
                # close the last channel and terminate the PUTE upload.
                if current_channel_id is not None:
                    svdrp.send('c')
                svdrp_response = svdrp.send_command('.')
                self.check_upload_result(svdrp_response)
                self.logger.debug('SVDRP Response: %s', svdrp_response)
        self.logger.debug('Finish conversation with VDR')
        svdrp_response = svdrp.finish_conversation()
        self.logger.debug('SVDRP Response: %s', svdrp_response)
def dofindnew(self, original_request, collection, label="", other=False):
    """Depth:1 PROPFIND on `collection` (a (uri, user, pswd) tuple) and
    return the href of the most recently modified child resource,
    preferring one not returned by a previous call.

    When `other` is True, search the parent collection of the given uri
    and skip the given resource itself.  Returns "" if nothing is found.
    """
    hresult = ""

    uri = collection[0]
    if other:
        # Look in the resource's parent collection, skipping the resource.
        uri = self.manager.server_info.extrasubs(uri)
        skip = uri
        uri = "/".join(uri.split("/")[:-1]) + "/"
    else:
        skip = None
    possible_matches = set()
    req = request(self.manager)
    req.method = "PROPFIND"
    req.host = original_request.host
    req.port = original_request.port
    req.ruris.append(uri)
    req.ruri = uri
    req.headers["Depth"] = "1"
    # Optional per-collection credentials.
    if len(collection[1]):
        req.user = collection[1]
    if len(collection[2]):
        req.pswd = collection[2]
    req.data = data(self.manager)
    req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
<D:getlastmodified/>
</D:prop>
</D:propfind>
"""
    req.data.content_type = "text/xml"
    result, _ignore_resulttxt, response, respdata = self.dorequest(
        req, False, False, label="%s | %s" % (label, "FINDNEW"))
    if result and (response is not None) and (response.status == 207) and (respdata is not None):
        try:
            tree = ElementTree(file=StringIO(respdata))
        except Exception:
            return hresult

        latest = 0
        request_uri = req.getURI(self.manager.server_info)
        for response in tree.findall("{DAV:}response"):
            # Get href for this response
            href = response.findall("{DAV:}href")
            if len(href) != 1:
                # NOTE(review): returns a (bool, str) tuple here while every
                # other path returns a plain href string — callers presumably
                # treat this as an error sentinel; confirm.
                return False, " Wrong number of DAV:href elements\n"
            href = href[0].text
            # Skip the collection itself and, in `other` mode, the original.
            if href != request_uri and (not other or href != skip):
                # Get all property status
                propstatus = response.findall("{DAV:}propstat")
                for props in propstatus:
                    # Determine status for this propstat
                    status = props.findall("{DAV:}status")
                    if len(status) == 1:
                        statustxt = status[0].text
                        status = False
                        if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                            # Any 2xx status counts as success.
                            status = (statustxt[9] == "2")
                    else:
                        status = False
                    if status:
                        # Get properties for this propstat
                        prop = props.findall("{DAV:}prop")
                        for el in prop:
                            # Get getlastmodified and convert to a timestamp.
                            glm = el.findall("{DAV:}getlastmodified")
                            if len(glm) != 1:
                                continue
                            value = glm[0].text
                            value = rfc822.parsedate(value)
                            value = time.mktime(value)
                            # Track the href(s) carrying the newest timestamp.
                            if value > latest:
                                possible_matches.clear()
                                possible_matches.add(href)
                                latest = value
                            elif value == latest:
                                possible_matches.add(href)
                            elif not hresult:
                                possible_matches.add(href)

    if len(possible_matches) == 1:
        hresult = possible_matches.pop()
    elif len(possible_matches) > 1:
        # Ambiguous: prefer the one href we have not handed out before.
        not_seen_before = possible_matches - self.previously_found
        if len(not_seen_before) == 1:
            hresult = not_seen_before.pop()
    if hresult:
        self.previously_found.add(hresult)
    return hresult
def verify(self, manager, uri, response, respdata, args):
    """Verify a WebDAV multistatus response against the hrefs/status codes
    requested in `args` (okhrefs / nohrefs / badhrefs / numeric status
    keys / count / totalcount / responsecount).

    Returns (result, resulttxt).
    """
    # If no hrefs requested, then assume none should come back
    okhrefs = args.get("okhrefs", [])
    nohrefs = args.get("nohrefs", [])
    badhrefs = args.get("badhrefs", [])
    # Numeric arg names ("201", "404", ...) map expected hrefs per status code.
    statushrefs = {}
    for arg in args.keys():
        try:
            code = int(arg)
            statushrefs.setdefault(code, []).append(args[arg])
        except ValueError:
            pass
    count = args.get("count", [])
    totalcount = args.get("totalcount", [])
    responsecount = args.get("responsecount", [])
    prefix = args.get("prefix", [])
    ignoremissing = args.get("ignoremissing", None)
    # "-" means no prefix; default prefix is the request uri.
    if len(prefix):
        prefix = prefix[0] if prefix[0] != "-" else ""
    else:
        prefix = uri
    okhrefs = processHrefSubstitutions(okhrefs, prefix)
    nohrefs = processHrefSubstitutions(nohrefs, prefix)
    badhrefs = processHrefSubstitutions(badhrefs, prefix)
    # Counts may be arithmetic expressions in the test script.
    count = [int(eval(i)) for i in count]
    totalcount = [int(eval(i)) for i in totalcount]
    responsecount = [int(eval(i)) for i in responsecount]
    # Choose verification mode: href sets vs per-status-code sets.
    if "okhrefs" in args or "nohrefs" in args or "badhrefs" in args:
        doOKBad = True
    elif statushrefs:
        doOKBad = False
    else:
        doOKBad = None

    # Process the multistatus response, extracting all hrefs
    # and comparing with the set defined for this test. Report any
    # mismatches.

    # Must have MULTISTATUS response code
    if response.status != 207:
        return False, " HTTP Status for Request: %d\n" % (response.status,)

    try:
        tree = ElementTree(file=StringIO(respdata))
    except Exception:
        return False, " HTTP response is not valid XML: %s\n" % (respdata,)

    ok_status_hrefs = []
    bad_status_hrefs = []
    status_code_hrefs = {}
    for response in tree.findall("{DAV:}response"):
        # Get href for this response
        href = response.findall("{DAV:}href")
        if href is None or len(href) != 1:
            return False, " Incorrect/missing DAV:Href element in response"
        href = urllib.unquote(href[0].text).rstrip("/")

        # Verify status: either a direct DAV:status child, or implied OK
        # by the presence of propstat elements.
        status = response.findall("{DAV:}status")
        if len(status) == 1:
            statustxt = status[0].text
            status = False
            if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                status = (statustxt[9] == "2")
            try:
                code = int(statustxt[9:12])
            except ValueError:
                code = 0
        else:
            propstatus = response.findall("{DAV:}propstat")
            if len(propstatus) > 0:
                statustxt = "OK"
                status = True
            else:
                status = False
            code = 0

        if status:
            ok_status_hrefs.append(href)
        else:
            bad_status_hrefs.append(href)
        status_code_hrefs.setdefault(code, set()).add(href)

    ok_result_set = set(ok_status_hrefs)
    ok_test_set = set(okhrefs)
    no_test_set = set(nohrefs)
    bad_result_set = set(bad_status_hrefs)
    bad_test_set = set(badhrefs)

    result = True
    resulttxt = ""

    # Check for count (the +1/-1 excludes the collection's own response)
    if len(count) == 1:
        if len(ok_result_set) != count[0] + 1:
            result = False
            resulttxt += " %d items returned, but %d items expected" % (
                len(ok_result_set) - 1, count[0],)
        return result, resulttxt

    # Check for total count
    if len(totalcount) > 0:
        # Add the 2nd value to the 1st if it exists
        if len(totalcount) == 2:
            totalcount[0] += totalcount[1]
        if len(ok_result_set) != totalcount[0]:
            result = False
            resulttxt += " %d items returned, but %d items expected" % (
                len(ok_result_set), totalcount[0],)
        return result, resulttxt

    # Check for response count (OK + BAD responses combined)
    if len(responsecount) == 1:
        responses = len(ok_result_set) + len(bad_result_set)
        if responses != responsecount[0]:
            result = False
            resulttxt += " %d responses returned, but %d responses expected" % (
                responses, responsecount[0],)
        return result, resulttxt

    if doOKBad:
        # Now do set difference; `ignoremissing` suppresses the "extras"
        # checks, not the "missing" ones.
        ok_missing = ok_test_set.difference(ok_result_set)
        ok_extras = ok_result_set.difference(ok_test_set) if ignoremissing is None else set()
        no_extras = ok_result_set.intersection(no_test_set)
        bad_missing = bad_test_set.difference(bad_result_set)
        bad_extras = bad_result_set.difference(bad_test_set) if ignoremissing is None else set()

        if len(ok_missing) + len(ok_extras) + len(no_extras) + len(bad_missing) + len(bad_extras) != 0:
            if len(ok_missing) != 0:
                l = list(ok_missing)
                resulttxt += " %d Items not returned in report (OK):" % (len(ok_missing),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(ok_extras) != 0:
                l = list(ok_extras)
                resulttxt += " %d Unexpected items returned in report (OK):" % (len(ok_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(no_extras) != 0:
                l = list(no_extras)
                resulttxt += " %d Unwanted items returned in report (OK):" % (len(no_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_missing) != 0:
                l = list(bad_missing)
                resulttxt += " %d Items not returned in report (BAD):" % (len(bad_missing),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_extras) != 0:
                l = list(bad_extras)
                resulttxt += " %d Unexpected items returned in report (BAD):" % (len(bad_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            result = False

    if not doOKBad:
        # Per-status-code mode: compare the code->href mapping both ways.
        l = list(set(statushrefs.keys()) - set(status_code_hrefs.keys()))
        if l:
            resulttxt += " %d Status Codes not returned in report:" % (len(l),)
            for i in l:
                resulttxt += " " + str(i)
            resulttxt += "\n"
            result = False

        l = list(set(status_code_hrefs.keys()) - set(statushrefs.keys()))
        if l:
            resulttxt += " %d Unexpected Status Codes returned in report:" % (len(l),)
            for i in l:
                resulttxt += " " + str(i)
            resulttxt += "\n"
            result = False

        for code in set(statushrefs.keys()) & set(status_code_hrefs.keys()):
            l = list(set(*statushrefs[code]) - status_code_hrefs[code])
            if l:
                resulttxt += " %d Items not returned in report for %d:" % (len(l), code,)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
                result = False

            l = list(status_code_hrefs[code] - set(*statushrefs[code]))
            if l:
                resulttxt += " %d Unexpected items returned in report for %d:" % (len(l), code,)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
                result = False

    return result, resulttxt
class GraphMLReader(GraphML):
    """Read a GraphML document.  Produces NetworkX graph objects.
    """
    def __init__(self, node_type=str):
        # node_type: callable used to cast node ids (e.g. str, int).
        try:
            import xml.etree.ElementTree
        except ImportError:
            raise ImportError('GraphML reader requires '
                              'xml.elementtree.ElementTree')
        self.node_type = node_type
        self.multigraph = False  # assume multigraph and test for parallel edges

    def __call__(self, stream):
        """Yield one NetworkX graph per <graph> element in `stream`."""
        self.xml = ElementTree(file=stream)
        (keys, defaults) = self.find_graphml_keys(self.xml)
        for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML):
            yield self.make_graph(g, keys, defaults)

    def make_graph(self, graph_xml, graphml_keys, defaults):
        """Build a graph object from one <graph> element."""
        # set default graph type
        edgedefault = graph_xml.get("edgedefault", None)
        if edgedefault == 'directed':
            G = nx.MultiDiGraph()
        else:
            G = nx.MultiGraph()
        # set defaults for graph attributes
        for key_id, value in defaults.items():
            key_for = graphml_keys[key_id]['for']
            name = graphml_keys[key_id]['name']
            python_type = graphml_keys[key_id]['type']
            if key_for == 'node':
                G.graph['node_default'] = {name: python_type(value)}
            if key_for == 'edge':
                G.graph['edge_default'] = {name: python_type(value)}
        # hyperedges are not supported
        hyperedge = graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML)
        if hyperedge is not None:
            raise nx.NetworkXError(
                "GraphML reader does not support hyperedges")
        # add nodes
        for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML):
            self.add_node(G, node_xml, graphml_keys)
        # add edges
        for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML):
            self.add_edge(G, edge_xml, graphml_keys)
        # add graph data
        data = self.decode_data_elements(graphml_keys, graph_xml)
        G.graph.update(data)
        # switch to Graph or DiGraph if no parallel edges were found.
        if not self.multigraph:
            if G.is_directed():
                return nx.DiGraph(G)
            else:
                return nx.Graph(G)
        else:
            return G

    def add_node(self, G, node_xml, graphml_keys):
        """Add a node to the graph.
        """
        # warn on finding unsupported ports tag
        ports = node_xml.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # find the node by id and cast it to the appropriate type
        node_id = self.node_type(node_xml.get("id"))
        # get data/attributes for node
        data = self.decode_data_elements(graphml_keys, node_xml)
        # NOTE: positional attr_dict is the NetworkX 1.x add_node API.
        G.add_node(node_id, data)

    def add_edge(self, G, edge_element, graphml_keys):
        """Add an edge to the graph.
        """
        # warn on finding unsupported ports tag
        ports = edge_element.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")

        # raise error if we find mixed directed and undirected edges
        directed = edge_element.get("directed")
        if G.is_directed() and directed == 'false':
            raise nx.NetworkXError(\
                "directed=false edge found in directed graph.")
        if (not G.is_directed()) and directed == 'true':
            raise nx.NetworkXError(\
                "directed=true edge found in undirected graph.")

        source = self.node_type(edge_element.get("source"))
        target = self.node_type(edge_element.get("target"))
        data = self.decode_data_elements(graphml_keys, edge_element)
        # GraphML stores edge ids as an attribute -
        # NetworkX uses them as keys in multigraphs too if no key
        # attribute is specified
        edge_id = edge_element.get("id")
        if edge_id:
            data["id"] = edge_id
        if G.has_edge(source, target):
            # mark this as a multigraph
            self.multigraph = True
        if edge_id is None:
            # no id specified, try using 'key' attribute as id
            edge_id = data.pop('key', None)
        G.add_edge(source, target, key=edge_id, **data)

    def decode_data_elements(self, graphml_keys, obj_xml):
        """Use the key information to decode the data XML if present."""
        data = {}
        for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML):
            key = data_element.get("key")
            try:
                data_name = graphml_keys[key]['name']
                data_type = graphml_keys[key]['type']
            except KeyError:
                raise nx.NetworkXError("Bad GraphML data: no key %s" % key)
            text = data_element.text
            # assume anything with subelements is a yfiles extension
            if text is not None and len(list(data_element)) == 0:
                if data_type == bool:
                    data[data_name] = self.convert_bool[text]
                else:
                    data[data_name] = data_type(text)
            elif len(list(data_element)) > 0:
                # Assume yfiles as subelements, try to extract node_label
                node_label = None
                for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']:
                    if node_label is None:
                        node_label = data_element.find(
                            "{%s}%s/{%s}NodeLabel" % (self.NS_Y, node_type, self.NS_Y))
                if node_label is not None:
                    data['label'] = node_label.text
                edge_label = data_element.find(
                    "{%s}PolyLineEdge/{%s}EdgeLabel" % (self.NS_Y, (self.NS_Y)))
                if edge_label is not None:
                    data['label'] = edge_label.text
        return data

    def find_graphml_keys(self, graph_element):
        """Extracts all the keys and key defaults from the xml.
        """
        graphml_keys = {}
        graphml_key_defaults = {}
        for k in graph_element.findall("{%s}key" % self.NS_GRAPHML):
            attr_id = k.get("id")
            attr_type = k.get('attr.type')
            attr_name = k.get("attr.name")
            if attr_type is None:
                # yFiles extension keys carry the type in yfiles.type.
                attr_name = k.get('yfiles.type')
                attr_type = 'yfiles'
            if attr_name is None:
                raise nx.NetworkXError("Unknown key type in file.")
            graphml_keys[attr_id] = {
                "name": attr_name,
                "type": self.python_type[attr_type],
                "for": k.get("for")
            }
            # check for "default" subelement of key element
            default = k.find("{%s}default" % self.NS_GRAPHML)
            if default is not None:
                graphml_key_defaults[attr_id] = default.text
        return graphml_keys, graphml_key_defaults
def verify(self, manager, uri, response, respdata, args):  #@UnusedVariable
    """Verify a WebDAV PROPFIND/REPORT multistatus against expected
    property results: `okprops` ("name" / "name$value" / "name!value") and
    `badprops`, plus optional count / root-element / status checks.

    Returns (result, resulttxt).
    """
    # If no status verification requested, then assume all 2xx codes are OK
    ignores = args.get("ignore", [])
    only = args.get("only", [])

    # Check how many responses are returned
    counts = args.get("count", [])
    if len(counts) == 1:
        count = int(eval(counts[0]))
    else:
        count = None

    # Expected root element (default: DAV:multistatus)
    roots = args.get("root-element", [])
    if len(roots) == 1:
        root = roots[0]
    else:
        root = "{DAV:}multistatus"

    # Expected HTTP status (default: 207 Multi-Status)
    status = args.get("status", [])
    if len(status) == 1:
        status = int(status[0])
    else:
        status = 207

    def normalizeXML(value):
        # Canonicalise an XML-valued property by parse + re-serialise so
        # textual differences don't cause false mismatches.
        if value[0] == '<':
            try:
                tree = ElementTree(file=StringIO(value))
            except Exception:
                # NOTE(review): returns a (False, msg) tuple where the
                # callers expect a string value — confirm intent.
                return False, " Could not parse XML value: %s\n" % (value,)
            value = tostring(tree.getroot())
        return value

    # Get property arguments and split on $ delimited for name, value tuples
    okprops = args.get("okprops", [])
    ok_props_match = []
    okprops_nomatch = {}
    for i in range(len(okprops)):
        p = okprops[i]
        if (p.find("$") != -1):
            # "name$value": must match this value; bare "name$": name only.
            if p.find("$") != len(p) - 1:
                ok_props_match.append((p.split("$")[0], normalizeXML(p.split("$")[1])))
            else:
                ok_props_match.append((p.split("$")[0], None))
        elif (p.find("!") != -1):
            # "name!value": property must NOT have this value.
            if p.find("!") != len(p) - 1:
                okprops_nomatch[p.split("!")[0]] = normalizeXML(p.split("!")[1])
            else:
                okprops_nomatch[p.split("!")[0]] = None
        else:
            ok_props_match.append((p, None))
    badprops = args.get("badprops", [])
    for i in range(len(badprops)):
        p = badprops[i]
        if p.find("$") != -1:
            badprops[i] = (p.split("$")[0], normalizeXML(p.split("$")[1]))
        else:
            badprops[i] = (p, None)

    ok_test_set = set(ok_props_match)
    bad_test_set = set(badprops)

    # Process the multistatus response, extracting all hrefs
    # and comparing with the set defined for this test. Report any
    # mismatches.

    # Must have MULTISTATUS response code
    if response.status != status:
        return False, " HTTP Status for Request: %d\n" % (response.status,)

    # Read in XML
    try:
        tree = ElementTree(file=StringIO(respdata))
    except Exception:
        return False, " Could not parse proper XML response\n"

    # Test root element
    if tree.getroot().tag != root:
        return False, " Invalid root-element specified: %s\n" % (root,)

    result = True
    resulttxt = ""
    ctr = 0
    for response in tree.findall("{DAV:}response"):
        # Get href for this response
        href = response.find("{DAV:}href")
        if href is None:
            return False, " Wrong number of DAV:href elements\n"
        href = urllib.unquote(href.text)
        if href in ignores:
            continue
        if only and href not in only:
            continue
        # In count mode we only tally responses; properties are ignored.
        if count is not None:
            ctr += 1
            continue

        # Get all property status
        ok_status_props = []
        bad_status_props = []
        propstatus = response.findall("{DAV:}propstat")
        for props in propstatus:
            # Determine status for this propstat
            status = props.find("{DAV:}status")
            if status is not None:
                statustxt = status.text
                status = False
                if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                    status = (statustxt[9] == "2")
            else:
                status = False

            # Get properties for this propstat
            prop = props.find("{DAV:}prop")
            for child in prop.getchildren():
                fqname = child.tag
                if len(child):
                    # Copy sub-element data as text into one long string and strip leading/trailing space
                    value = ""
                    for p in child.getchildren():
                        temp = tostring(p)
                        temp = temp.strip()
                        value += temp
                    # If the expectation is name-only, drop the value so set
                    # comparison matches on the name alone.
                    if status:
                        if (fqname, None,) in ok_test_set:
                            value = None
                    else:
                        if (fqname, None,) in bad_test_set:
                            value = None
                elif child.text:
                    value = child.text
                    if status:
                        if (fqname, None,) in ok_test_set:
                            value = None
                    else:
                        if (fqname, None,) in bad_test_set:
                            value = None
                else:
                    value = None
                if status:
                    ok_status_props.append((fqname, value,))
                else:
                    bad_status_props.append((fqname, value,))

        ok_result_set = set(ok_status_props)
        bad_result_set = set(bad_status_props)

        # Now do set difference
        ok_missing = ok_test_set.difference(ok_result_set)
        ok_extras = ok_result_set.difference(ok_test_set)
        bad_missing = bad_test_set.difference(bad_result_set)
        bad_extras = bad_result_set.difference(bad_test_set)

        # Now remove extras that are in the no-match set
        # (iterate a copy so the set can be mutated safely)
        for name, value in [p for p in ok_extras]:
            if name in okprops_nomatch and okprops_nomatch[name] != value:
                ok_extras.remove((name, value))

        if len(ok_missing) + len(ok_extras) + len(bad_missing) + len(bad_extras) != 0:
            if len(ok_missing) != 0:
                l = list(ok_missing)
                resulttxt += " Items not returned in report (OK) for %s:" % href
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(ok_extras) != 0:
                l = list(ok_extras)
                resulttxt += " Unexpected items returned in report (OK) for %s:" % href
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_missing) != 0:
                l = list(bad_missing)
                resulttxt += " Items not returned in report (BAD) for %s:" % href
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_extras) != 0:
                l = list(bad_extras)
                resulttxt += " Unexpected items returned in report (BAD) for %s:" % href
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            result = False

    if count is not None and count != ctr:
        result = False
        resulttxt = " Expected %d response items but got %d." % (count, ctr,)

    return result, resulttxt
def parse_job_report(fd):
    '''
    Parse a CMSSW framework job report XML file.

    Parameters:
        fd -- an open file object positioned at the start of the job
              report XML; its .name attribute is recorded in the output.

    Returns a dictionary describing the job:
        job_reports      -- [filename of this report]
        input_files      -- PFNs of files read
        input_run_lumis  -- set of (run, lumi) pairs read
        events_read      -- total events read
        output_files     -- PFNs of files written
        output_run_lumis -- set of (run, lumi) pairs written
        events_written   -- total events written
        ok               -- False when the XML could not be parsed
        bad_files        -- [filename] when parsing failed, else []
    '''
    filename = fd.name
    output = {
        # Input
        'job_reports': [filename],
        'input_files': [],
        'input_run_lumis': set(),
        'events_read': 0,
        # Output
        'output_files': [],
        'output_run_lumis': set(),
        'events_written': 0,
        'ok': True,
        'bad_files': [],
    }
    tree = ElementTree()
    try:
        tree.parse(fd)
    except Exception:
        # A truncated/corrupt report is flagged as "bad" rather than raised
        # so one broken file does not abort processing of the others.
        # (Was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
        # still propagate.)
        output['ok'] = False
        output['bad_files'] = [filename]
        return output

    def _collect_run_lumis(runs_element, lumi_set):
        # Accumulate (run_number, lumi_number) pairs from a <Runs> element.
        for run in runs_element.findall('Run'):
            run_number = int(run.get('ID'))
            lumi_set.update((run_number, int(lumi.get('ID')))
                            for lumi in run.findall('LumiSection'))

    # Loop over input files
    for input_file in tree.findall('InputFile'):
        output['input_files'].append(input_file.find('PFN').text)
        output['events_read'] += int(input_file.find('EventsRead').text)
        _collect_run_lumis(input_file.find('Runs'), output['input_run_lumis'])

    # Loop over output files
    for output_file in tree.findall('File'):
        output['output_files'].append(output_file.find('PFN').text)
        output['events_written'] += int(output_file.find('TotalEvents').text)
        _collect_run_lumis(output_file.find('Runs'), output['output_run_lumis'])

    # Return info dictionary
    return output
def dofindnew(self, collection, label="", other=False):
    """
    Find the "newest" child resource of a collection.

    Issues a Depth:1 PROPFIND for DAV:getetag and DAV:getlastmodified on
    the collection and returns the href of the child whose
    getlastmodified value is most recent.  Ties are broken by preferring
    the one candidate not in self.previously_found; the returned href is
    added to that set.

    collection -- (uri, user, pswd) tuple; empty user/pswd are not set
                  on the request.
    label      -- logging label for the underlying request.
    other      -- when True, search the parent collection of the given
                  uri and skip the uri itself (i.e. find a sibling).

    Returns the chosen href, or "" when nothing suitable was found.
    """
    hresult = ""
    uri = collection[0]
    if other:
        # Look in the parent collection, remembering the resource itself
        # so it can be skipped below.
        uri = self.manager.server_info.extrasubs(uri)
        skip = uri
        uri = "/".join(uri.split("/")[:-1]) + "/"
    else:
        skip = None
    possible_matches = set()
    req = request(self.manager)
    req.method = "PROPFIND"
    req.ruris.append(uri)
    req.ruri = uri
    req.headers["Depth"] = "1"
    # Only set credentials when non-empty.
    if len(collection[1]):
        req.user = collection[1]
    if len(collection[2]):
        req.pswd = collection[2]
    req.data = data(self.manager)
    req.data.value = """<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:getetag/>
<D:getlastmodified/>
</D:prop>
</D:propfind>
"""
    req.data.content_type = "text/xml"
    result, _ignore_resulttxt, response, respdata = self.dorequest(req, False, False, label="%s | %s" % (label, "FINDNEW"))
    # Only process a successful 207 Multi-Status response with a body.
    if result and (response is not None) and (response.status == 207) and (respdata is not None):
        try:
            tree = ElementTree(file=StringIO(respdata))
        except Exception:
            return hresult
        latest = 0
        request_uri = req.getURI(self.manager.server_info)
        for response in tree.findall("{DAV:}response"):
            # Get href for this response
            href = response.findall("{DAV:}href")
            if len(href) != 1:
                # NOTE(review): this returns a (bool, str) tuple while every
                # other path returns a plain href string -- callers likely
                # expect a string; confirm before relying on this error path.
                return False, " Wrong number of DAV:href elements\n"
            href = href[0].text
            if href != request_uri and (not other or href != skip):
                # Get all property status
                propstatus = response.findall("{DAV:}propstat")
                for props in propstatus:
                    # Determine status for this propstat
                    status = props.findall("{DAV:}status")
                    if len(status) == 1:
                        statustxt = status[0].text
                        status = False
                        if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                            # Any 2xx status counts as success.
                            status = (statustxt[9] == "2")
                    else:
                        status = False
                    if status:
                        # Get properties for this propstat
                        prop = props.findall("{DAV:}prop")
                        for el in prop:
                            # Track the href(s) carrying the most recent
                            # DAV:getlastmodified value seen so far.
                            glm = el.findall("{DAV:}getlastmodified")
                            if len(glm) != 1:
                                continue
                            value = glm[0].text
                            value = rfc822.parsedate(value)
                            value = time.mktime(value)
                            if value > latest:
                                possible_matches.clear()
                                possible_matches.add(href)
                                latest = value
                            elif value == latest:
                                possible_matches.add(href)
            elif not hresult:
                # The request uri (or the skipped sibling) remains a fallback
                # candidate while nothing else has been chosen.
                possible_matches.add(href)
    if len(possible_matches) == 1:
        hresult = possible_matches.pop()
    elif len(possible_matches) > 1:
        # Ambiguous: prefer the single candidate not returned previously.
        not_seen_before = possible_matches - self.previously_found
        if len(not_seen_before) == 1:
            hresult = not_seen_before.pop()
    if hresult:
        self.previously_found.add(hresult)
    return hresult
def verify(self, manager, uri, response, respdata, args):
    """
    Verify a WebDAV 207 Multi-Status response against expected hrefs.

    args may contain (all optional):
        okhrefs / nohrefs / badhrefs -- hrefs expected with 2xx status,
            hrefs that must NOT appear with 2xx status, and hrefs expected
            with a non-2xx status, respectively.
        <numeric keys> -- expected hrefs per specific status code.
        count / totalcount / responsecount -- expected number of OK
            responses (count excludes the collection itself), total OK
            responses, or total responses; each short-circuits the
            href checks.
        prefix -- href prefix ("-" means empty); defaults to the request uri.
        ignoremissing -- when set, unexpected extra hrefs are not reported.

    Returns (result, resulttxt): boolean success plus diagnostic text.
    """
    # If no hrefs requested, then assume none should come back
    okhrefs = args.get("okhrefs", [])
    nohrefs = args.get("nohrefs", [])
    badhrefs = args.get("badhrefs", [])
    # Any all-numeric arg name is a status code with its own href list.
    statushrefs = {}
    for arg in args.keys():
        try:
            code = int(arg)
            statushrefs.setdefault(code, []).append(args[arg])
        except ValueError:
            pass
    count = args.get("count", [])
    totalcount = args.get("totalcount", [])
    responsecount = args.get("responsecount", [])
    prefix = args.get("prefix", [])
    ignoremissing = args.get("ignoremissing", None)
    if len(prefix):
        prefix = prefix[0] if prefix[0] != "-" else ""
    else:
        prefix = uri
    okhrefs = processHrefSubstitutions(okhrefs, prefix)
    nohrefs = processHrefSubstitutions(nohrefs, prefix)
    badhrefs = processHrefSubstitutions(badhrefs, prefix)
    # NOTE(review): eval() on the count expressions -- these come from the
    # test script's args, not the server response; would be unsafe on
    # untrusted input.
    count = [int(eval(i)) for i in count]
    totalcount = [int(eval(i)) for i in totalcount]
    responsecount = [int(eval(i)) for i in responsecount]
    # doOKBad: True -> check ok/bad href sets; False -> check per-status-code
    # sets; None -> neither form of href checking was requested.
    if "okhrefs" in args or "nohrefs" in args or "badhrefs" in args:
        doOKBad = True
    elif statushrefs:
        doOKBad = False
    else:
        doOKBad = None

    # Process the multistatus response, extracting all hrefs
    # and comparing with the set defined for this test. Report any
    # mismatches.

    # Must have MULTISTATUS response code
    if response.status != 207:
        return False, " HTTP Status for Request: %d\n" % (response.status,)

    try:
        tree = ElementTree(file=StringIO(respdata))
    except Exception:
        return False, " HTTP response is not valid XML: %s\n" % (respdata,)

    ok_status_hrefs = []
    bad_status_hrefs = []
    status_code_hrefs = {}
    for response in tree.findall("{DAV:}response"):
        # Get href for this response
        href = response.findall("{DAV:}href")
        if href is None or len(href) != 1:
            return False, " Incorrect/missing DAV:Href element in response"
        href = urllib.unquote(href[0].text).rstrip("/")

        # Verify status
        status = response.findall("{DAV:}status")
        if len(status) == 1:
            statustxt = status[0].text
            status = False
            if statustxt.startswith("HTTP/1.1 ") and (len(statustxt) >= 10):
                # Any 2xx status counts as "OK".
                status = (statustxt[9] == "2")
            try:
                code = int(statustxt[9:12])
            except ValueError:
                code = 0
        else:
            # No DAV:status: the presence of any propstat implies success.
            propstatus = response.findall("{DAV:}propstat")
            if len(propstatus) > 0:
                statustxt = "OK"
                status = True
            else:
                status = False
            code = 0

        if status:
            ok_status_hrefs.append(href)
        else:
            bad_status_hrefs.append(href)
        status_code_hrefs.setdefault(code, set()).add(href)

    ok_result_set = set(ok_status_hrefs)
    ok_test_set = set(okhrefs)
    no_test_set = set(nohrefs)
    bad_result_set = set(bad_status_hrefs)
    bad_test_set = set(badhrefs)

    result = True
    resulttxt = ""

    # Check for count
    if len(count) == 1:
        # "count" excludes the collection itself, hence the +1 / -1.
        if len(ok_result_set) != count[0] + 1:
            result = False
            resulttxt += " %d items returned, but %d items expected" % (len(ok_result_set) - 1, count[0],)
        return result, resulttxt

    # Check for total count
    if len(totalcount) > 0:
        # Add the 2nd value to the 1st if it exists
        if len(totalcount) == 2:
            totalcount[0] += totalcount[1]
        if len(ok_result_set) != totalcount[0]:
            result = False
            resulttxt += " %d items returned, but %d items expected" % (len(ok_result_set), totalcount[0],)
        return result, resulttxt

    # Check for response count
    if len(responsecount) == 1:
        responses = len(ok_result_set) + len(bad_result_set)
        if responses != responsecount[0]:
            result = False
            resulttxt += " %d responses returned, but %d responses expected" % (responses, responsecount[0],)
        return result, resulttxt

    if doOKBad:
        # Now do set difference
        ok_missing = ok_test_set.difference(ok_result_set)
        ok_extras = ok_result_set.difference(ok_test_set) if ignoremissing is None else set()
        no_extras = ok_result_set.intersection(no_test_set)
        bad_missing = bad_test_set.difference(bad_result_set)
        bad_extras = bad_result_set.difference(bad_test_set) if ignoremissing is None else set()

        if len(ok_missing) + len(ok_extras) + len(no_extras) + len(bad_missing) + len(bad_extras) != 0:
            if len(ok_missing) != 0:
                l = list(ok_missing)
                resulttxt += " %d Items not returned in report (OK):" % (len(ok_missing),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(ok_extras) != 0:
                l = list(ok_extras)
                resulttxt += " %d Unexpected items returned in report (OK):" % (len(ok_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(no_extras) != 0:
                l = list(no_extras)
                resulttxt += " %d Unwanted items returned in report (OK):" % (len(no_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_missing) != 0:
                l = list(bad_missing)
                resulttxt += " %d Items not returned in report (BAD):" % (len(bad_missing),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            if len(bad_extras) != 0:
                l = list(bad_extras)
                resulttxt += " %d Unexpected items returned in report (BAD):" % (len(bad_extras),)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
            result = False

    if not doOKBad:
        # Compare the set of status codes seen with the set expected.
        l = list(set(statushrefs.keys()) - set(status_code_hrefs.keys()))
        if l:
            resulttxt += " %d Status Codes not returned in report:" % (len(l),)
            for i in l:
                resulttxt += " " + str(i)
            resulttxt += "\n"
            result = False
        l = list(set(status_code_hrefs.keys()) - set(statushrefs.keys()))
        if l:
            resulttxt += " %d Unexpected Status Codes returned in report:" % (len(l),)
            for i in l:
                resulttxt += " " + str(i)
            resulttxt += "\n"
            result = False
        # For codes present in both, compare the actual hrefs.
        # NOTE(review): set(*statushrefs[code]) star-unpacks the list of
        # href-lists, which only works when exactly one arg supplied that
        # code -- confirm that duplicate numeric args cannot occur.
        for code in set(statushrefs.keys()) & set(status_code_hrefs.keys()):
            l = list(set(*statushrefs[code]) - status_code_hrefs[code])
            if l:
                resulttxt += " %d Items not returned in report for %d:" % (len(l), code,)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
                result = False
            l = list(status_code_hrefs[code] - set(*statushrefs[code]))
            if l:
                resulttxt += " %d Unexpected items returned in report for %d:" % (len(l), code,)
                for i in l:
                    resulttxt += " " + str(i)
                resulttxt += "\n"
                result = False

    return result, resulttxt
try: tree.parse(input_xml) except: fail("could not parse XML input from '%s'" % input_xml) # function: return value corresponding to 'name' in SWIG XML <attributelist> def get_swig_attr(elem, name): if elem == None: return None for attr_elem in elem.findall('attributelist/attribute'): if attr_elem.get("name", "") == name: return attr_elem.get('value') return None # find root of preprocessing interface parse tree for incl_elem in tree.findall('include'): if get_swig_attr(incl_elem.find('module'), 'name') == 'swiglal_preproc': root = incl_elem break else: fail('could not find root of preprocessing interface parse tree') # function: get list of interface headers, ordered by #include precedence def get_header_list(elem): for incl_elem in elem.findall('include'): # find header filename header_name = get_swig_attr(incl_elem, 'name') if header_name == None: fail('could not find filename of header file')
class GraphMLReader(GraphML):
    """Read a GraphML document.  Produces NetworkX graph objects.
    """
    def __init__(self, node_type=str):
        try:
            import xml.etree.ElementTree
        except ImportError:
            raise ImportError('GraphML reader requires '
                              'xml.elementtree.ElementTree')
        # node_type: callable used to cast node ids read from the XML.
        self.node_type=node_type
        self.multigraph=False # assume multigraph and test for parallel edges

    def __call__(self, stream):
        """Parse the stream and yield one NetworkX graph per <graph> element."""
        self.xml = ElementTree(file=stream)
        (keys,defaults) = self.find_graphml_keys(self.xml)
        for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML):
            yield self.make_graph(g, keys, defaults)

    def make_graph(self, graph_xml, graphml_keys, defaults):
        """Build a NetworkX graph from a single <graph> element."""
        # set default graph type
        edgedefault = graph_xml.get("edgedefault", None)
        if edgedefault=='directed':
            G=nx.MultiDiGraph()
        else:
            G=nx.MultiGraph()
        # set defaults for graph attributes
        # NOTE(review): the assignment replaces the whole default dict each
        # iteration, so only the last node/edge default key survives when
        # several are declared.
        for key_id,value in defaults.items():
            key_for=graphml_keys[key_id]['for']
            name=graphml_keys[key_id]['name']
            python_type=graphml_keys[key_id]['type']
            if key_for=='node':
                G.graph['node_default']={name:python_type(value)}
            if key_for=='edge':
                G.graph['edge_default']={name:python_type(value)}
        # hyperedges are not supported
        hyperedge=graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML)
        if hyperedge is not None:
            raise nx.NetworkXError("GraphML reader does not support hyperedges")
        # add nodes
        for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML):
            self.add_node(G, node_xml, graphml_keys)
        # add edges
        for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML):
            self.add_edge(G, edge_xml, graphml_keys)
        # add graph data
        data = self.decode_data_elements(graphml_keys, graph_xml)
        G.graph.update(data)
        # switch to Graph or DiGraph if no parallel edges were found.
        if not self.multigraph:
            if G.is_directed():
                return nx.DiGraph(G)
            else:
                return nx.Graph(G)
        else:
            return G

    def add_node(self, G, node_xml, graphml_keys):
        """Add a node to the graph.
        """
        # warn on finding unsupported ports tag
        ports=node_xml.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # find the node by id and cast it to the appropriate type
        node_id = self.node_type(node_xml.get("id"))
        # get data/attributes for node
        data = self.decode_data_elements(graphml_keys, node_xml)
        G.add_node(node_id, data)

    def add_edge(self, G, edge_element, graphml_keys):
        """Add an edge to the graph.
        """
        # warn on finding unsupported ports tag
        ports=edge_element.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # raise error if we find mixed directed and undirected edges
        directed = edge_element.get("directed")
        if G.is_directed() and directed=='false':
            raise nx.NetworkXError(\
                "directed=false edge found in directed graph.")
        if (not G.is_directed()) and directed=='true':
            raise nx.NetworkXError(\
                "directed=true edge found in undirected graph.")
        source = self.node_type(edge_element.get("source"))
        target = self.node_type(edge_element.get("target"))
        data = self.decode_data_elements(graphml_keys, edge_element)
        # GraphML stores edge ids as an attribute
        # NetworkX uses them as keys in multigraphs too if no key
        # attribute is specified
        edge_id = edge_element.get("id")
        if edge_id:
            data["id"] = edge_id
        if G.has_edge(source,target):
            # mark this as a multigraph
            self.multigraph=True
        if edge_id is None:
            # no id specified, try using 'key' attribute as id
            edge_id=data.pop('key',None)
        G.add_edge(source, target, key=edge_id, **data)

    def decode_data_elements(self, graphml_keys, obj_xml):
        """Use the key information to decode the data XML if present."""
        data = {}
        for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML):
            key = data_element.get("key")
            try:
                data_name=graphml_keys[key]['name']
                data_type=graphml_keys[key]['type']
            except KeyError:
                raise nx.NetworkXError("Bad GraphML data: no key %s"%key)
            text=data_element.text
            # assume anything with subelements is a yfiles extension
            if text is not None and len(list(data_element))==0:
                if data_type==bool:
                    data[data_name] = self.convert_bool[text]
                else:
                    data[data_name] = data_type(text)
            elif len(list(data_element)) > 0:
                # Assume yfiles as subelements, try to extract node_label
                node_label = None
                for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']:
                    if node_label is None:
                        node_label = data_element.find("{%s}%s/{%s}NodeLabel" % (self.NS_Y, node_type, self.NS_Y))
                if node_label is not None:
                    data['label'] = node_label.text
                edge_label = data_element.find("{%s}PolyLineEdge/{%s}EdgeLabel"% (self.NS_Y, (self.NS_Y)))
                if edge_label is not None:
                    data['label'] = edge_label.text
        return data

    def find_graphml_keys(self, graph_element):
        """Extracts all the keys and key defaults from the xml.
        """
        graphml_keys = {}
        graphml_key_defaults = {}
        for k in graph_element.findall("{%s}key" % self.NS_GRAPHML):
            attr_id = k.get("id")
            attr_type=k.get('attr.type')
            attr_name=k.get("attr.name")
            if attr_type is None:
                # No attr.type: treat it as a yfiles extension key.
                attr_name=k.get('yfiles.type')
                attr_type='yfiles'
            if attr_name is None:
                raise nx.NetworkXError("Unknown key type in file.")
            graphml_keys[attr_id] = {
                "name":attr_name,
                "type":self.python_type[attr_type],
                "for":k.get("for")}
            # check for "default" subelement of key element
            default=k.find("{%s}default" % self.NS_GRAPHML)
            if default is not None:
                graphml_key_defaults[attr_id]=default.text
        return graphml_keys,graphml_key_defaults
def dorequest(self, req, details=False, doverify=True, forceverify=False, stats=None, etags=None, label="", count=1):
    """
    Execute one scripted request, returning (result, resulttxt, response, respdata).

    Pseudo-methods (DELETEALL, DELAY, GETNEW, FINDNEW, GETOTHER,
    WAITCOUNT n, WAITDELETEALL n, GETCHANGED) are dispatched before any
    HTTP exchange; some rewrite req.method/req.ruri and fall through to a
    plain GET.  Otherwise the HTTP request is issued, optionally verified
    (doverify / forceverify), and any grab* directives on the request
    extract headers/properties/elements/JSON pointers into server_info
    extra substitutions.

    Returns:
        result    -- boolean success
        resulttxt -- accumulated diagnostic text
        response  -- httplib response object (None for pseudo-methods)
        respdata  -- response body string (None for pseudo-methods)
    """

    req.count = count

    if isinstance(req, pause):
        # Useful for pausing at a particular point
        print "Paused"
        sys.stdin.readline()
        return True, "", None, None

    # Requests gated on unavailable/excluded server features are no-ops.
    if len(req.missingFeatures()) != 0:
        return True, "", None, None
    if len(req.excludedFeatures()) != 0:
        return True, "", None, None

    # Special check for DELETEALL
    if req.method == "DELETEALL":
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            hrefs = self.dofindall(collection, label="%s | %s" % (label, "DELETEALL"))
            if not self.dodeleteall(hrefs, label="%s | %s" % (label, "DELETEALL")):
                return False, "DELETEALL failed for: {r}".format(r=ruri), None, None
        return True, "", None, None

    # Special for delay
    elif req.method == "DELAY":
        # self.ruri contains a numeric delay in seconds
        # (busy-wait rather than sleep)
        delay = int(req.ruri)
        starttime = time.time()
        while (time.time() < starttime + delay):
            pass
        return True, "", None, None

    # Special for GETNEW
    elif req.method == "GETNEW":
        # Find the newest child resource, then fall through to GET it.
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(collection, label=label)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        req.method = "GET"
        req.ruri = "$"

    # Special for FINDNEW
    elif req.method == "FINDNEW":
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(collection, label=label)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        return True, "", None, None

    # Special for GETOTHER
    elif req.method == "GETOTHER":
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(collection, label=label, other=True)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        req.method = "GET"
        req.ruri = "$"

    # Special check for WAITCOUNT
    elif req.method.startswith("WAITCOUNT"):
        # Method is of the form "WAITCOUNT <n>".
        count = int(req.method[10:])
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            waitresult, waitdetails = self.dowaitcount(collection, count, label=label)
            if not waitresult:
                return False, "Count did not change: {w}".format(w=waitdetails), None, None
        else:
            return True, "", None, None

    # Special check for WAITDELETEALL
    elif req.method.startswith("WAITDELETEALL"):
        count = int(req.method[len("WAITDELETEALL"):])
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            waitresult, waitdetails = self.dowaitcount(collection, count, label=label)
            if waitresult:
                hrefs = self.dofindall(collection, label="%s | %s" % (label, "DELETEALL"))
                self.dodeleteall(hrefs, label="%s | %s" % (label, "DELETEALL"))
            else:
                return False, "Count did not change: {w}".format(w=waitdetails), None, None
        else:
            return True, "", None, None

    result = True
    resulttxt = ""
    response = None
    respdata = None

    method = req.method
    uri = req.getURI(self.manager.server_info)
    if (uri == "$"):
        # "$" means "use the location grabbed by a previous pseudo-method".
        uri = self.grabbedlocation
    headers = req.getHeaders(self.manager.server_info)
    data = req.getData()

    # Cache delayed delete
    if req.end_delete:
        self.end_deletes.append((uri, req.user, req.pswd))

    if details:
        resulttxt += " %s: %s\n" % (method, uri)

    # Special for GETCHANGED
    if req.method == "GETCHANGED":
        # Wait for the resource's etag to change, then plain GET it.
        if not self.dowaitchanged(uri, etags[uri], req.user, req.pswd, label=label):
            return False, "Resource did not change", None, None
        method = "GET"

    # Start request timer if required
    if stats:
        stats.startTimer()

    # Do the http request
    http = SmartHTTPConnection(req.host, req.port, self.manager.server_info.ssl)

    if 'User-Agent' not in headers and label is not None:
        headers['User-Agent'] = label.encode("utf-8")

    try:
        puri = list(urlparse.urlparse(uri))
        if req.ruri_quote:
            puri[2] = urllib.quote(puri[2])
        quri = urlparse.urlunparse(puri)

        http.request(method, quri, data, headers)

        response = http.getresponse()

        respdata = None
        respdata = response.read()

    finally:
        http.close()

        # Stop request timer before verification
        if stats:
            stats.endTimer()

    if doverify and (respdata != None):
        result, txt = self.verifyrequest(req, uri, response, respdata)
        resulttxt += txt
    elif forceverify:
        # Any 2xx status passes (Python 2 integer division).
        result = (response.status / 100 == 2)
        if not result:
            resulttxt += "Status Code Error: %d" % response.status

    if req.print_request or (self.manager.print_request_response_on_error and not result):
        resulttxt += "\n-------BEGIN:REQUEST-------\n"
        resulttxt += http.requestData
        resulttxt += "\n--------END:REQUEST--------\n"

    if req.print_response or (self.manager.print_request_response_on_error and not result):
        resulttxt += "\n-------BEGIN:RESPONSE-------\n"
        resulttxt += "%s %s %s\n" % (getVersionStringFromResponse(response), response.status, response.reason,)
        resulttxt += str(response.msg) + "\n" + respdata
        resulttxt += "\n--------END:RESPONSE--------\n"

    # Record the etag of plain GETs for later GETCHANGED checks.
    if etags is not None and req.method == "GET":
        hdrs = response.msg.getheaders("Etag")
        if hdrs:
            etags[uri] = hdrs[0].encode("utf-8")

    if req.graburi:
        self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})

    if req.grabcount:
        # Count multistatus responses, excluding the collection itself.
        ctr = None
        if result and (response is not None) and (response.status == 207) and (respdata is not None):
            tree = ElementTree(file=StringIO(respdata))
            ctr = len(tree.findall("{DAV:}response")) - 1
        if ctr == None or ctr == -1:
            result = False
            resulttxt += "\nCould not count resources in response\n"
        else:
            self.manager.server_info.addextrasubs({req.grabcount: str(ctr)})

    if req.grabheader:
        for hdrname, variable in req.grabheader:
            hdrs = response.msg.getheaders(hdrname)
            if hdrs:
                self.manager.server_info.addextrasubs({variable: hdrs[0].encode("utf-8")})
            else:
                result = False
                resulttxt += "\nHeader %s was not extracted from response\n" % (hdrname,)

    if req.grabproperty:
        if response.status == 207:
            for propname, variable in req.grabproperty:
                # grab the property here
                propvalue = self.extractProperty(propname, respdata)
                if propvalue == None:
                    result = False
                    resulttxt += "\nProperty %s was not extracted from multistatus response\n" % (propname,)
                else:
                    self.manager.server_info.addextrasubs({variable: propvalue.encode("utf-8")})

    if req.grabelement:
        for item in req.grabelement:
            # Items are (path, variables) or (path, parent, variables).
            if len(item) == 2:
                elementpath, variables = item
                parent = None
            else:
                elementpath, parent, variables = item
                parent = self.manager.server_info.extrasubs(parent)
            # grab the property here
            elementvalues = self.extractElements(elementpath, parent, respdata)
            if elementvalues == None:
                result = False
                resulttxt += "\nElement %s was not extracted from response\n" % (elementpath,)
            elif len(variables) != len(elementvalues):
                result = False
                resulttxt += "\n%d found but expecting %d for element %s from response\n" % (len(elementvalues), len(variables), elementpath,)
            else:
                for variable, elementvalue in zip(variables, elementvalues):
                    self.manager.server_info.addextrasubs({variable: elementvalue.encode("utf-8") if elementvalue else ""})

    if req.grabjson:
        for pointer, variables in req.grabjson:
            # grab the JSON value here
            pointervalues = self.extractPointer(pointer, respdata)
            if pointervalues == None:
                result = False
                # NOTE(review): "\P" is not an escape sequence -- this message
                # likely intended "\nPointer"; left as-is here.
                resulttxt += "\Pointer %s was not extracted from response\n" % (pointer,)
            elif len(variables) != len(pointervalues):
                result = False
                resulttxt += "\n%d found but expecting %d for pointer %s from response\n" % (len(pointervalues), len(variables), pointer,)
            else:
                for variable, pointervalue in zip(variables, pointervalues):
                    self.manager.server_info.addextrasubs({variable: pointervalue.encode("utf-8") if pointervalue else ""})

    if req.grabcalprop:
        for propname, variable in req.grabcalprop:
            # grab the property here
            propname = self.manager.server_info.subs(propname)
            propname = self.manager.server_info.extrasubs(propname)
            propvalue = self.extractCalProperty(propname, respdata)
            if propvalue == None:
                result = False
                resulttxt += "\nCalendar property %s was not extracted from response\n" % (propname,)
            else:
                self.manager.server_info.addextrasubs({variable: propvalue.encode("utf-8")})

    if req.grabcalparam:
        for paramname, variable in req.grabcalparam:
            # grab the property here
            paramname = self.manager.server_info.subs(paramname)
            paramname = self.manager.server_info.extrasubs(paramname)
            paramvalue = self.extractCalParameter(paramname, respdata)
            if paramvalue == None:
                result = False
                resulttxt += "\nCalendar Parameter %s was not extracted from response\n" % (paramname,)
            else:
                self.manager.server_info.addextrasubs({variable: paramvalue.encode("utf-8")})

    return result, resulttxt, response, respdata
def dorequest(self, req, details=False, doverify=True, forceverify=False, stats=None, etags=None, label="", count=1):
    """
    Execute one scripted request, returning (result, resulttxt, response, respdata).

    Variant that threads `req` through the helper calls (dofindall,
    dodeleteall, dofindnew, dowaitcount, ...) and adds GETCONTAINS,
    Unix-socket (afunix) and client-certificate support.  Pseudo-methods
    (DELETEALL, DELAY, GETNEW, FINDNEW, GETOTHER, GETCONTAINS,
    WAITCOUNT n, WAITDELETEALL n, GETCHANGED) are dispatched before any
    HTTP exchange; some rewrite req.method/req.ruri and fall through to a
    plain GET.  Otherwise the HTTP request is issued, optionally verified,
    and any grab* directives extract values into server_info extra
    substitutions.

    Returns:
        result    -- boolean success
        resulttxt -- accumulated diagnostic text
        response  -- httplib response object (None for pseudo-methods)
        respdata  -- response body string (None for pseudo-methods)
    """

    req.count = count

    if isinstance(req, pause):
        # Useful for pausing at a particular point
        print "Paused"
        sys.stdin.readline()
        return True, "", None, None

    # Requests gated on unavailable/excluded server features are no-ops.
    if len(req.missingFeatures()) != 0:
        return True, "", None, None
    if len(req.excludedFeatures()) != 0:
        return True, "", None, None

    # Special check for DELETEALL
    if req.method == "DELETEALL":
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            hrefs = self.dofindall(req, collection, label="%s | %s" % (label, "DELETEALL"))
            if not self.dodeleteall(req, hrefs, label="%s | %s" % (label, "DELETEALL")):
                return False, "DELETEALL failed for: {r}".format(r=ruri), None, None
        return True, "", None, None

    # Special for delay
    elif req.method == "DELAY":
        # self.ruri contains a numeric delay in seconds
        # (busy-wait rather than sleep)
        delay = int(req.ruri)
        starttime = time.time()
        while (time.time() < starttime + delay):
            pass
        return True, "", None, None

    # Special for GETNEW
    elif req.method == "GETNEW":
        # Find the newest child resource, then fall through to GET it.
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(req, collection, label=label)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        req.method = "GET"
        req.ruri = "$"

    # Special for FINDNEW
    elif req.method == "FINDNEW":
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(req, collection, label=label)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        return True, "", None, None

    # Special for GETOTHER
    elif req.method == "GETOTHER":
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindnew(req, collection, label=label, other=True)
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        req.method = "GET"
        req.ruri = "$"

    # Special for GETCONTAINS
    elif req.method.startswith("GETCONTAINS"):
        # Method is of the form "GETCONTAINS <text>"; GET the first child
        # resource whose data matches <text>.
        match = req.method[12:]
        collection = (req.ruri, req.user, req.pswd)
        self.grabbedlocation = self.dofindcontains(req, collection, match, label=label)
        if not self.grabbedlocation:
            return False, "No matching resource", None, None
        if req.graburi:
            self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})
        req.method = "GET"
        req.ruri = "$"

    # Special check for WAITCOUNT
    elif req.method.startswith("WAITCOUNT"):
        # Method is of the form "WAITCOUNT <n>".
        count = int(req.method[10:])
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            waitresult, waitdetails = self.dowaitcount(req, collection, count, label=label)
            if not waitresult:
                return False, "Count did not change: {w}".format(w=waitdetails), None, None
        else:
            return True, "", None, None

    # Special check for WAITDELETEALL
    elif req.method.startswith("WAITDELETEALL"):
        count = int(req.method[len("WAITDELETEALL"):])
        for ruri in req.ruris:
            collection = (ruri, req.user, req.pswd)
            waitresult, waitdetails = self.dowaitcount(req, collection, count, label=label)
            if waitresult:
                hrefs = self.dofindall(req, collection, label="%s | %s" % (label, "DELETEALL"))
                self.dodeleteall(req, hrefs, label="%s | %s" % (label, "DELETEALL"))
            else:
                return False, "Count did not change: {w}".format(w=waitdetails), None, None
        else:
            return True, "", None, None

    result = True
    resulttxt = ""
    response = None
    respdata = None

    method = req.method
    uri = req.getURI(self.manager.server_info)
    if (uri == "$"):
        # "$" means "use the location grabbed by a previous pseudo-method".
        uri = self.grabbedlocation
    headers = req.getHeaders(self.manager.server_info)
    data = req.getData()

    # Cache delayed delete
    if req.end_delete:
        self.end_deletes.append((uri, req,))

    if details:
        resulttxt += " %s: %s\n" % (method, uri)

    # Special for GETCHANGED
    if req.method == "GETCHANGED":
        # Wait for the resource's etag to change, then plain GET it.
        if not self.dowaitchanged(
            req,
            uri, etags[uri], req.user, req.pswd,
            label=label
        ):
            return False, "Resource did not change", None, None
        method = "GET"

    # Start request timer if required
    if stats:
        stats.startTimer()

    # Do the http request
    http = SmartHTTPConnection(
        req.host, req.port, self.manager.server_info.ssl,
        afunix=req.afunix,
        cert=os.path.join(self.manager.server_info.certdir, req.cert) if req.cert else None
    )

    if 'User-Agent' not in headers and label is not None:
        headers['User-Agent'] = label.encode("utf-8")

    try:
        puri = list(urlparse.urlparse(uri))
        if req.ruri_quote:
            puri[2] = urllib.quote(puri[2])
        quri = urlparse.urlunparse(puri)

        http.request(method, quri, data, headers)

        response = http.getresponse()

        respdata = None
        respdata = response.read()

    finally:
        http.close()

        # Stop request timer before verification
        if stats:
            stats.endTimer()

    if doverify and (respdata is not None):
        result, txt = self.verifyrequest(req, uri, response, respdata)
        resulttxt += txt
    elif forceverify:
        # Any 2xx status passes (Python 2 integer division).
        result = (response.status / 100 == 2)
        if not result:
            resulttxt += "Status Code Error: %d" % response.status

    if req.print_request or (self.manager.print_request_response_on_error and not result and not req.wait_for_success):
        requesttxt = "\n-------BEGIN:REQUEST-------\n"
        requesttxt += http.requestData
        requesttxt += "\n--------END:REQUEST--------\n"
        self.manager.message("protocol", requesttxt)

    if req.print_response or (self.manager.print_request_response_on_error and not result and not req.wait_for_success):
        responsetxt = "\n-------BEGIN:RESPONSE-------\n"
        responsetxt += "%s %s %s\n" % (getVersionStringFromResponse(response), response.status, response.reason,)
        responsetxt += str(response.msg) + "\n" + respdata
        responsetxt += "\n--------END:RESPONSE--------\n"
        self.manager.message("protocol", responsetxt)

    # Record the etag of plain GETs for later GETCHANGED checks.
    if etags is not None and req.method == "GET":
        hdrs = response.msg.getheaders("Etag")
        if hdrs:
            etags[uri] = hdrs[0].encode("utf-8")

    if req.graburi:
        self.manager.server_info.addextrasubs({req.graburi: self.grabbedlocation})

    if req.grabcount:
        # Count multistatus responses, excluding the collection itself.
        ctr = None
        if result and (response is not None) and (response.status == 207) and (respdata is not None):
            tree = ElementTree(file=StringIO(respdata))
            ctr = len(tree.findall("{DAV:}response")) - 1
        if ctr is None or ctr == -1:
            result = False
            resulttxt += "\nCould not count resources in response\n"
        else:
            self.manager.server_info.addextrasubs({req.grabcount: str(ctr)})

    if req.grabheader:
        for hdrname, variable in req.grabheader:
            hdrs = response.msg.getheaders(hdrname)
            if hdrs:
                self.manager.server_info.addextrasubs({variable: hdrs[0].encode("utf-8")})
            else:
                result = False
                resulttxt += "\nHeader %s was not extracted from response\n" % (hdrname,)

    if req.grabproperty:
        if response.status == 207:
            for propname, variable in req.grabproperty:
                # grab the property here
                propvalue = self.extractProperty(propname, respdata)
                if propvalue is None:
                    result = False
                    resulttxt += "\nProperty %s was not extracted from multistatus response\n" % (propname,)
                else:
                    self.manager.server_info.addextrasubs({variable: propvalue.encode("utf-8")})

    if req.grabelement:
        for item in req.grabelement:
            # Items are (path, variables) or (path, parent, variables).
            if len(item) == 2:
                elementpath, variables = item
                parent = None
            else:
                elementpath, parent, variables = item
                parent = self.manager.server_info.extrasubs(parent)
            # grab the property here
            elementvalues = self.extractElements(elementpath, parent, respdata)
            if elementvalues is None:
                result = False
                resulttxt += "\nElement %s was not extracted from response\n" % (elementpath,)
            elif len(variables) != len(elementvalues):
                result = False
                resulttxt += "\n%d found but expecting %d for element %s from response\n" % (len(elementvalues), len(variables), elementpath,)
            else:
                for variable, elementvalue in zip(variables, elementvalues):
                    self.manager.server_info.addextrasubs({variable: elementvalue.encode("utf-8") if elementvalue else ""})

    if req.grabjson:
        for pointer, variables in req.grabjson:
            # grab the JSON value here
            pointervalues = self.extractPointer(pointer, respdata)
            if pointervalues is None:
                result = False
                # NOTE(review): "\P" is not an escape sequence -- this message
                # likely intended "\nPointer"; left as-is here.
                resulttxt += "\Pointer %s was not extracted from response\n" % (pointer,)
            elif len(variables) != len(pointervalues):
                result = False
                resulttxt += "\n%d found but expecting %d for pointer %s from response\n" % (len(pointervalues), len(variables), pointer,)
            else:
                for variable, pointervalue in zip(variables, pointervalues):
                    self.manager.server_info.addextrasubs({variable: pointervalue.encode("utf-8") if pointervalue else ""})

    if req.grabcalprop:
        for propname, variable in req.grabcalprop:
            # grab the property here
            propname = self.manager.server_info.subs(propname)
            propname = self.manager.server_info.extrasubs(propname)
            propvalue = self.extractCalProperty(propname, respdata)
            if propvalue is None:
                result = False
                resulttxt += "\nCalendar property %s was not extracted from response\n" % (propname,)
            else:
                self.manager.server_info.addextrasubs({variable: propvalue.encode("utf-8")})

    if req.grabcalparam:
        for paramname, variable in req.grabcalparam:
            # grab the property here
            paramname = self.manager.server_info.subs(paramname)
            paramname = self.manager.server_info.extrasubs(paramname)
            paramvalue = self.extractCalParameter(paramname, respdata)
            if paramvalue is None:
                result = False
                resulttxt += "\nCalendar Parameter %s was not extracted from response\n" % (paramname,)
            else:
                self.manager.server_info.addextrasubs({variable: paramvalue.encode("utf-8")})

    return result, resulttxt, response, respdata
class GraphMLReader(GraphML):
    """Read a GraphML document. Produces NetworkX graph objects.

    Call an instance with either ``path`` (a file path / file object) or
    ``string`` (raw GraphML text); it yields one NetworkX graph per
    ``<graph>`` element found in the document.

    NOTE(review): relies on attributes presumably supplied by the
    ``GraphML`` base class (``NS_GRAPHML``, ``NS_Y``, ``python_type``,
    ``convert_bool``) and on module-level ``nx``, ``warnings``,
    ``ElementTree`` and ``fromstring`` — confirm against the rest of
    the file.
    """

    def __init__(self, node_type=str, edge_key_type=int):
        # Fail early with a clear message if the stdlib XML parser is missing.
        try:
            import xml.etree.ElementTree
        except ImportError:
            msg = 'GraphML reader requires xml.elementtree.ElementTree'
            raise ImportError(msg)
        # node_type: callable used to cast node ids read from the XML.
        self.node_type = node_type
        # edge_key_type: callable used to cast edge ids into multigraph keys.
        self.edge_key_type = edge_key_type
        self.multigraph = False  # assume multigraph and test for multiedges
        self.edge_ids = {}  # dict mapping (u,v) tuples to id edge attributes

    def __call__(self, path=None, string=None):
        """Yield one graph per <graph> element in the GraphML input.

        Exactly one of ``path`` or ``string`` must be given; otherwise
        ValueError is raised.
        """
        if path is not None:
            self.xml = ElementTree(file=path)
        elif string is not None:
            self.xml = fromstring(string)
        else:
            raise ValueError("Must specify either 'path' or 'string' as kwarg")
        (keys, defaults) = self.find_graphml_keys(self.xml)
        for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML):
            yield self.make_graph(g, keys, defaults)

    def make_graph(self, graph_xml, graphml_keys, defaults, G=None):
        """Build a NetworkX graph from a <graph> element.

        ``G`` is only passed when recursing into yFiles group nodes, so
        that nested graphs are merged into the parent graph.
        """
        # set default graph type
        edgedefault = graph_xml.get("edgedefault", None)
        if G is None:
            # Start with a multigraph; demoted below if no parallel edges seen.
            if edgedefault == 'directed':
                G = nx.MultiDiGraph()
            else:
                G = nx.MultiGraph()
        # set defaults for graph attributes
        G.graph['node_default'] = {}
        G.graph['edge_default'] = {}
        for key_id, value in defaults.items():
            key_for = graphml_keys[key_id]['for']
            name = graphml_keys[key_id]['name']
            python_type = graphml_keys[key_id]['type']
            if key_for == 'node':
                G.graph['node_default'].update({name: python_type(value)})
            if key_for == 'edge':
                G.graph['edge_default'].update({name: python_type(value)})
        # hyperedges are not supported
        hyperedge = graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML)
        if hyperedge is not None:
            raise nx.NetworkXError("GraphML reader doesn't support hyperedges")
        # add nodes
        for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML):
            self.add_node(G, node_xml, graphml_keys, defaults)
        # add edges
        for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML):
            self.add_edge(G, edge_xml, graphml_keys)
        # add graph data
        data = self.decode_data_elements(graphml_keys, graph_xml)
        G.graph.update(data)
        # switch to Graph or DiGraph if no parallel edges were found.
        if not self.multigraph:
            if G.is_directed():
                G = nx.DiGraph(G)
            else:
                G = nx.Graph(G)
            # Re-attach the GraphML edge ids (collected in add_edge) as an
            # 'id' edge attribute on the simple graph.
            nx.set_edge_attributes(G, values=self.edge_ids, name='id')
        return G

    def add_node(self, G, node_xml, graphml_keys, defaults):
        """Add a node to the graph.
        """
        # warn on finding unsupported ports tag
        ports = node_xml.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # find the node by id and cast it to the appropriate type
        node_id = self.node_type(node_xml.get("id"))
        # get data/attributes for node
        data = self.decode_data_elements(graphml_keys, node_xml)
        G.add_node(node_id, **data)
        # get child nodes
        if node_xml.attrib.get('yfiles.foldertype') == 'group':
            # yFiles group node: its nested <graph> is merged into G.
            graph_xml = node_xml.find("{%s}graph" % self.NS_GRAPHML)
            self.make_graph(graph_xml, graphml_keys, defaults, G)

    def add_edge(self, G, edge_element, graphml_keys):
        """Add an edge to the graph.
        """
        # warn on finding unsupported ports tag
        ports = edge_element.find("{%s}port" % self.NS_GRAPHML)
        if ports is not None:
            warnings.warn("GraphML port tag not supported.")
        # raise error if we find mixed directed and undirected edges
        directed = edge_element.get("directed")
        if G.is_directed() and directed == 'false':
            msg = "directed=false edge found in directed graph."
            raise nx.NetworkXError(msg)
        if (not G.is_directed()) and directed == 'true':
            msg = "directed=true edge found in undirected graph."
            raise nx.NetworkXError(msg)
        source = self.node_type(edge_element.get("source"))
        target = self.node_type(edge_element.get("target"))
        data = self.decode_data_elements(graphml_keys, edge_element)
        # GraphML stores edge ids as an attribute
        # NetworkX uses them as keys in multigraphs too if no key
        # attribute is specified
        edge_id = edge_element.get("id")
        if edge_id:
            # self.edge_ids is used by `make_graph` method for non-multigraphs
            self.edge_ids[source, target] = edge_id
            try:
                edge_id = self.edge_key_type(edge_id)
            except ValueError:  # Could not convert.
                pass
        else:
            edge_id = data.get('key')
        if G.has_edge(source, target):
            # mark this as a multigraph
            self.multigraph = True
        # Use add_edges_from to avoid error with add_edge when `'key' in data`
        G.add_edges_from([(source, target, edge_id, data)])

    def decode_data_elements(self, graphml_keys, obj_xml):
        """Use the key information to decode the data XML if present."""
        data = {}
        for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML):
            key = data_element.get("key")
            try:
                data_name = graphml_keys[key]['name']
                data_type = graphml_keys[key]['type']
            except KeyError:
                raise nx.NetworkXError("Bad GraphML data: no key %s" % key)
            text = data_element.text
            # assume anything with subelements is a yfiles extension
            if text is not None and len(list(data_element)) == 0:
                if data_type == bool:
                    # Ignore cases.
                    # http://docs.oracle.com/javase/6/docs/api/java/lang/
                    # Boolean.html#parseBoolean%28java.lang.String%29
                    data[data_name] = self.convert_bool[text.lower()]
                else:
                    data[data_name] = data_type(text)
            elif len(list(data_element)) > 0:
                # Assume yfiles as subelements, try to extract node_label
                node_label = None
                for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']:
                    pref = "{%s}%s/{%s}" % (self.NS_Y, node_type, self.NS_Y)
                    geometry = data_element.find("%sGeometry" % pref)
                    if geometry is not None:
                        data['x'] = geometry.get('x')
                        data['y'] = geometry.get('y')
                    if node_label is None:
                        node_label = data_element.find("%sNodeLabel" % pref)
                if node_label is not None:
                    data['label'] = node_label.text
                # check all the different types of edges avaivable in yEd.
                for e in [
                    'PolyLineEdge',
                    'SplineEdge',
                    'QuadCurveEdge',
                    'BezierEdge',
                    'ArcEdge'
                ]:
                    pref = "{%s}%s/{%s}" % (self.NS_Y, e, self.NS_Y)
                    edge_label = data_element.find("%sEdgeLabel" % pref)
                    if edge_label is not None:
                        break
                if edge_label is not None:
                    data['label'] = edge_label.text
        return data

    def find_graphml_keys(self, graph_element):
        """Extracts all the keys and key defaults from the xml.
        """
        graphml_keys = {}
        graphml_key_defaults = {}
        for k in graph_element.findall("{%s}key" % self.NS_GRAPHML):
            attr_id = k.get("id")
            attr_type = k.get('attr.type')
            attr_name = k.get("attr.name")
            yfiles_type = k.get("yfiles.type")
            if yfiles_type is not None:
                # yFiles keys carry their own type name; tag them 'yfiles'.
                attr_name = yfiles_type
                attr_type = 'yfiles'
            if attr_type is None:
                attr_type = "string"
                warnings.warn("No key type for id %s. Using string" % attr_id)
            if attr_name is None:
                raise nx.NetworkXError("Unknown key for id %s." % attr_id)
            graphml_keys[attr_id] = {
                "name": attr_name,
                "type": self.python_type[attr_type],
                "for": k.get("for")
            }
            # check for "default" subelement of key element
            default = k.find("{%s}default" % self.NS_GRAPHML)
            if default is not None:
                graphml_key_defaults[attr_id] = default.text
        return graphml_keys, graphml_key_defaults
def verify(self, manager, uri, response, respdata, args): #@UnusedVariable # Must have status 200 if response.status != 200: return False, " HTTP Status Code Wrong: %d" % (response.status,) # Get expected FREEBUSY info users = args.get("attendee", []) busy = args.get("busy", []) tentative = args.get("tentative", []) unavailable = args.get("unavailable", []) # Extract each calendar-data object try: tree = ElementTree(file=StringIO.StringIO(respdata)) except ExpatError: return False, " Could not parse proper XML response\n" for calendar in tree.findall("./{urn:ietf:params:xml:ns:caldav}response/{urn:ietf:params:xml:ns:caldav}calendar-data"): # Parse data as calendar object try: calendar = Calendar.parseText(calendar.text) # Check for calendar if calendar is None: raise ValueError("Not a calendar: %s" % (calendar,)) # Only one component comps = calendar.getComponents("VFREEBUSY") if len(comps) != 1: raise ValueError("Wrong number or unexpected components in calendar") # Must be VFREEBUSY fb = comps[0] # Check for attendee value for attendee in fb.getProperties("ATTENDEE"): if attendee.getValue().getValue() in users: users.remove(attendee.getValue().getValue()) break else: continue # Extract periods busyp = [] tentativep = [] unavailablep = [] for fp in fb.getProperties("FREEBUSY"): periods = fp.getValue().getValues() # Convert start/duration to start/end for i in range(len(periods)): periods[i].getValue().setUseDuration(False) # Check param fbtype = "BUSY" if fp.hasParameter("FBTYPE"): fbtype = fp.getParameterValue("FBTYPE") if fbtype == "BUSY": busyp.extend(periods) elif fbtype == "BUSY-TENTATIVE": tentativep.extend(periods) elif fbtype == "BUSY-UNAVAILABLE": unavailablep.extend(periods) else: raise ValueError("Unknown FBTYPE: %s" % (fbtype,)) # Set sizes must match if ( (len(busy) != len(busyp)) or (len(unavailable) != len(unavailablep)) or (len(tentative) != len(tentativep)) ): raise ValueError("Period list sizes do not match.") # Convert to string sets busy = 
set(busy) busyp = [x.getValue().getText() for x in busyp] busyp = set(busyp) tentative = set(tentative) tentativep = [x.getValue().getText() for x in tentativep] tentativep = set(tentativep) unavailable = set(unavailable) unavailablep = [x.getValue().getText() for x in unavailablep] unavailablep = set(unavailablep) # Compare all periods if len(busyp.symmetric_difference(busy)): raise ValueError("Busy periods do not match") elif len(tentativep.symmetric_difference(tentative)): raise ValueError("Busy-tentative periods do not match") elif len(unavailablep.symmetric_difference(unavailable)): raise ValueError("Busy-unavailable periods do not match") break except InvalidData: return False, " HTTP response data is not a calendar" except ValueError, txt: return False, " HTTP response data is invalid: %s" % (txt,)
def updatebq(self): from xml.etree.cElementTree import ElementTree tree = ElementTree() tree.parse(GSXML) tvlist = [] for iptv in tree.findall("iptv"): name = iptv.findtext("name").title() (protocol, serviceType, bufferSize, epgId) = iptv.findtext("type").split(":") uri = iptv.findtext("uri") if protocol in "livestreamer": uri = "http://localhost:88/" + uri uri = uri.replace(":", "%3a") service = "#SERVICE {s}:0:1:{e}:{e}:0:0:0:0:0:{u}:{n}\n".format(s=serviceType,e=epgId,u=uri,n=name) tvlist.append((name,service)) tvlist=sorted(tvlist, key=lambda channel: channel[0]) #sort by name with open(GSBQ, "w") as f: f.write("#NAME GreekStreamTV\n") for (name, service) in tvlist: f.write(service) com = "cat /usr/lib/enigma2/python/Plugins/Satdreamgr/UpdateBouquet/stream.xml ; rm /usr/lib/enigma2/python/Plugins/Satdreamgr/UpdateBouquet/stream.xml" out = os.popen(com) return list