def parse_description(self):

    def gotPage(x):
        self.debug("got device description from %r" % self.location)
        data, headers = x
        tree = utils.parse_xml(data, 'utf-8').getroot()
        major = tree.findtext('./{%s}specVersion/{%s}major' % (ns, ns))
        minor = tree.findtext('./{%s}specVersion/{%s}minor' % (ns, ns))
        try:
            self.upnp_version = '.'.join((major, minor))
        except:
            self.upnp_version = 'n/a'
        try:
            self.urlbase = tree.findtext('./{%s}URLBase' % ns)
        except:
            import traceback
            self.debug(traceback.format_exc())
        d = tree.find('./{%s}device' % ns)
        if d is not None:
            self.parse_device(d)  # root device

    def gotError(failure, url):
        self.warning("error getting device description from %r", url)
        self.info(failure)

    utils.getPage(self.location).addCallbacks(gotPage, gotError, None, None,
                                              [self.location], None)
def parse_description(self): def gotPage(x): self.debug("got device description from %r" % self.location) data, _headers = x xml_data = None try: xml_data = utils.parse_xml(data, "utf-8") except: self.warning("Invalid device description received from %r", self.location) import traceback self.debug(traceback.format_exc()) if xml_data is not None: tree = xml_data.getroot() major = tree.findtext("./{%s}specVersion/{%s}major" % (ns, ns)) minor = tree.findtext("./{%s}specVersion/{%s}minor" % (ns, ns)) try: self.upnp_version = ".".join((major, minor)) except: self.upnp_version = "n/a" try: self.urlbase = tree.findtext("./{%s}URLBase" % ns) except: import traceback self.debug(traceback.format_exc()) d = tree.find("./{%s}device" % ns) if d is not None: self.parse_device(d) # root device def gotError(failure, url): self.warning("error getting device description from %r", url) self.info(failure) utils.getPage(self.location).addCallbacks(gotPage, gotError, None, None, [self.location], None)
def parse_actions(self):

    def gotPage(x):
        #print "gotPage"
        #print x
        data, headers = x
        tree = utils.parse_xml(data, 'utf-8').getroot()
        ns = "urn:schemas-upnp-org:service-1-0"

        for action_node in tree.findall('.//{%s}action' % ns):
            name = action_node.findtext('{%s}name' % ns)
            arguments = []
            for argument in action_node.findall('.//{%s}argument' % ns):
                arg_name = argument.findtext('{%s}name' % ns)
                arg_direction = argument.findtext('{%s}direction' % ns)
                arg_state_var = argument.findtext('{%s}relatedStateVariable' % ns)
                arguments.append(action.Argument(arg_name, arg_direction,
                                                 arg_state_var))
            self._actions[name] = action.Action(self, name, 'n/a', arguments)

        for var_node in tree.findall('.//{%s}stateVariable' % ns):
            send_events = var_node.attrib.get('sendEvents', 'yes')
            name = var_node.findtext('{%s}name' % ns)
            data_type = var_node.findtext('{%s}dataType' % ns)
            values = []
            """ we need to ignore this, as otherwise we don't get our
                {urn:schemas-beebits-net:service-1-0}X_withVendorDefines
                attribute there
            """
            #for allowed in var_node.findall('.//{%s}allowedValue' % ns):
            #    values.append(allowed.text)
            instance = 0
            self._variables.get(instance)[name] = variable.StateVariable(
                self, name, 'n/a', instance, send_events, data_type, values)
            if send_events.lower() == "yes":
                self.does_sends_events = True

        #print 'service parse:', self, self.device
        self.detection_completed = True
        louie.send('Coherence.UPnP.Service.detection_completed',
                   sender=self.device, device=self.device)
        self.info("send signal Coherence.UPnP.Service.detection_completed for %r" % self)
        """
        if (self.last_time_updated == None):
            if (self.id.endswith('AVTransport') or
                self.id.endswith('RenderingControl')):
                louie.send('Coherence.UPnP.DeviceClient.Service.notified',
                           sender=self.device, service=self)
                self.last_time_updated = time.time()
        """

    def gotError(failure, url):
        self.warning('error requesting', url)
        self.info('failure', failure)
        louie.send('Coherence.UPnP.Service.detection_failed', self.device,
                   device=self.device)

    #print 'getPage', self.get_scpd_url()
    utils.getPage(self.get_scpd_url()).addCallbacks(gotPage, gotError, None,
                                                    None,
                                                    [self.get_scpd_url()],
                                                    None)
def parse_description(self): def gotPage(x): self.debug("got device description from %r" % self.location) data, headers = x tree = utils.parse_xml(data, 'utf-8').getroot() # This is the base for all relative URLs self.url_base = tree.findtext('./{%s}URLBase' % ns) if self.url_base is None or len(self.url_base) == 0: # not given in description so use location # TODO should we parse out only protocol and host or use as is # I think the url_base could contain part of a path as well. parsed = urlparse(self.location) # ensure has trailing slash self.url_base = "%s://%s/" % (parsed[0], parsed[1]) self.debug("url_base %r" % self.url_base) d = tree.find('.//{%s}device' % ns) if d is not None: self.parse_device(d) # root device def gotError(failure, url): self.warning("error requesting %r", url) self.info(failure) utils.getPage(self.location).addCallbacks(gotPage, gotError, None, None, [self.location], None)
def upnp_ImportResource(self, *args, **kwargs):
    print 'upnp_ImportResource', args, kwargs
    SourceURI = kwargs['SourceURI']
    DestinationURI = kwargs['DestinationURI']
    if DestinationURI.endswith('?import'):
        id = DestinationURI.split('/')[-1]
        id = id[:-7]  # remove the ?import
    else:
        return failure.Failure(errorCode(718))
    item = self.get_by_id(id)
    if item == None:
        return failure.Failure(errorCode(718))

    def gotPage(result):
        try:
            import cStringIO as StringIO
        except ImportError:
            import StringIO
        self.backend_import(item, StringIO.StringIO(result[0]))

    def gotError(error, url):
        self.warning("error requesting %s", url)
        self.info(error)
        return failure.Failure(errorCode(718))

    d = getPage(SourceURI)
    d.addCallbacks(gotPage, gotError, None, None, [SourceURI], None)
    transfer_id = 0  # FIXME
    return {'TransferID': transfer_id}
def retrieveChannelItems(self, parent, channel_id):
    uri = "https://www.miroguide.com/api/get_channel?id=%s" % channel_id
    d = utils.getPage(uri)

    def gotItems(result):
        if result is None:
            print("Unable to retrieve items for channel %s" % channel_id)
            return
        data, header = result
        channel = eval(data)
        items = []
        if ('item' in channel):
            items = channel['item']
        for item in items:
            #print "item:", item
            url = item['url']
            description = item['description']
            #print "description:", description
            name = item['name']
            thumbnail_url = None
            if ('thumbnail_url' in channel):
                #print "Thumbnail:", channel['thumbnail_url']
                thumbnail_url = channel['thumbnail_url']
            #size = size['size']
            item = VideoItem(name, description, url, thumbnail_url, self)
            item.parent = parent
            parent.add_child(item, external_id=url)

    def gotError(error):
        print("ERROR: %s" % error)

    d.addCallbacks(gotItems, gotError)
    return d
def followRedirects(self, request): self.info("HTTP redirect ", request, self.stream_url) d = utils.getPage(self.stream_url, method="HEAD", followRedirect=0) def gotHeader(result, request): data, header = result self.info("finally got something %r", header) #FIXME what do we do here if the headers aren't there? self.filesize = int(header['content-length'][0]) self.mimetype = header['content-type'][0] return request def gotError(error, request): # error should be a "Failure" instance at this point self.info("gotError" % error) error_value = error.value if (isinstance(error_value, PageRedirect)): self.info("got PageRedirect %r" % error_value.location) self.stream_url = error_value.location self.resetUri(self.stream_url) return self.followRedirects(request) else: self.warning("Error while retrieving page header for URI ", self.stream_url) self.requestFinished(None) return error d.addCallback(gotHeader, request) d.addErrback(gotError, request) return d
def retrieveChannelItems(self, parent, channel_id):
    uri = f'https://www.miroguide.com/api/get_channel?id={channel_id}'
    d = utils.getPage(uri)

    def gotItems(result):
        if result is None:
            print(f'Unable to retrieve items for channel {channel_id}')
            return
        data, header = result
        channel = eval(data)
        items = []
        if 'item' in channel:
            items = channel['item']
        for item in items:
            # print('item:', item)
            url = item['url']
            description = item['description']
            # print('description:', description)
            name = item['name']
            thumbnail_url = None
            if 'thumbnail_url' in channel:
                # print('Thumbnail:', channel['thumbnail_url'])
                thumbnail_url = channel['thumbnail_url']
            # size = size['size']
            item = VideoItem(name, description, url, thumbnail_url, self)
            item.parent = parent
            parent.add_child(item, external_id=url)

    def gotError(error):
        print(f'ERROR: {error}')

    d.addCallbacks(gotItems, gotError)
    return d
def retrieveCategories(self, parent):
    filepath = self.jukebox_url + "Categories.xml"
    dfr = getPage(filepath)

    def read_categories(data, parent_item, jukebox_url):
        for category in data.findall('category'):
            type = category.get('name')
            category_title = type
            if (type != 'Other'):
                category_title = "By %s" % category_title
            categoryItem = Container(parent_item, category_title)
            parent_item.add_child(categoryItem)
            for index in category.findall('./index'):
                name = index.get('name')
                first_filename = index.text
                root_name = first_filename[:-2]
                self.debug("adding index %s:%s", type, name)
                parent = categoryItem
                if (type == 'Other'):
                    parent = parent_item
                indexItem = LazyContainer(parent, name, None, self.refresh,
                                          self.retrieveIndexMovies,
                                          per_page=1, name=name,
                                          root_name=root_name)
                parent.add_child(indexItem)
        self.init_completed()

    def fail_categories_read(f):
        self.warning("failure reading yamj categories (%s): %r", filepath,
                     f.getErrorMessage())
        return f

    dfr.addCallback(etree.fromstring)
    dfr.addErrback(fail_categories_read)
    dfr.addCallback(read_categories, parent_item=parent,
                    jukebox_url=self.jukebox_url)
    dfr.addErrback(fail_categories_read)
    return dfr
def retrieveChannelItems(self, parent, channel_id):
    uri = "https://www.miroguide.com/api/get_channel?id=%s" % channel_id
    d = utils.getPage(uri)

    def gotItems(result):
        if result is None:
            print "Unable to retrieve items for channel %s" % channel_id
            return
        data, header = result
        channel = eval(data)
        items = []
        if (channel.has_key('item')):
            items = channel['item']
        for item in items:
            #print item
            url = item['url']
            description = item['description']
            name = item['name']
            thumbnail_url = None
            if (channel.has_key('thumbnail_url')):
                #print "Thumbnail:", channel['thumbnail_url']
                thumbnail_url = channel['thumbnail_url']
            #size = size['size']
            item = VideoItem(name, description, url, thumbnail_url, self)
            item.parent = parent
            parent.add_child(item, external_id=url)

    def gotError(error):
        print "ERROR: %s" % error

    d.addCallbacks(gotItems, gotError)
    return d
def followRedirects(self, request):
    self.info(f'HTTP redirect {request} {self.stream_url}')
    d = utils.getPage(self.stream_url, method='HEAD', followRedirect=0)

    def gotHeader(result, request):
        data, header = result
        self.info(f'finally got something {header}')
        # FIXME what do we do here if the headers aren't there?
        self.filesize = int(header['content-length'][0])
        self.mimetype = header['content-type'][0]
        return request

    def gotError(error, request):
        # error should be a 'Failure' instance at this point
        self.info('gotError %s', error)
        error_value = error.value
        if isinstance(error_value, PageRedirect):
            self.info(f'got PageRedirect {error_value.location}')
            self.stream_url = error_value.location
            self.resetUri(self.stream_url)
            return self.followRedirects(request)
        else:
            self.warning(f'Error while retrieving page header '
                         f'for URI {self.stream_url}')
            self.requestFinished(None)
            return error

    d.addCallback(gotHeader, request)
    d.addErrback(gotError, request)
    return d
def callRemote(self, soapmethod, *args, **kwargs):
    soapaction = soapmethod or self.soapaction
    if '#' not in soapaction:
        soapaction = '#'.join((self.namespace[1], soapaction))
    self.action = soapaction.split('#')[1]
    self.info("callRemote %r %r %r %r", self.soapaction, soapmethod,
              self.namespace, self.action)

    headers = {'content-type': 'text/xml ;charset="utf-8"',
               'SOAPACTION': '"%s"' % soapaction, }
    if kwargs.has_key('headers'):
        headers.update(kwargs['headers'])
        del kwargs['headers']

    payload = soap_lite.build_soap_call("{%s}%s" % (self.namespace[1],
                                                    self.action),
                                        kwargs, encoding=None)
    self.info("callRemote soapaction %s %s: ", self.action, self.url)
    self.debug("callRemote payload: %s", payload)

    def gotError(failure, url):
        # failure.value should be an Error object
        self.error("error requesting %s %s %s [%s]", url, failure,
                   failure.value.status, failure.value.response)
        if int(failure.value.status) == 500:
            # generic error, do we have a body?
            # if so parse and return.
            tree = parse_xml(failure.value.response)
            self.log_dom(tree.getroot())
        return failure

    return getPage(self.url, postdata=payload, method="POST",
                   headers=headers).addCallbacks(self._cbGotResult, gotError,
                                                 None, None, [self.url], None)
def retrieveList(self, parent): self.info("Retrieving Shoutcast TV listing...") def got_page(result): if self.retrieveList_attemptCount == 0: self.info("Connection to ShoutCast service successful for TV listing") else: self.warning("Connection to ShoutCast service successful for TV listing after %d attempts.", self.retrieveList_attemptCount) result = result[0] result = utils.parse_xml(result, encoding='utf-8') genres = [] stations = {} for stationResult in result.findall('station'): mimetype = VIDEO_MIMETYPE station_id = stationResult.get('id') bitrate = stationResult.get('br') rating = stationResult.get('rt') name = stationResult.get('name').encode('utf-8') genre = stationResult.get('genre') url = SHOUTCAST_TUNEIN_URL % (station_id) if genres.count(genre) == 0: genres.append(genre) sameStation = stations.get(name) if sameStation == None or bitrate > sameStation['bitrate']: station = {'name': name, 'station_id': station_id, 'mimetype': mimetype, 'id': station_id, 'url': url, 'bitrate': bitrate, 'rating': rating, 'genre': genre} stations[name] = station genreItems = {} for genre in genres: genreItem = self.appendGenre(genre, parent) genreItems[genre] = genreItem for station in list(stations.values()): genre = station.get('genre') parentItem = genreItems[genre] self.appendFeed({'name': station.get('name'), 'mimetype': station['mimetype'], 'id': station.get('station_id'), 'url': station.get('url')}, parentItem) def got_error(error): self.warning("Connection to ShoutCast service failed. Will retry in 5s!") self.debug("%r", error.getTraceback()) # will retry later self.retrieveList_attemptCount += 1 reactor.callLater(5, self.retrieveList, parent=parent) d = utils.getPage(self.shoutcast_ws_url) d.addCallbacks(got_page, got_error)
def parse_description(self):

    def gotPage(x):
        self.debug(f'got device description from {self.location}')
        self.debug(f'data is {x}')
        data, headers = x
        xml_data = None
        try:
            xml_data = etree.fromstring(data)
        except Exception:
            self.warning(
                f'Invalid device description received from {self.location}')
            import traceback
            self.debug(traceback.format_exc())

        if xml_data is not None:
            tree = xml_data
            major = tree.findtext(f'./{{{ns}}}specVersion/{{{ns}}}major')
            minor = tree.findtext(f'./{{{ns}}}specVersion/{{{ns}}}minor')
            try:
                self.upnp_version = '.'.join((major, minor))
            except Exception:
                self.upnp_version = 'n/a'
            try:
                self.urlbase = tree.findtext(f'./{{{ns}}}URLBase')
            except Exception:
                import traceback
                self.debug(traceback.format_exc())
            d = tree.find(f'./{{{ns}}}device')
            if d is not None:
                self.parse_device(d)  # root device
            self.debug(f'device parsed successfully {self.location}')

    def gotError(failure, url):
        self.warning(f'error getting device description from {url}')
        self.info(failure)

    try:
        utils.getPage(self.location).addCallbacks(
            gotPage, gotError, None, None, [self.location], None)
    except Exception as e:
        self.error(f'Error on parsing device description: {e}')
def test_getPage(self):
    content = '0123456789'
    headers = {'accept-ranges': ['bytes'],
               'content-length': ['10'],
               'content-type': ['text/html']}
    d = utils.getPage(self.getURL("file"))
    d.addCallback(self.assertResponse, content, headers)
    return d
def test_getPage(self):
    content = b'0123456789'
    headers = {b'accept-ranges': [b'bytes'],
               b'content-length': [b'10'],
               b'content-type': [b'text/html']}
    d = utils.getPage(self.getURL("file"))
    d.addCallback(self.assertResponse, content, headers)
    return d
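# Both test cases above hand the callback result straight to an
# assertResponse helper.  The following is only a minimal sketch of such a
# helper, not the project's actual implementation; it assumes, as the other
# snippets in this collection do, that utils.getPage fires its Deferred with
# a (content, headers) tuple and that header values arrive as lists.
def assertResponse(self, result, expected_content, expected_headers):
    content, headers = result
    self.assertEqual(content, expected_content)
    for key, value in expected_headers.items():
        # e.g. headers['content-length'] == ['10']
        self.assertEqual(headers.get(key), value)
    return result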
def set_item_size_and_date(self):

    def gotPhoto(result):
        self.debug("gotPhoto %s", result)
        _, headers = result
        length = headers.get('content-length', None)
        modified = headers.get('last-modified', None)
        if length != None:
            self.item.res[0].size = int(length[0])
        if modified != None:
            """ Tue, 06 Feb 2007 15:56:32 GMT """
            self.item.date = datetime(*parsedate_tz(modified[0])[0:6])

    def gotError(failure, url):
        self.warning("error requesting %s %s", failure, url)
        self.info(failure)

    getPage(self.real_url, method='HEAD', timeout=60).addCallbacks(
        gotPhoto, gotError, None, None, [self.real_url], None)
def retrieveItemsForGenre(self, parent, genres, per_page=1, offset=0, page=0):
    genre = genres[page]
    if page < len(genres) - 1:
        parent.childrenRetrievingNeeded = True
    url = '%s?genre=%s' % (self.shoutcast_ws_url, genre)

    if genre_families.has_key(genre):
        family_genres = genre_families[genre]
        for family_genre in family_genres:
            self.append_genre(parent, family_genre)

    def got_page(result):
        self.info('connection to ShoutCast service successful for genre %s' % genre)
        result = utils.parse_xml(result, encoding='utf-8')
        tunein = result.find('tunein')
        if tunein != None:
            tunein = tunein.get('base', '/sbin/tunein-station.pls')
        prot, host_port, _path, _, _ = urlsplit(self.shoutcast_ws_url)
        tunein = prot + '://' + host_port + tunein

        stations = {}
        for stationResult in result.findall('station'):
            mimetype = stationResult.get('mt')
            station_id = stationResult.get('id')
            bitrate = stationResult.get('br')
            name = stationResult.get('name').encode('utf-8')
            # remove useless substrings (eg. '[Shoutcast.com]') from title
            for substring in useless_title_content:
                name = name.replace(substring, "")
            lower_name = name.lower()
            url = '%s?id=%s' % (tunein, stationResult.get('id'))

            sameStation = stations.get(lower_name)
            if sameStation == None or bitrate > sameStation['bitrate']:
                station = {'name': name,
                           'station_id': station_id,
                           'mimetype': mimetype,
                           'id': station_id,
                           'url': url,
                           'bitrate': bitrate}
                stations[lower_name] = station

        for station in stations.values():
            station_id = station.get('station_id')
            name = station.get('name')
            url = station.get('url')
            mimetype = station.get('mimetype')
            item = IRadioItem(station_id, name, url, mimetype)
            parent.add_child(item, external_id=station_id)

        return True

    def got_error(error):
        self.warning("connection to ShoutCast service failed: %s" % url)
        self.debug("%r", error.getTraceback())
        parent.childrenRetrievingNeeded = True  # we retry
        return Failure("Unable to retrieve stations for genre %s" % genre)

    d = utils.getPage(url)
    d.addCallbacks(got_page, got_error)
    return d
def login(self):
    if self.sessionid is not None:
        self.warning('Session seems to be valid', )
        return

    def got_page(result):
        lines = result[0].split('\n')
        for line in lines:
            tuple = line.rstrip().split('=', 1)
            if len(tuple) == 2:
                if tuple[0] == 'session':
                    self.sessionid = tuple[1]
                    self.info(f'Got new sessionid: {self.sessionid}')
                if tuple[0] == 'base_url':
                    if self.host != tuple[1]:
                        self.host = tuple[1]
                        self.info(f'Got new host: {self.host}')
                if tuple[0] == 'base_path':
                    if self.basepath != tuple[1]:
                        self.basepath = tuple[1]
                        self.info(f'Got new path: {self.basepath}')
        self.get_tracks()

    def got_error(error):
        self.warning(f'Login to LastFM Failed! {error}')
        self.debug(f'{error.getTraceback()}')

    # This function might be GPL!
    # Found this code in some other Projects, too.
    def hexify(s):
        result = ''
        for c in s:
            result = result + ('%02x' % ord(c))
        return result

    password = hexify(md5(self.passwd).digest())
    req = \
        self.basepath + '/handshake.php/?version=1&platform=win&username='******'&passwordmd5=' + password \
        + '&language=en&player=coherence'
    utils.getPage('http://' + self.host + req).addCallbacks(
        got_page, got_error, None, None, None, None)
def retrieveList(self, parent): def got_page(result): print "connection to ShoutCast service successful for TV listing" result = result[0] result = utils.parse_xml(result, encoding='utf-8') genres = [] stations = {} for stationResult in result.findall('station'): mimetype = VIDEO_MIMETYPE station_id = stationResult.get('id') bitrate = stationResult.get('br') rating = stationResult.get('rt') name = stationResult.get('name').encode('utf-8') genre = stationResult.get('genre') url = SHOUTCAST_TUNEIN_URL % (station_id) if genres.count(genre) == 0: genres.append(genre) sameStation = stations.get(name) if sameStation == None or bitrate>sameStation['bitrate']: station = {'name':name, 'station_id':station_id, 'mimetype':mimetype, 'id':station_id, 'url':url, 'bitrate':bitrate, 'rating':rating, 'genre':genre } stations[name] = station genreItems = {} for genre in genres: genreItem = self.appendGenre(genre, parent) genreItems[genre] = genreItem for station in stations.values(): genre = station.get('genre') parentItem = genreItems[genre] self.appendFeed({'name':station.get('name'), 'mimetype':station['mimetype'], 'id':station.get('station_id'), 'url':station.get('url')}, parentItem) def got_error(error): print ("connection to ShoutCast service failed! %r" % error) self.debug("%r", error.getTraceback()) d = utils.getPage(self.shoutcast_ws_url) d.addCallbacks(got_page, got_error)
def get_tracks(self):
    if self.getting_tracks:
        return

    def got_page(result):
        result = utils.parse_xml(result, encoding='utf-8')
        self.getting_tracks = False
        print(self.getting_tracks)
        print('got Tracks')
        for track in result.findall('trackList/track'):
            data = {}

            def get_data(name):
                # print track.find(name).text.encode('utf-8')
                return track.find(name).text.encode('utf-8')

            # Fixme: This section needs some work
            print('adding Track')
            data['mimetype'] = 'audio/mpeg'
            data['name'] = get_data('creator') + ' - ' + get_data('title')
            data['title'] = get_data('title')
            data['artist'] = get_data('creator')
            data['creator'] = get_data('creator')
            data['album'] = get_data('album')
            data['duration'] = get_data('duration')
            # FIXME: Image is the wrong tag.
            data['image'] = get_data('image')
            data['url'] = track.find('location').text.encode('utf-8')
            item = self.parent.store.append(data, self.parent)
            self.tracks.append(item)

    def got_error(error):
        self.warning(f'Problem getting Tracks! {error}')
        self.debug(f'{error.getTraceback()}')
        self.getting_tracks = False

    self.getting_tracks = True
    req = \
        self.basepath + '/xspf.php?sk=' + self.sessionid \
        + '&discovery=0&desktop=1.3.1.1'
    utils.getPage('http://' + self.host + req).addCallbacks(
        got_page, got_error, None, None, None, None)
def resend_request(result, old_request):
    # exchange the auth token in the resending request
    new_request = old_request.split('&')
    for part in new_request:
        if part.startswith('auth='):
            new_request[new_request.index(part)] = 'auth=%s' % self.token
            break
    new_request = '&'.join(new_request)
    self.info("ampache_query %r", new_request)
    return utils.getPage(new_request)
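# resend_request() above swaps a fresh auth token into a previously failed
# Ampache request before re-issuing it.  The snippet below is a minimal,
# hypothetical sketch of how such a helper could be chained behind a
# re-authentication step; self.authenticate() (assumed to refresh self.token
# and return a Deferred) and the visibility of resend_request in this scope
# are assumptions, not part of the original code.
def query_with_reauth(self, request):
    d = utils.getPage(request)

    def handle_error(failure):
        # on failure, obtain a new session token, then replay the original
        # request with the token exchanged by resend_request()
        auth_d = self.authenticate()
        auth_d.addCallback(lambda _ignored: resend_request(None, request))
        return auth_d

    d.addErrback(handle_error)
    return d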
def get_children(self, start=0, request_count=0):
    if self.children == None:

        def got_page(result):
            result = utils.parse_xml(result, encoding='utf-8')
            tunein = result.find('tunein')
            if tunein != None:
                tunein = tunein.get('base', '/sbin/tunein-station.pls')
            prot, host_port, path, _, _ = urlsplit(
                self.store.config.get('genrelist',
                                      'http://www.shoutcast.com/sbin/newxml.phtml'))
            tunein = prot + '://' + host_port + tunein

            def append_new(result, s):
                result = result[0].split('\n')
                for line in result:
                    if line.startswith('File1='):
                        s['url'] = line[6:]
                        self.store.append(s, self)
                        break

            l = []
            for station in result.findall('station'):
                if station.get('mt') == 'audio/mpeg':
                    d2 = utils.getPage('%s?id=%s' % (tunein, station.get('id')),
                                       timeout=20)
                    d2.addCallback(append_new,
                                   {'name': station.get('name').encode('utf-8'),
                                    'mimetype': station.get('mt'),
                                    'id': station.get('id'),
                                    'url': None})
                    d2.addErrback(got_error)
                    l.append(d2)

            dl = defer.DeferredList(l)

            def process_items(result):
                print "process_item", result, self.children
                if self.children == None:
                    return []
                if request_count == 0:
                    return self.children[start:]
                else:
                    return self.children[start:request_count]

            dl.addCallback(process_items)
            return dl

        def got_error(error):
            self.warning("connection to ShoutCast service failed! %r", error)
            self.debug("%r", error.getTraceback())

        d = utils.getPage('%s?genre=%s' % (
            self.store.config.get('genrelist',
                                  'http://www.shoutcast.com/sbin/newxml.phtml'),
            self.name))
        d.addCallbacks(got_page, got_error, None, None, None, None)
        return d
    else:
        if request_count == 0:
            return self.children[start:]
        else:
            return self.children[start:request_count]
def upnp_init(self):
    self.current_connection_id = None
    #parent = self.append('iRadio', None)
    #self.append({'name':'GrooveFM','mimetype':'audio/mpeg','url':'http://80.252.111.34:10028/'}, parent)
    #self.append({'name':'Dancing Queen','mimetype':'audio/mpeg','url':'http://netzflocken.de/files/dq.mp3'}, parent)
    parent = self.append({'name': 'iRadio', 'mimetype': 'directory'}, None)

    def got_page(result):
        result = utils.parse_xml(result, encoding='utf-8')
        for genre in result.findall('genre'):
            self.append(
                {'name': genre.get('name').encode('utf-8'),
                 'mimetype': 'directory',
                 'url': '%s?genre=%s' % (
                     self.config.get('genrelist',
                                     'http://www.shoutcast.com/sbin/newxml.phtml'),
                     genre.get('name'))},
                parent)

    def got_error(error):
        self.warning("connection to ShoutCast service failed! %r", error)
        self.debug("%r", error.getTraceback())

    utils.getPage(
        self.config.get('genrelist',
                        'http://www.shoutcast.com/sbin/newxml.phtml')
    ).addCallbacks(got_page, got_error, None, None, None, None)

    if self.server:
        self.server.connection_manager_server.set_variable(
            0, 'SourceProtocolInfo',
            ['http-get:*:audio/mpeg:*', 'http-get:*:audio/x-scpls:*'],
            default=True)
def ampache_query(self, item, start=0, request_count=0, filter=None):
    request = ''.join(
        (self.url, f'?action={item}&auth={self.token}&offset={start:d}'))
    if request_count > 0:
        request = ''.join((request, f'&limit={request_count:d}'))
    if filter is not None:
        request = ''.join((request, f'&filter={filter}'))
    self.info(f'ampache_query {request}')
    d = utils.getPage(request)
    d.addCallback(self.got_response, item, request)
    d.addErrback(self.got_error)
    return d
def _retrieve_children(self, parent=None, **kwargs):
    if self.children_url is None:
        return
    kwargs.update({'limit': self.limit})
    kwargs = {k: v for k, v in list(kwargs.items()) if v is not None}
    url = ("%s?%s" % (self.children_url, urllib.parse.urlencode(kwargs))
           if kwargs else self.children_url)
    d = utils.getPage(url)
    d.addCallbacks(self._got_page, self._got_error)
    return d
def update_data(self):

    def fail(f):
        print('fail', f)
        return f

    dfr = getPage(self.rss_url)
    dfr.addCallback(etree.fromstring)
    dfr.addErrback(fail)
    dfr.addCallback(self.parse_data)
    dfr.addErrback(fail)
    dfr.addBoth(self.queue_update)
    return dfr
def _retrieve_children(self, parent=None, **kwargs):
    if self.children_url is None:
        return
    kwargs.update({'limit': self.limit})
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    url = ("%s?%s" % (self.children_url, urllib.urlencode(kwargs))
           if kwargs else self.children_url)
    d = utils.getPage(url)
    d.addCallbacks(self._got_page, self._got_error)
    return d
def login(self): if self.sessionid != None: self.warning("Session seems to be valid", ) return def got_page(result): lines = result[0].split("\n") for line in lines: tuple = line.rstrip().split("=", 1) if len(tuple) == 2: if tuple[0] == "session": self.sessionid = tuple[1] self.info("Got new sessionid: %r", self.sessionid) if tuple[0] == "base_url": if (self.host != tuple[1]): self.host = tuple[1] self.info("Got new host: %s", self.host) if tuple[0] == "base_path": if (self.basepath != tuple[1]): self.basepath = tuple[1] self.info("Got new path: %s", self.basepath) self.get_tracks() def got_error(error): self.warning("Login to LastFM Failed! %r", error) self.debug("%r", error.getTraceback()) def hexify( s ): # This function might be GPL! Found this code in some other Projects, too. result = "" for c in s: result = result + ("%02x" % ord(c)) return result password = hexify(md5(self.passwd).digest()) req = self.basepath + "/handshake.php/?version=1&platform=win&username="******"&passwordmd5=" + password + "&language=en&player=coherence" utils.getPage("http://" + self.host + req).addCallbacks( got_page, got_error, None, None, None, None)
def update_data(self): def fail(f): print "fail", f return f dfr = getPage(self.rss_url) dfr.addCallback(parse_xml) dfr.addErrback(fail) dfr.addCallback(self.parse_data) dfr.addErrback(fail) dfr.addBoth(self.queue_update) return dfr
def update_data(self): def fail(f): print "fail", f return f dfr = getPage(self.rss_url) dfr.addCallback(etree.fromstring) dfr.addErrback(fail) dfr.addCallback(self.parse_data) dfr.addErrback(fail) dfr.addBoth(self.queue_update) return dfr
def update_data(self):
    # trigger an update of the data

    # fetch the rss
    dfr = getPage(self.rss_url)
    # push it through our xml parser
    dfr.addCallback(fromstring)
    # then parse the data into our models
    dfr.addCallback(self.parse_data)
    return dfr
def ampache_query(self, item, start=0, request_count=0, filter=None):
    request = ''.join((self.url, '?action=%s&auth=%s&offset=%d' % (
        item, self.token, start)))
    if request_count > 0:
        request = ''.join((request, '&limit=%d' % request_count))
    if filter != None:
        request = ''.join((request, '&filter=%s' % filter))
    self.info("ampache_query %r", request)
    d = utils.getPage(request)
    d.addCallback(self.got_response, item, request)
    d.addErrback(self.got_error)
    return d
def update_data(self):
    # trigger an update of the data

    # fetch the rss
    dfr = getPage(self.rss_url)
    # push it through our xml parser
    dfr.addCallback(parse_xml)
    # then parse the data into our models
    dfr.addCallback(self.parse_data)
    return dfr
def callRemote(self, soapmethod, arguments):
    soapaction = soapmethod or self.soapaction
    if '#' not in soapaction:
        soapaction = '#'.join((self.namespace[1], soapaction))
    self.action = soapaction.split('#')[1]
    self.info("callRemote %r %r %r %r", self.soapaction, soapmethod,
              self.namespace, self.action)

    headers = {'content-type': 'text/xml ;charset="utf-8"',
               'SOAPACTION': '"%s"' % soapaction, }
    if arguments.has_key('headers'):
        headers.update(arguments['headers'])
        del arguments['headers']

    payload = soap_lite.build_soap_call("{%s}%s" % (self.namespace[1],
                                                    self.action),
                                        arguments, encoding=None)
    self.info("callRemote soapaction: ", self.action, self.url)
    self.debug("callRemote payload: ", payload)

    def gotError(error, url):
        self.warning("error requesting url %r" % url)
        self.debug(error)
        try:
            tree = parse_xml(error.value.response)
            body = tree.find('{http://schemas.xmlsoap.org/soap/envelope/}Body')
            return failure.Failure(Exception("%s - %s" % (
                body.find('.//{urn:schemas-upnp-org:control-1-0}errorCode').text,
                body.find('.//{urn:schemas-upnp-org:control-1-0}errorDescription').text)))
        except:
            import traceback
            self.debug(traceback.format_exc())
        return error

    return getPage(self.url, postdata=payload, method="POST",
                   headers=headers).addCallbacks(self._cbGotResult, gotError,
                                                 None, None, [self.url], None)
def get_tracks(self):
    if self.getting_tracks == True:
        return

    def got_page(result):
        result = utils.parse_xml(result, encoding='utf-8')
        self.getting_tracks = False
        print self.getting_tracks
        print "got Tracks"
        for track in result.findall('trackList/track'):
            data = {}

            def get_data(name):
                #print track.find(name).text.encode('utf-8')
                return track.find(name).text.encode('utf-8')

            #Fixme: This section needs some work
            print "adding Track"
            data['mimetype'] = 'audio/mpeg'
            data['name'] = get_data('creator') + " - " + get_data('title')
            data['title'] = get_data('title')
            data['artist'] = get_data('creator')
            data['creator'] = get_data('creator')
            data['album'] = get_data('album')
            data['duration'] = get_data('duration')
            #FIXME: Image is the wrong tag.
            data['image'] = get_data('image')
            data['url'] = track.find('location').text.encode('utf-8')
            item = self.parent.store.append(data, self.parent)
            self.tracks.append(item)

    def got_error(error):
        self.warning("Problem getting Tracks! %r", error)
        self.debug("%r", error.getTraceback())
        self.getting_tracks = False

    self.getting_tracks = True
    req = self.basepath + "/xspf.php?sk=" + self.sessionid + "&discovery=0&desktop=1.3.1.1"
    utils.getPage("http://" + self.host + req).addCallbacks(
        got_page, got_error, None, None, None, None)
def gotLoggedInPage(result):
    data, headers = result
    if re.search(r'(?i)<form[^>]* name="loginForm"', data) is not None:
        print 'WARNING: unable to log in: bad username or password'
        return
    #print "logged in in Youtube"

    # Confirm age
    age_form = {
        'next_url': '/',
        'action_confirm': 'Confirm',
    }
    postdata = urlencode(age_form)
    d = getPage(self._AGE_URL, postdata=postdata, headers=std_headers)
def login(self): if self.sessionid != None: self.warning("Session seems to be valid",) return def got_page(result): lines = result[0].split("\n") for line in lines: tuple = line.rstrip().split("=", 1) if len(tuple) == 2: if tuple[0] == "session": self.sessionid = tuple[1] self.info("Got new sessionid: %r",self.sessionid ) if tuple[0] == "base_url": if(self.host != tuple[1]): self.host = tuple[1] self.info("Got new host: %s",self.host ) if tuple[0] == "base_path": if(self.basepath != tuple[1]): self.basepath = tuple[1] self.info("Got new path: %s",self.basepath) self.get_tracks() def got_error(error): self.warning("Login to LastFM Failed! %r", error) self.debug("%r", error.getTraceback()) def hexify(s): # This function might be GPL! Found this code in some other Projects, too. result = "" for c in s: result = result + ("%02x" % ord(c)) return result password = hexify(md5.md5(self.passwd).digest()) req = self.basepath + "/handshake.php/?version=1&platform=win&username="******"&passwordmd5=" + password + "&language=en&player=coherence" utils.getPage("http://" + self.host + req).addCallbacks(got_page, got_error, None, None, None, None)
def gotLoggedInPage(result):
    data, headers = result
    if re.search(r'(?i)<form[^>]* name="loginForm"', data) is not None:
        print 'WARNING: unable to log in: bad username or password'
        return
    print "logged in in Youtube"

    # Confirm age
    age_form = {
        'next_url': '/',
        'action_confirm': 'Confirm',
    }
    postdata = urlencode(age_form)
    d = getPage(self._AGE_URL, postdata=postdata, headers=std_headers)
    d.addCallback(gotAgeConfirmedPage)
def render(self, request):

    if self.stream_url is None:

        def got_playlist(result):
            if result is None:
                self.warning('Error to retrieve playlist - nothing retrieved')
                return requestFinished(result)
            result = result[0].split('\n')
            for line in result:
                if line.startswith('File1='):
                    self.stream_url = line[6:]
                    break
            if self.stream_url is None:
                self.warning('Error to retrieve playlist - inconsistent playlist file')
                return requestFinished(result)
            #self.resetUri(self.stream_url)
            request.uri = self.stream_url
            return self.render(request)

        def got_error(error):
            self.warning('Error to retrieve playlist - unable to retrieve data')
            self.warning(error)
            return None

        playlist_url = self.uri
        d = utils.getPage(playlist_url, timeout=20)
        d.addCallbacks(got_playlist, got_error)
        return server.NOT_DONE_YET

    self.info("this is our render method", request.method, request.uri,
              request.client, request.clientproto)
    self.info("render", request.getAllHeaders())

    if request.clientproto == 'HTTP/1.1':
        self.connection = request.getHeader('connection')
        if self.connection:
            tokens = map(str.lower, self.connection.split(' '))
            if 'close' in tokens:
                d = request.notifyFinish()
                d.addBoth(self.requestFinished)
    else:
        d = request.notifyFinish()
        d.addBoth(self.requestFinished)

    return utils.ReverseProxyUriResource.render(self, request)
def update_data(self, rss_url, container=None):
    '''Creates a deferred chain to retrieve the rdf file, parse and
    extract the metadata and reschedule itself.'''

    def fail(f):
        # TODO fix loggable thing
        self.info(f'fail {f}')
        self.debug(f.getTraceback())
        return f

    dfr = getPage(rss_url)
    dfr.addCallback(etree.fromstring)
    dfr.addErrback(fail)
    dfr.addCallback(self.parse_data, container)
    dfr.addErrback(fail)
    dfr.addBoth(self.queue_update, rss_url, container)
    return dfr
def gotLanguageSet(result):
    data, headers = result

    # No authentication to be performed
    if username is None:
        return

    # Log in
    login_form = {
        'current_form': 'loginForm',
        'next': '/',
        'action_login': '******',
        'username': username,
        'password': password,
    }
    postdata = urlencode(login_form)
    d = getPage(self._LOGIN_URL, method='POST', postdata=postdata,
                headers=std_headers)
    d.addCallbacks(gotLoggedInPage, gotLoginError)
def update_data(self, rss_url, container=None, encoding="utf-8"): """ creates a deferred chain to retrieve the rdf file, parse and extract the metadata and reschedule itself """ def fail(f): self.info("fail %r", f) self.debug(f.getTraceback()) return f dfr = getPage(rss_url) dfr.addCallback(parse_xml, encoding=encoding) dfr.addErrback(fail) dfr.addCallback(self.parse_data, container) dfr.addErrback(fail) dfr.addBoth(self.queue_update, rss_url, container) return dfr
def render(self, request): if self.stream_url is None: def got_playlist(result): if result is None: self.warning( 'Error to retrieve playlist - nothing retrieved' ) return self.requestFinished(result) result = result[0].split('\n') for line in result: if line.startswith('File1='): self.stream_url = line[6:].split(';')[0] break # print('stream URL:', self.stream_url) if self.stream_url is None: self.warning( 'Error to retrieve playlist - ' 'inconsistent playlist file' ) return self.requestFinished(result) # self.resetUri(self.stream_url) request.uri = self.stream_url return self.render(request) def got_error(error): self.warning(error) return None playlist_url = self.uri # print('playlist URL:', playlist_url) d = utils.getPage(playlist_url, timeout=20) d.addCallbacks(got_playlist, got_error) return server.NOT_DONE_YET if request.clientproto == 'HTTP/1.1': self.connection = request.getHeader('connection') if self.connection: tokens = list(map(str.lower, self.connection.split(' '))) if 'close' in tokens: d = request.notifyFinish() d.addBoth(self.requestFinished) else: d = request.notifyFinish() d.addBoth(self.requestFinished) return utils.ReverseProxyUriResource.render(self, request)
def got_page(result):
    result = utils.parse_xml(result, encoding='utf-8')
    tunein = result.find('tunein')
    if tunein != None:
        tunein = tunein.get('base', '/sbin/tunein-station.pls')
    prot, host_port, path, _, _ = urlsplit(
        self.store.config.get('genrelist',
                              'http://www.shoutcast.com/sbin/newxml.phtml'))
    tunein = prot + '://' + host_port + tunein

    def append_new(result, s):
        result = result[0].split('\n')
        for line in result:
            if line.startswith('File1='):
                s['url'] = line[6:]
                self.store.append(s, self)
                break

    l = []
    for station in result.findall('station'):
        if station.get('mt') == 'audio/mpeg':
            d2 = utils.getPage('%s?id=%s' % (tunein, station.get('id')),
                               timeout=20)
            d2.addCallback(append_new,
                           {'name': station.get('name').encode('utf-8'),
                            'mimetype': station.get('mt'),
                            'id': station.get('id'),
                            'url': None})
            d2.addErrback(got_error)
            l.append(d2)

    dl = defer.DeferredList(l)

    def process_items(result):
        self.info("process_item", result, self.children)
        if self.children == None:
            return []
        if request_count == 0:
            return self.children[start:]
        else:
            return self.children[start:request_count]

    dl.addCallback(process_items)
    return dl
def parse_opml(self):

    def fail(f):
        self.info("fail %r", f)
        return f

    def create_containers(data):
        feeds = []
        for feed in data.findall('body/outline'):
            if (feed.attrib['type'] == 'link' and
                    feed.attrib['url'] not in feeds):
                feeds.append(feed.attrib['url'])
                self.update_data(feed.attrib['url'], self.get_next_id())

    dfr = getPage(self.opml)
    dfr.addCallback(etree.fromstring)
    dfr.addErrback(fail)
    dfr.addCallback(create_containers)
    dfr.addErrback(fail)
def _do_request(self, request):
    """
    Send a request, encoded as described in the Gallery Remote protocol.

    request - a dictionary of protocol parameters and values
    """
    if self.auth_token != None:
        request['g2_authToken'] = self.auth_token

    url = self.url
    if (len(request) > 0):
        url += '?'
        for key, value in request.iteritems():
            url += '%s=%s&' % (key, value)

    headers = None
    if self.cookie != '':
        headers = {'Cookie': self.cookie}

    def gotPage(result):
        data, headers = result
        response = self._parse_response(data)
        if response['status'] != '0':
            raise response['status_text']
        try:
            self.auth_token = response['auth_token']
        except:
            pass
        if headers.has_key('set-cookie'):
            cookie_info = headers['set-cookie'][-1]
            self.cookie = cookie_info.split(';')[0]
        return response

    def gotError(error):
        print "Unable to process Gallery2 request: %s" % url
        print "Error: %s" % error
        return None

    d = getPage(url, headers=headers)
    d.addCallback(gotPage)
    d.addErrback(gotError)
    return d
def retrieveGenreList(self):
    """
    Retrieve the whole list of genres from the shoutcast server
    to complete the population of the genre families classification
    (genres not previously classified are put into the "Misc" family)
    ...and fire mediaserver init completion
    """

    def got_page(result):
        if self.retrieveGenreList_attemptCount == 0:
            self.info("Connection to ShoutCast service successful for genre listing")
        else:
            self.warning("Connection to ShoutCast service successful for genre listing after %d attempts." %
                         self.retrieveGenreList_attemptCount)

        result = utils.parse_xml(result, encoding='utf-8')

        genres = {}
        main_synonym_genre = {}
        for main_genre, sub_genres in synonym_genres.items():
            genres[main_genre] = sub_genres
            for genre in sub_genres:
                main_synonym_genre[genre] = main_genre

        for genre in result.findall('genre'):
            name = genre.get('name')
            if name not in main_synonym_genre:
                genres[name] = [name]
                main_synonym_genre[name] = name

        for main_genre, sub_genres in genres.items():
            if not self.genre_parent_items.has_key(main_genre):
                genre_families["Misc"].append(main_genre)

        self.init_completed()

    def got_error(error):
        self.warning("connection to ShoutCast service for genre listing failed - Will retry! %r", error)
        self.debug("%r", error.getTraceback())
        self.retrieveGenreList_attemptCount += 1
        reactor.callLater(5, self.retrieveGenreList)

    d = utils.getPage(self.shoutcast_ws_url)
    d.addCallback(got_page)
    d.addErrback(got_error)
    return d
def retrievePlaylistItems(self, url, parent_item):

    def gotPlaylist(playlist):
        self.info("got playlist")
        items = {}
        if playlist:
            content, header = playlist
            lines = content.splitlines().__iter__()
            line = lines.next()
            while line is not None:
                if re.search('#EXTINF', line):
                    channel = re.match('#EXTINF:.*,(.*)', line).group(1)
                    mimetype = 'video/mpeg'
                    line = lines.next()
                    while re.search('#EXTVLCOPT', line):
                        option = re.match('#EXTVLCOPT:(.*)', line).group(1)
                        if option == 'no-video':
                            mimetype = 'audio/mpeg'
                        line = lines.next()
                    url = line
                    item = PlaylistItem(channel, url, mimetype)
                    parent_item.add_child(item)
                try:
                    line = lines.next()
                except StopIteration:
                    line = None
        return items

    def gotError(error):
        self.warning("Unable to retrieve playlist: %s", url)
        print "Error: %s" % error
        return None

    d = getPage(url)
    d.addCallback(gotPlaylist)
    d.addErrback(gotError)
    return d
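# For reference, gotPlaylist() above walks an extended M3U playlist in which
# an optional '#EXTVLCOPT:no-video' line marks audio-only entries.  The
# sample below is illustrative only (not taken from the source) and shows
# the shape of input the parser handles.
SAMPLE_PLAYLIST = """#EXTM3U
#EXTINF:-1,Some TV channel
http://example.org/video-stream
#EXTINF:-1,Some radio channel
#EXTVLCOPT:no-video
http://example.org/audio-stream
"""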
def flickr_upload(self, image, **kwargs):
    fields = {}
    for k, v in kwargs.items():
        if v != None:
            fields[k] = v

    #fields['api_key'] = self.flickr_api_key
    fields['auth_token'] = self.flickr_authtoken
    fields['api_sig'] = self.flickr_create_api_signature(**fields)
    fields['api_key'] = self.flickr_api_key
    fields['photo'] = image

    (content_type, formdata) = self.encode_multipart_form(fields)
    headers = {"Content-Type": content_type,
               "Content-Length": str(len(formdata))}
    d = getPage("http://api.flickr.com/services/upload/", method="POST",
                headers=headers, postdata=formdata)

    def got_something(result):
        print "got_something", result
        result = parse_xml(result[0], encoding='utf-8')
        result = result.getroot()
        if (result.attrib['stat'] == 'ok' and
                result.find('photoid') != None):
            photoid = result.find('photoid').text
            return photoid
        else:
            error = result.find('err')
            return failure.Failure(Exception(error.attrib['msg']))

    d.addBoth(got_something)
    return d
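# flickr_upload() above relies on a flickr_create_api_signature() method of
# the surrounding class.  The classic (pre-OAuth) Flickr signing scheme such
# a helper follows is the md5 hex digest of the shared secret followed by
# all parameters sorted by name.  Below is a minimal sketch under that
# assumption; self.flickr_api_secret is an assumed attribute and this is not
# necessarily the project's exact implementation.
import hashlib

def flickr_create_api_signature(self, **fields):
    # concatenate key/value pairs in alphabetical key order
    signed = ''.join('%s%s' % (k, fields[k]) for k in sorted(fields))
    return hashlib.md5(
        (self.flickr_api_secret + signed).encode('utf-8')).hexdigest()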