def async_live_stream(self, output):
    """Extract the metafile link from *output*, download it, and start
    playback of the first ``<url>`` element of the returned XML playlist.

    Returns the result of ``async_play_url`` on success, ``None`` when no
    metafile link was found in *output* or the HTTP status was not 200,
    and ``False`` when the playlist had no ``<url>`` node or the download
    timed out / failed.
    """
    metafile = re.findall(r"HTTP getting '(.*)'\n$", output)
    if not metafile:
        _LOGGER.warning('No link found for %s', self._url)
        _LOGGER.warning("output: %s", output)
        return
    websession = async_get_clientsession(self._hass)
    request = None
    try:
        with async_timeout.timeout(10, loop=self._hass.loop):
            request = yield from websession.get(metafile[0])
            if request.status != 200:
                _LOGGER.error("Error %d on load url %s",
                              request.status, request.url)
                return
            data = yield from request.read()
            # The metafile is an XML playlist; play its first <url> entry.
            root = ET.fromstring(data)
            nodes = root.findall('.//url')
            if nodes:
                return (yield from self.async_play_url(nodes[0].text))
            _LOGGER.error('no url')
    except (asyncio.TimeoutError, aiohttp.errors.ClientError):
        _LOGGER.error("Timeout.")
    finally:
        # Always release the connection, even on early return or error.
        if request is not None:
            yield from request.release()
    return False
def get_url(hass, url):
    """Download *url* and return the raw response body.

    Returns ``None`` on a non-200 status, timeout, or client error; the
    underlying connection is released in every case.
    """
    session = async_get_clientsession(hass)
    response = None
    try:
        with async_timeout.timeout(10, loop=hass.loop):
            response = yield from session.get(url)
            if response.status == 200:
                return (yield from response.read())
            _LOGGER.error("Error %d on load url %s",
                          response.status, response.url)
            return None
    except (asyncio.TimeoutError, aiohttp.errors.ClientError):
        _LOGGER.error('Timeout downloading url.')
    finally:
        if response is not None:
            yield from response.release()
    return None