Beispiel #1
0
    def do_GET(self):
        """Proxy a GET request, rewriting manifests (m3u8/mpd) on the way back.

        Responses are passed through untouched when a redirect is in flight,
        the stream type is unknown, no manifest URL is recorded, or the body
        is too large (>1MB) to plausibly be a manifest.
        """
        url = self._get_url('GET')
        response = self._proxy_request('GET', url)
        manifest = self._session.get('manifest')

        # Pass-through conditions (see docstring).
        if self._session.get('redirecting') or not self._session.get(
                'type') or not manifest or int(
                    response.headers.get('content-length', 0)) > 1000000:
            self._output_response(response)
            return

        parse = urlparse(self.path.lower())

        try:
            if self._session.get('type') == 'm3u8' and (
                    url == manifest or parse.path.endswith('.m3u')
                    or parse.path.endswith('.m3u8')):
                self._parse_m3u8(response)

            elif self._session.get('type') == 'mpd' and url == manifest:
                self._session[
                    'manifest'] = None  #unset manifest url so isn't parsed again
                self._parse_dash(response)
        except Exception as e:
            log.exception(e)

            # BUGFIX: use isinstance() instead of an exact type() comparison
            # so subclasses of Exit are handled the same way.
            if isinstance(e, Exit):
                response.status_code = 500
                response.stream.content = str(e).encode('utf-8')
                failed_playback()
            elif url == manifest:
                gui.error(_.QUALITY_PARSE_ERROR)

        self._output_response(response)
Beispiel #2
0
def get_integrations():
    """Download the integrations index; return an empty dict on any failure."""
    try:
        data = Session().gz_json(INTEGRATIONS_URL)
    except Exception as err:
        log.debug('Failed to get integrations')
        log.exception(err)
        return {}
    return data
Beispiel #3
0
    def api_call(self):
        """Wrap an API call: refresh the auth token first when logged in, and
        convert any exception raised inside the wrapped block into APIError."""
        if self.logged_in:
            self.refresh_token()

        try:
            yield
        except Exception as err:
            # Log the underlying cause, then surface a generic API failure.
            log.exception(err)
            raise APIError(_.NO_DATA)
Beispiel #4
0
def install_xbian():
    """Install on XBian: prompt for the sudo password, then run the shared
    Debian installer with a sudo-wrapped command template.

    :raises Error: when the underlying install fails (cause is logged).
    """
    password = gui.input(_.XBIAN_PASSWORD, default='raspberry')
    # Template with a literal {} placeholder for the command to run via sudo.
    sudo_cmd = 'echo "{}" | sudo -S su -c "{{}}"'.format(password)

    try:
        install_debian(sudo_cmd, 'xbian')
    except Exception as err:
        log.exception(err)
        raise Error(_.XBIAN_ERROR)
Beispiel #5
0
    def setup_buttons():
        # Re-read all enabled buttons from the DB and wire each one to GPIO.
        # Returns the list of live gpiozero.Button objects (must stay
        # referenced for their callbacks to keep firing); returns [] on any
        # database-level failure.
        log.debug('Setting up buttons')

        try:
            database.connect()

            # Reset statuses up-front; each button's real status is set below.
            Button.update(status=Button.Status.INACTIVE,
                          error=None).where(Button.enabled == True).execute()
            Button.update(status=Button.Status.DISABLED,
                          error=None).where(Button.enabled == False).execute()
            btns = list(Button.select().where(Button.enabled == True))

            buttons = []
            for btn in btns:
                # Buttons with no configured callbacks are skipped entirely.
                if not btn.has_callbacks():
                    continue

                try:
                    button = gpiozero.Button(btn.pin,
                                             pull_up=btn.pull_up,
                                             bounce_time=btn.bounce_time
                                             or None,
                                             hold_time=btn.hold_time,
                                             hold_repeat=btn.hold_repeat)

                    # Default-argument binding (function=...) captures the
                    # current loop value; a plain closure would late-bind to
                    # the last btn of the loop.
                    if btn.when_pressed:
                        button.when_pressed = lambda function=btn.when_pressed: callback(
                            function)

                    if btn.when_released:
                        button.when_released = lambda function=btn.when_released: callback(
                            function)

                    if btn.when_held:
                        button.when_held = lambda function=btn.when_held: callback(
                            function)
                except Exception as e:
                    log.exception(e)
                    btn.status = Button.Status.ERROR
                    # NOTE(review): stores the exception object itself;
                    # presumably the model field coerces it to text — confirm.
                    btn.error = e
                else:
                    btn.status = Button.Status.ACTIVE
                    buttons.append(button)

                btn.save()

            return buttons
        except Exception as e:
            log.debug(e)
            return []
        finally:
            database.close()
Beispiel #6
0
def start():
    """Shared service main loop: start the proxy, then poll for news and
    updates every 5 seconds until Kodi requests shutdown."""
    log.debug('Shared Service: Started')

    # Kept referenced for the service lifetime
    # (NOTE(review): presumably monitors playback events — confirm).
    player = Player()
    proxy = Proxy()

    try:
        proxy.start()
    except Exception as e:
        log.error('Failed to start proxy server')
        log.exception(e)

    ## Initial wait on boot
    monitor.waitForAbort(5)

    try:
        while not monitor.abortRequested():
            try: _check_news()
            except Exception as e: log.exception(e)

            try: _check_updates()
            except Exception as e: log.exception(e)

            if monitor.waitForAbort(5):
                break
    except KeyboardInterrupt:
        pass
    except Exception as e:
        log.exception(e)

    # BUGFIX: narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # are not swallowed during shutdown; stop failure stays best-effort.
    try:
        proxy.stop()
    except Exception:
        pass

    log.debug('Shared Service: Stopped')
def merge_info(addon_id, merging=False):
    """Return (addon, merge-data) for a Kodi add-on.

    Data is sourced, in order of precedence, from the add-on's own merge
    settings file, its IPTV Manager settings, or the built-in INTEGRATIONS
    table. Version constraints inside the data may clear it.

    :param addon_id: Kodi add-on id to inspect.
    :param merging: when True, raise Error instead of silently returning
        empty data on missing integration or version mismatch.
    :raises Error: only when merging is True (see above).
    """
    addon = get_addon(addon_id, required=True, install=False)
    addon_path = xbmc.translatePath(addon.getAddonInfo('path'))
    merge_path = os.path.join(addon_path, MERGE_SETTING_FILE)

    data = {}
    if os.path.exists(merge_path):
        try:
            with codecs.open(merge_path, 'r', encoding='utf8') as f:
                data = json.load(f)
                data['type'] = TYPE_IPTV_MERGE
        except Exception as e:
            log.exception(e)
            log.debug('failed to parse merge file: {}'.format(merge_path))
            return addon, {}

    elif addon.getSetting('iptv.enabled'):
        data = {
            'type': TYPE_IPTV_MANAGER,
            'playlist': addon.getSetting('iptv.channels_uri'),
            'epg': addon.getSetting('iptv.epg_uri'),
        }

    elif addon_id.lower() in INTEGRATIONS:
        # BUGFIX: copy the entry so setting 'type' below does not mutate the
        # shared module-level INTEGRATIONS table.
        data = dict(INTEGRATIONS[addon_id.lower()])
        data['type'] = TYPE_INTEGRATION

    elif merging:
        raise Error('No integration found for this source')

    min_version = data.get('min_version')
    max_version = data.get('max_version')
    current_version = LooseVersion(addon.getAddonInfo('version'))

    if min_version and current_version < LooseVersion(min_version):
        if merging:
            raise Error('Min version {} required'.format(min_version))
        else:
            data = {}

    if max_version and current_version > LooseVersion(max_version):
        if merging:
            raise Error('Max version {} exceeded'.format(max_version))
        else:
            data = {}

    return addon, data
Beispiel #8
0
def merge_info(addon_id, integrations=None, merging=False):
    """Return (addon, merge-data) for a Kodi add-on.

    Data comes from the add-on's own merge settings file when present,
    otherwise from the downloaded integrations index. Version constraints
    inside the data may clear it (or raise when *merging* is True).

    :param addon_id: Kodi add-on id to inspect.
    :param integrations: pre-fetched integrations index; downloaded lazily
        when None.
    :param merging: when True, raise Error instead of silently returning
        empty data.
    :raises Error: only when merging is True (see above).
    """
    addon = get_addon(addon_id, required=True, install=False)
    addon_path = xbmc.translatePath(addon.getAddonInfo('path'))
    merge_path = os.path.join(addon_path, MERGE_SETTING_FILE)

    if os.path.exists(merge_path):
        # Add-on ships its own merge settings file.
        try:
            with codecs.open(merge_path, 'r', encoding='utf8') as f:
                data = json.load(f)
        except Exception as err:
            log.exception(err)
            log.debug('failed to parse merge file: {}'.format(merge_path))
            return addon, {}
    else:
        if integrations is None:
            integrations = get_integrations()

        data = integrations.get(addon_id) or {}

        if merging:
            if not integrations:
                raise Error('Failed to download integrations')
            elif not data:
                raise Error('No integration found for this source')

    current_version = LooseVersion(addon.getAddonInfo('version'))
    min_version = data.get('min_version')
    max_version = data.get('max_version')

    if min_version and current_version < LooseVersion(min_version):
        if not merging:
            data = {}
        else:
            raise Error('Min version {} required'.format(min_version))

    if max_version and current_version > LooseVersion(max_version):
        if not merging:
            data = {}
        else:
            raise Error('Max version {} exceeded'.format(max_version))

    return addon, data
Beispiel #9
0
    def _proxy_request(self, method, url):
        """Perform the outbound request for *url* and return a Response.

        Non-http(s) urls are treated as local file paths and served directly
        from disk. Redirects are intercepted: the Location header is
        rewritten to point back through this proxy and the body discarded.
        """
        self._session['redirecting'] = False

        # Local-file path (e.g. a generated manifest written to disk).
        if not url.lower().startswith(
                'http://') and not url.lower().startswith('https://'):
            response = Response()
            response.headers = {}
            response.stream = ResponseStream(response)

            if os.path.exists(url):
                response.ok = True
                response.status_code = 200
                with open(url, 'rb') as f:
                    response.stream.content = f.read()
                # One-shot file: delete after serving (kept in dev mode).
                if not ADDON_DEV: remove_file(url)
            else:
                response.ok = False
                response.status_code = 500
                response.stream.content = "File not found: {}".format(
                    url).encode('utf-8')

            return response

        # Optional request/response body dumping for debugging.
        debug = self._session.get('debug_all') or self._session.get(
            'debug_{}'.format(method.lower()))
        if self._post_data and debug:
            with open(
                    xbmc.translatePath('special://temp/{}-request.txt').format(
                        method.lower()), 'wb') as f:
                f.write(self._post_data)

        # Lazily create a single shared requests session (reuse keeps cookies).
        if not self._session.get('session'):
            self._session['session'] = RawSession()
            self._session['session'].set_dns_rewrites(
                self._session.get('dns_rewrites', []))
        else:
            self._session['session'].headers.clear()
            #self._session['session'].cookies.clear() #lets handle cookies in session

        ## Fix any double // in url
        url = fix_url(url)

        retries = 3
        # some reason we get connection errors every so often when using a session. something to do with the socket
        for i in range(retries):
            try:
                response = self._session['session'].request(
                    method=method,
                    url=url,
                    headers=self._headers,
                    data=self._post_data,
                    allow_redirects=False,
                    stream=True)
            except ConnectionError as e:
                # Retry only 'Connection aborted' errors; re-raise on the
                # final attempt or for any other connection error.
                if 'Connection aborted' not in str(e) or i == retries - 1:
                    log.exception(e)
                    raise
            except Exception as e:
                log.exception(e)
                raise
            else:
                break

        response.stream = ResponseStream(response)

        log.debug('{} OUT: {} ({})'.format(method.upper(), url,
                                           response.status_code))

        # Lower-case header names and drop headers we never pass through.
        headers = {}
        for header in response.headers:
            if header.lower() not in REMOVE_OUT_HEADERS:
                headers[header.lower()] = response.headers[header]

        response.headers = headers

        if debug:
            with open(
                    xbmc.translatePath(
                        'special://temp/{}-response.txt').format(
                            method.lower()), 'wb') as f:
                f.write(response.stream.content)

        # Rewrite redirects so the client comes back through this proxy.
        if 'location' in response.headers:
            if '://' not in response.headers['location']:
                # Relative redirect: resolve against the request url first.
                response.headers['location'] = urljoin(
                    url, response.headers['location'])

            self._session['redirecting'] = True
            self._update_urls(url, response.headers['location'])
            response.headers[
                'location'] = PROXY_PATH + response.headers['location']
            response.stream.content = b''

        if 'set-cookie' in response.headers:
            log.debug('set-cookie: {}'.format(response.headers['set-cookie']))
            ## we handle cookies in the requests session
            response.headers.pop('set-cookie')

        self._middleware(url, response)

        return response
Beispiel #10
0
def start():
    """Shared service main loop: set the DRM level, start the proxy, check
    donor status, then poll for news/updates every 60 seconds until Kodi
    requests shutdown."""
    log.debug('Shared Service: Started')

    try:
        set_drm_level()
    except Exception as e:
        log.error('Failed to set DRM level')
        log.exception(e)

    # Kept referenced for the service lifetime
    # (NOTE(review): presumably monitors playback events — confirm).
    player = Player()
    proxy = Proxy()

    try:
        proxy.start()
    except Exception as e:
        log.error('Failed to start proxy server')
        log.exception(e)

    is_donor = False
    try:
        is_donor = check_donor()
    except Exception as e:
        log.error('Failed to check donor')
        log.exception(e)

    if is_donor:
        log.debug('Welcome SlyGuy donor!')

    ## Initial wait on boot
    monitor.waitForAbort(5)

    try:
        while not monitor.abortRequested():
            # News is shown to non-donors always, to donors only by setting.
            if not is_donor or settings.getBool('show_news'):
                try:
                    _check_news()
                except Exception as e:
                    log.exception(e)

            # Rapid update checks are a donor-only, opt-in feature.
            if is_donor and settings.getBool('rapid_updates'):
                try:
                    check_updates()
                except Exception as e:
                    log.exception(e)

            if monitor.waitForAbort(60):
                break
    except KeyboardInterrupt:
        pass
    except Exception as e:
        log.exception(e)

    # BUGFIX: narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # are not swallowed during shutdown; stop failure stays best-effort.
    try:
        proxy.stop()
    except Exception:
        pass

    log.debug('Shared Service: Stopped')
Beispiel #11
0
    def play_media(self, id):
        """Resolve playback for media *id*.

        Queries the play endpoint, prefers an MPEG-DASH rendition, fetches
        its SMIL descriptor, and extracts the stream url plus Widevine
        license template.

        :param id: media id to resolve.
        :return: (url, license) tuple.
        :raises APIError: when no entries are returned, only F4M (Adobe)
            streams exist, or the SMIL response reports an exception.
        """
        self._renew_token()

        params = {
            'form': 'json',
            'types': None,
            'fields': 'id,content',
            'byId': id,
        }

        data = self._session.get(PLAY_URL, params=params).json()

        if not data['entries']:
            raise APIError(_.VIDEO_UNAVAILABLE)

        videos = data['entries'][0]['media$content']

        # Prefer MPEG-DASH; otherwise fall back to the first listed video.
        chosen = videos[0]
        for video in videos:
            if video['plfile$format'].upper() == 'MPEG-DASH':
                chosen = video
                break

        if chosen['plfile$format'].upper() == 'F4M':
            raise APIError(_.ADOBE_ERROR)

        params = {
            'auth': userdata.get('play_token'),
            'formats': 'mpeg-dash',
            'tracking': True,
            'format': 'SMIL'
        }

        resp = self._session.get(chosen['plfile$url'], params=params)

        root = ET.fromstring(resp.text)
        strip_namespaces(root)

        # BUGFIX: compare to None with `is not None` (PEP 8); the explicit
        # None check also avoids the falsy-Element pitfall of ElementTree.
        if root.find("./body/seq/ref/param[@name='exception']") is not None:
            error_msg = root.find("./body/seq/ref").attrib.get('abstract')
            raise APIError(_(_.PLAY_ERROR, message=error_msg))

        try:
            # Best-effort: playback can continue without the concurrency lock.
            data = self._concurrency_unlock(root)
        except Exception as e:
            log.debug(
                'Failed to get concurrency lock. Attempting to continue without it...'
            )
            log.exception(e)

        ref = root.find(".//switch/ref")
        url = ref.attrib['src']

        # trackingData is a '|'-separated list of key=value pairs.
        tracking = {}
        for item in ref.find(
                "./param[@name='trackingData']").attrib['value'].split('|'):
            key, value = item.split('=')
            tracking[key] = value

        license = WIDEVINE_URL.format(token=userdata.get('play_token'),
                                      pid=tracking['pid'],
                                      challenge='B{SSM}')

        return url, license
Beispiel #12
0
def epg(output, **kwargs):
    """Write an XMLTV EPG file to *output*.

    First tries to download a pre-built EPG for the user's country; on any
    failure (or when no URL exists) falls back to scraping the API
    per-channel using a small pool of worker threads.
    """
    country = userdata.get('country', DEFAULT_COUNTRY)
    epg_url = EPG_URLS.get(country)

    if epg_url:
        try:
            Session().chunked_dl(epg_url, output)
            if epg_url.endswith('.gz'):
                gzip_extract(output)
            return True
        except Exception as e:
            log.exception(e)
            log.debug('Failed to get remote epg: {}. Fall back to scraping'.format(epg_url))

    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        def process_data(id, data):
            # Serialise one channel's events as <programme> elements.
            # NOTE(review): program_count is never incremented or used.
            program_count = 0
            for event in data:
                channel = event['channelTag']
                start = arrow.get(event['startDateTime']).to('utc')
                stop = arrow.get(event['endDateTime']).to('utc')
                title = event.get('title')
                subtitle = event.get('episodeTitle')
                series = event.get('seasonNumber')
                episode = event.get('episodeNumber')
                desc = event.get('longSynopsis')
                icon = event.get('thumbnailImagePaths', {}).get('THUMB')

                icon = u'<icon src="{}"/>'.format(icon) if icon else ''
                episode = u'<episode-num system="onscreen">S{}E{}</episode-num>'.format(series, episode) if series and episode else ''
                subtitle = u'<sub-title>{}</sub-title>'.format(escape(subtitle)) if subtitle else ''

                f.write(u'<programme channel="{id}" start="{start}" stop="{stop}"><title>{title}</title>{subtitle}{icon}{episode}<desc>{desc}</desc></programme>'.format(
                    id=channel, start=start.format('YYYYMMDDHHmmss Z'), stop=stop.format('YYYYMMDDHHmmss Z'), title=escape(title), subtitle=subtitle, episode=episode, icon=icon, desc=escape(desc)))

        ids = []
        no_events = []
        for row in api.channels():
            f.write(u'<channel id="{id}"></channel>'.format(id=row['id']))
            ids.append(row['id'])

            # Channels flagged with no events are tolerated if fetching fails.
            if not row.get('events'):
                no_events.append(row['id'])

        log.debug('{} Channels'.format(len(ids)))
        log.debug('No Events: {}'.format(no_events))

        start = arrow.now('Africa/Johannesburg')
        EPG_DAYS = settings.getInt('epg_days', 3)
        WORKERS  = 3

        queue_data   = queue.Queue()
        queue_failed = queue.Queue()
        queue_tasks  = queue.Queue()
        queue_errors = queue.Queue()

        for id in ids:
            queue_tasks.put(id)

        def xml_worker():
            # Single writer thread: serialises fetched data into the file,
            # so workers never touch `f` concurrently.
            while True:
                id, data = queue_data.get()
                try:
                    process_data(id, data)
                except Exception as e:
                    queue_errors.put(e)
                finally:
                    queue_data.task_done()

        def worker():
            # Fetch one channel's EPG; queue failures for a later retry pass.
            while True:
                id = queue_tasks.get()
                try:
                    data = api.epg(id, start.shift(days=-1), start.shift(days=EPG_DAYS+1), attempts=1)
                    if not data:
                        raise Exception()

                    queue_data.put([id, data])
                except Exception as e:
                    queue_failed.put(id)
                finally:
                    queue_tasks.task_done()

        for i in range(WORKERS):
            thread = threading.Thread(target=worker)
            thread.daemon = True
            thread.start()

        thread = threading.Thread(target=xml_worker)
        thread.daemon = True
        thread.start()

        # Wait for all fetches, then for all fetched data to be written.
        queue_tasks.join()
        queue_data.join()

        if not queue_errors.empty():
            raise Exception('Error processing data')

        # Retry failures serially with more attempts, unless the channel was
        # already expected to have no events.
        while not queue_failed.empty():
            id = queue_failed.get_nowait()
            data = api.epg(id, start.shift(days=-1), start.shift(days=EPG_DAYS+1), attempts=1 if id in no_events else 10)
            if data:
                process_data(id, data)
            elif id in no_events:
                log.debug('Skipped {}: Expected 0 events'.format(id))
            else:
                raise Exception('Failed {}'.format(id))

        f.write(u'</tv>')
    def epgs(self, refresh=True):
        """Merge all enabled EPG sources into one XMLTV file.

        Each source is parsed into a temp file; on failure, the source's
        previous output is spliced back in via its stored byte range in the
        last working file. The merged temp file then replaces the working
        copy and is copied to the output path.

        :param refresh: when False and both output files exist, skip the
            merge and return the existing working path.
        :return: path to the merged working file.
        """
        epg_path = os.path.join(self.output_path, EPG_FILE_NAME)
        working_path = os.path.join(self.working_path, EPG_FILE_NAME)
        epg_path_tmp = os.path.join(self.working_path, EPG_FILE_NAME + '_tmp')

        if not refresh and xbmcvfs.exists(epg_path) and xbmcvfs.exists(
                working_path):
            return working_path

        start_time = time.time()
        database.connect()

        try:
            # NOTE(review): if progressbg() itself raised, `progress` would be
            # unbound in the finally block — confirm it cannot raise.
            progress = gui.progressbg() if self.forced else None

            epgs = list(EPG.select().where(EPG.enabled == True).order_by(
                EPG.id))
            # Clear stored state for disabled sources.
            EPG.update({
                EPG.start_index: 0,
                EPG.end_index: 0,
                EPG.results: []
            }).where(EPG.enabled == False).execute()

            # Optionally restrict parsing to EPG ids actually referenced by
            # channels (drops orphan programme data).
            if settings.getBool('remove_epg_orphans', True):
                epg_ids = Channel.epg_ids()
            else:
                epg_ids = None

            # Add EPG urls referenced by playlists but not configured as
            # sources (these are unsaved EPG rows: no id).
            if self._playlist_epgs:
                epg_urls = [x.path.lower() for x in epgs]
                for url in self._playlist_epgs:
                    if url.lower() not in epg_urls:
                        epg = EPG(source_type=EPG.TYPE_URL,
                                  path=url,
                                  enabled=1)
                        epgs.append(epg)
                        epg_urls.append(url.lower())

            with FileIO(epg_path_tmp, 'wb') as _out:
                _out.write(b'<?xml version="1.0" encoding="UTF-8"?><tv>')

                for count, epg in enumerate(epgs):
                    count += 1

                    if progress:
                        progress.update(
                            int(count * (100 / len(epgs))),
                            'Merging EPG ({}/{})'.format(count, len(epgs)),
                            _(epg.label, _bold=True))

                    # Remember where this source's data starts in the output,
                    # so a failure can rewind to here.
                    file_index = _out.tell()

                    epg_start = time.time()
                    try:
                        log.debug('Processing: {}'.format(epg.path))
                        self._process_source(epg, METHOD_EPG, self.tmp_file)
                        with FileIO(self.tmp_file, 'rb') as _in:
                            parser = XMLParser(_out, epg_ids)
                            parser.parse(_in, epg)
                    except Exception as e:
                        log.exception(e)
                        result = [int(time.time()), EPG.ERROR, str(e)]
                    else:
                        result = [
                            int(time.time()), EPG.OK,
                            '{} ({:.2f}s)'.format(parser.epg_count(),
                                                  time.time() - epg_start)
                        ]
                        epg.results.insert(0, result)

                    if result[1] == EPG.ERROR:
                        # Rewind any partial output, then try to splice in
                        # this source's byte range from the previous working
                        # file instead.
                        _seek_file(_out, file_index)

                        if epg.start_index > 0:
                            if copy_partial_data(working_path, _out,
                                                 epg.start_index,
                                                 epg.end_index):
                                log.debug(
                                    'Last used XML data loaded successfully')
                                epg.start_index = file_index
                                epg.end_index = _out.tell()
                            else:
                                log.debug('Failed to load last XML data')
                                epg.start_index = 0
                                epg.end_index = 0
                                _seek_file(_out, file_index)

                        # Collapse consecutive error results into one entry.
                        if epg.results and epg.results[0][1] == EPG.ERROR:
                            epg.results[0] = result
                        else:
                            epg.results.insert(0, result)

                    # Keep only the three most recent results; unsaved
                    # playlist-derived rows (no id) are not persisted.
                    epg.results = epg.results[:3]
                    if epg.id:
                        epg.save()
                    remove_file(self.tmp_file)

                _out.write(b'</tv>')

            remove_file(working_path)
            shutil.move(epg_path_tmp, working_path)

            _safe_copy(working_path, epg_path)
        finally:
            database.close()
            if progress: progress.close()
            remove_file(self.tmp_file)
            remove_file(epg_path_tmp)

        log.debug('EPG Merge Time: {0:.2f}'.format(time.time() - start_time))

        return working_path
    def playlists(self, refresh=True):
        """Merge all enabled playlist sources into one M3U file.

        Processes each playlist source into the channel database, then
        writes every enabled channel (TV first, then radio) to the working
        playlist and copies it to the output path.

        :param refresh: when False and both output files exist, skip the
            merge and return the existing working path.
        :return: path to the merged working playlist.
        """
        playlist_path = os.path.join(self.output_path, PLAYLIST_FILE_NAME)
        working_path = os.path.join(self.working_path, PLAYLIST_FILE_NAME)

        if not refresh and xbmcvfs.exists(playlist_path) and xbmcvfs.exists(
                working_path):
            return working_path

        start_time = time.time()
        database.connect()

        try:
            progress = gui.progressbg() if self.forced else None

            playlists = list(Playlist.select().where(
                Playlist.enabled == True).order_by(Playlist.order))
            Playlist.update({
                Playlist.results: []
            }).where(Playlist.enabled == False).execute()
            # Drop non-custom channels belonging to removed/disabled playlists.
            Channel.delete().where(
                Channel.custom == False,
                Channel.playlist.not_in(playlists)).execute()

            for count, playlist in enumerate(playlists):
                count += 1

                if progress:
                    progress.update(
                        int(count * (100 / len(playlists))),
                        'Merging Playlist ({}/{})'.format(
                            count, len(playlists)),
                        _(playlist.label, _bold=True))

                playlist_start = time.time()

                error = None
                try:
                    log.debug('Processing: {}'.format(playlist.path))

                    if playlist.source_type != Playlist.TYPE_CUSTOM:
                        self._process_source(playlist, METHOD_PLAYLIST,
                                             self.tmp_file)

                        # Import the playlist's channels atomically.
                        with database.db.atomic() as transaction:
                            try:
                                added = self._process_playlist(
                                    playlist, self.tmp_file)
                            except:
                                transaction.rollback()
                                raise
                    else:
                        added = len(playlist.channels)
                except AddonError as e:
                    error = e
                except Error as e:
                    error = e
                    log.exception(e)
                except Exception as e:
                    error = e
                    log.exception(e)
                else:
                    playlist.results.insert(0, [
                        int(time.time()), Playlist.OK,
                        '{} Channels ({:.2f}s)'.format(
                            added,
                            time.time() - playlist_start)
                    ])
                    error = None

                if error:
                    result = [int(time.time()), Playlist.ERROR, str(error)]
                    # Collapse consecutive error results into one entry.
                    if playlist.results and playlist.results[0][
                            1] == Playlist.ERROR:
                        playlist.results[0] = result
                    else:
                        playlist.results.insert(0, result)

                remove_file(self.tmp_file)

                # Keep only the three most recent results.
                playlist.results = playlist.results[:3]
                playlist.save()

            count = 0
            starting_ch_no = settings.getInt('start_ch_no', 1)

            with codecs.open(working_path, 'w', encoding='utf8') as outfile:
                outfile.write(u'#EXTM3U')

                group_order = settings.get('group_order')
                if group_order:
                    outfile.write(u'\n\n#EXTGRP:{}'.format(group_order))

                chno = starting_ch_no
                tv_groups = []
                for channel in Channel.playlist_list(radio=False):
                    if channel.chno is None:
                        channel.chno = chno
                    chno = channel.chno + 1

                    tv_groups.extend(channel.groups)

                    outfile.write(u'\n\n')
                    outfile.write(channel.get_lines())
                    count += 1

                chno = starting_ch_no
                for channel in Channel.playlist_list(radio=True):
                    if channel.chno is None:
                        channel.chno = chno
                    chno = channel.chno + 1

                    # Rename radio groups that clash with TV group names.
                    # BUGFIX: use a dedicated counter here; the original
                    # reused `count`, clobbering the written-channel total
                    # used for the log line and empty-playlist check below.
                    new_groups = []
                    for group in channel.groups:
                        dup_count = 1
                        while group in tv_groups:
                            group = _(_.RADIO_GROUP, group=group)
                            if dup_count > 1:
                                group = u'{} #{}'.format(group, dup_count)
                            dup_count += 1
                        new_groups.append(group)

                    channel.groups = new_groups

                    outfile.write(u'\n\n')
                    outfile.write(channel.get_lines())
                    count += 1

                if count == 0:
                    # Some players reject an empty M3U; write a dummy entry.
                    outfile.write(u'\n\n#EXTINF:-1,EMPTY PLAYLIST\nhttp')

            log.debug('Wrote {} Channels'.format(count))
            Playlist.after_merge()
            _safe_copy(working_path, playlist_path)
        finally:
            database.close()
            if progress: progress.close()
            remove_file(self.tmp_file)

        log.debug('Playlist Merge Time: {0:.2f}'.format(time.time() -
                                                        start_time))

        return working_path
Beispiel #15
0
import gpiozero

# Select a gpiozero pin factory at import time. INSTALLED records whether a
# usable (real or mock) GPIO backend is available.
INSTALLED = False
if SYSTEM == 'mock':
    # No real GPIO on this system: use gpiozero's mock pin factory so the
    # rest of the add-on can still run.
    from gpiozero.pins.mock import MockFactory as Factory
    gpiozero.Device.pin_factory = Factory()
    log.debug('System not supported. Using mock factory')
    INSTALLED = True
else:
    try:
        from gpiozero.pins.rpigpio import RPiGPIOFactory as Factory
        gpiozero.Device.pin_factory = Factory()
        # Probe a pin to confirm the RPi.GPIO backend actually works here.
        gpiozero.Device.pin_factory.pin(BCM_PINS[0]).state
        INSTALLED = True
    except Exception as e:
        log.exception(e)


def install():
    """Copy the shared object into place, then run the installer for the
    detected system.

    NOTE(review): only the OSMC branch returns True; the other branches
    (and unknown systems) return None — presumably intentional, confirm.
    """
    remove_file(SO_DST)
    shutil.copy(SO_SRC, SO_DST)

    # SYSTEM matches at most one branch; unknown systems fall through.
    if SYSTEM == 'xbian':
        install_xbian()
    elif SYSTEM == 'osmc':
        install_osmc()
        return True
    elif SYSTEM == 'raspbian':
        install_raspbian()
    elif SYSTEM == 'libreelec':
        install_libreelec()
Beispiel #16
0
def epg(output, **kwargs):
    """Write an XMLTV EPG file to *output*.

    For non-local regions, tries the pre-built EPG download first; otherwise
    (or on download failure) scrapes the API in HOUR_SHIFT-hour windows up
    to the configured number of days.
    """
    region = settings.getEnum('region', REGIONS, default=US)

    if region not in (LOCAL, CUSTOM):
        epg_url = MH_EPG_URL.format(region=region)

        try:
            Session().chunked_dl(epg_url, output)
            if epg_url.endswith('.gz'):
                gzip_extract(output)
            return True
        except Exception as e:
            log.exception(e)
            log.debug(
                'Failed to get remote epg: {}. Fall back to scraping'.format(
                    epg_url))

    def process_epg(channels):
        # Write <programme> elements for every channel; returns the number
        # of programmes written.
        count = 0
        for id in channels:
            channel = channels[id]
            for row in channel.get('programs', []):
                start = arrow.get(row['start']).to('utc')
                stop = arrow.get(row['stop']).to('utc')
                title = row['title']
                description = row['episode']['description']
                subtitle = row['episode']['name']
                category = row['episode']['genre']
                icon = None

                # Drop subtitles that merely repeat the title.
                if subtitle.lower().strip() == title.lower().strip():
                    subtitle = None

                f.write(
                    u'<programme channel="{}" start="{}" stop="{}"><title>{}</title><desc>{}</desc>{}{}{}</programme>'
                    .format(
                        id,
                        start.format('YYYYMMDDHHmmss Z'),
                        stop.format('YYYYMMDDHHmmss Z'),
                        escape(title),
                        escape(description),
                        u'<icon src="{}"/>'.format(escape(icon))
                        if icon else '',
                        u'<sub-title>{}</sub-title>'.format(escape(subtitle))
                        if subtitle else '',
                        u'<category>{}</category>'.format(escape(category))
                        if category else '',
                    ))

                count += 1

        return count

    HOUR_SHIFT = 6
    now = arrow.now()
    # Align to the top of the current hour, in UTC.
    start = now.replace(minute=0, second=0, microsecond=0).to('utc')
    stop = start.shift(hours=HOUR_SHIFT)
    END_TIME = start.shift(days=settings.getInt('epg_days', 3))

    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        channels = api.epg(start, stop)
        for id in channels:
            f.write(u'<channel id="{id}"/>'.format(id=id))

        added = process_epg(channels)
        while stop < END_TIME:
            start = stop
            stop = start.shift(hours=HOUR_SHIFT)

            channels = api.epg(start, stop)
            added = process_epg(channels)

            # Stop early once a window yields no more programmes than there
            # are channels — presumably the end of available data; confirm.
            if added <= len(channels):
                break

        f.write(u'</tv>')