# Example 1
def start():
    """Background service loop: run the IPTV merge on a schedule or on demand.

    Polls once per second until Kodi requests shutdown. A merge is triggered
    when forced via the '_iptv_merge_force_run' window property, once on boot
    (if the 'boot_merge' setting is enabled), or when the configured reload
    interval has elapsed since the last run. After a merge that reports a
    change, the IPTV Simple Client is optionally restarted so it picks up
    the new playlist/EPG.
    """
    http = HTTP()

    monitor = xbmc.Monitor()
    restart_queued = False

    boot_merge = settings.getBool('boot_merge', False)
    # Clear any stale force-run flag left over from a previous session.
    set_kodi_string('_iptv_merge_force_run')

    while not monitor.waitForAbort(1):
        # Keep the HTTP API state in sync with the setting on every tick.
        if settings.getBool('http_api', False):
            http.start()
        else:
            http.stop()

        forced = get_kodi_string('_iptv_merge_force_run') or 0

        if forced or boot_merge or (settings.getBool('auto_merge', True) and time.time() - userdata.get('last_run', 0) > settings.getInt('reload_time_hours', 12) * 3600):
            # Mark a run as in-progress so other processes don't re-trigger it.
            set_kodi_string('_iptv_merge_force_run', '1')

            url = router.url_for('run_merge', forced=int(forced))
            dirs, files = xbmcvfs.listdir(url)
            # The merge endpoint returns a single pseudo-file: first character
            # is the result code, the remainder is a URL-quoted message.
            result, msg = int(files[0][0]), unquote_plus(files[0][1:])
            if result:
                restart_queued = True

            userdata.set('last_run', int(time.time()))
            set_kodi_string('_iptv_merge_force_run')

        if restart_queued and settings.getBool('restart_pvr', False):
            if forced:
                progress = gui.progressbg(heading='Reloading IPTV Simple Client')

            if KODI_VERSION > 18:
                restart_queued = False
                # Kodi 19+: poking any IPTV Simple setting makes it reload
                # its sources without a full addon restart.
                try:
                    xbmcaddon.Addon(IPTV_SIMPLE_ID).setSetting('m3uPathType', '0')
                except Exception:
                    # Best-effort: the addon may be missing or disabled.
                    pass

            elif forced or (not xbmc.getCondVisibility('Pvr.IsPlayingTv') and not xbmc.getCondVisibility('Pvr.IsPlayingRadio')):
                # Kodi 18: toggle the addon off and on to force a reload, but
                # avoid interrupting live TV / radio unless the run was forced.
                restart_queued = False
                kodi_rpc('Addons.SetAddonEnabled', {'addonid': IPTV_SIMPLE_ID, 'enabled': False})

                wait_delay = 4
                for i in range(wait_delay):
                    if monitor.waitForAbort(1):
                        break
                    if forced:
                        progress.update((i+1)*int(100/wait_delay))

                kodi_rpc('Addons.SetAddonEnabled', {'addonid': IPTV_SIMPLE_ID, 'enabled': True})

            if forced:
                progress.update(100)
                progress.close()

        boot_merge = False

    http.stop()
    def epgs(self, refresh=True):
        """Merge all enabled EPG sources into a single XMLTV file.

        Writes all sources into a temp file, swaps it into the working path,
        then copies the result to the output path. Failed sources fall back
        to the byte range they contributed to the previous merge, when one
        exists. Returns the working path; when *refresh* is False and both
        the output and working files already exist, the previous result is
        reused without re-fetching anything.
        """
        epg_path = os.path.join(self.output_path, EPG_FILE_NAME)
        working_path = os.path.join(self.working_path, EPG_FILE_NAME)
        epg_path_tmp = os.path.join(self.working_path, EPG_FILE_NAME + '_tmp')

        if not refresh and xbmcvfs.exists(epg_path) and xbmcvfs.exists(
                working_path):
            return working_path

        start_time = time.time()
        database.connect()

        try:
            # Only show a progress dialog when the merge was user-triggered.
            progress = gui.progressbg() if self.forced else None

            epgs = list(EPG.select().where(EPG.enabled == True).order_by(
                EPG.id))
            # Reset cached merge state on disabled sources so they don't
            # carry stale offsets/results.
            EPG.update({
                EPG.start_index: 0,
                EPG.end_index: 0,
                EPG.results: []
            }).where(EPG.enabled == False).execute()

            if settings.getBool('remove_epg_orphans', True):
                # Restrict parsing to channel ids actually referenced by
                # the playlists; None means keep everything.
                epg_ids = Channel.epg_ids()
            else:
                epg_ids = None

            if self._playlist_epgs:
                # Add EPG urls advertised by playlists that aren't already
                # configured as sources (case-insensitive de-dup on path).
                epg_urls = [x.path.lower() for x in epgs]
                for url in self._playlist_epgs:
                    if url.lower() not in epg_urls:
                        epg = EPG(source_type=EPG.TYPE_URL,
                                  path=url,
                                  enabled=1)
                        epgs.append(epg)
                        epg_urls.append(url.lower())

            with FileIO(epg_path_tmp, 'wb') as _out:
                _out.write(b'<?xml version="1.0" encoding="UTF-8"?><tv>')

                for count, epg in enumerate(epgs):
                    count += 1

                    if progress:
                        progress.update(
                            int(count * (100 / len(epgs))),
                            'Merging EPG ({}/{})'.format(count, len(epgs)),
                            _(epg.label, _bold=True))

                    # Remember where this source starts so we can rewind the
                    # output if it fails part-way through.
                    file_index = _out.tell()

                    epg_start = time.time()
                    try:
                        log.debug('Processing: {}'.format(epg.path))
                        self._process_source(epg, METHOD_EPG, self.tmp_file)
                        with FileIO(self.tmp_file, 'rb') as _in:
                            parser = XMLParser(_out, epg_ids)
                            parser.parse(_in, epg)
                    except Exception as e:
                        log.exception(e)
                        result = [int(time.time()), EPG.ERROR, str(e)]
                    else:
                        result = [
                            int(time.time()), EPG.OK,
                            '{} ({:.2f}s)'.format(parser.epg_count(),
                                                  time.time() - epg_start)
                        ]
                        epg.results.insert(0, result)

                    if result[1] == EPG.ERROR:
                        # Discard whatever partial data this source wrote.
                        _seek_file(_out, file_index)

                        if epg.start_index > 0:
                            # Fall back to the byte range this source
                            # contributed to the previous working file.
                            if copy_partial_data(working_path, _out,
                                                 epg.start_index,
                                                 epg.end_index):
                                log.debug(
                                    'Last used XML data loaded successfully')
                                epg.start_index = file_index
                                epg.end_index = _out.tell()
                            else:
                                log.debug('Failed to load last XML data')
                                epg.start_index = 0
                                epg.end_index = 0
                                _seek_file(_out, file_index)

                        # Collapse consecutive error results so repeated
                        # failures don't push out the last success entry.
                        if epg.results and epg.results[0][1] == EPG.ERROR:
                            epg.results[0] = result
                        else:
                            epg.results.insert(0, result)

                    # Keep only the three most recent results per source.
                    epg.results = epg.results[:3]
                    if epg.id:
                        epg.save()
                    remove_file(self.tmp_file)

                _out.write(b'</tv>')

            remove_file(working_path)
            shutil.move(epg_path_tmp, working_path)

            _safe_copy(working_path, epg_path)
        finally:
            database.close()
            if progress: progress.close()
            remove_file(self.tmp_file)
            remove_file(epg_path_tmp)

        log.debug('EPG Merge Time: {0:.2f}'.format(time.time() - start_time))

        return working_path
    def playlists(self, refresh=True):
        """Merge all enabled playlists into a single M3U file.

        Processes each enabled playlist into the channel database, then
        writes all channels (TV first, then radio) to the working path and
        copies it to the output path. Radio groups that collide with TV
        group names are renamed. Returns the working path; when *refresh*
        is False and both the output and working files already exist, the
        previous result is reused without re-fetching anything.
        """
        playlist_path = os.path.join(self.output_path, PLAYLIST_FILE_NAME)
        working_path = os.path.join(self.working_path, PLAYLIST_FILE_NAME)

        if not refresh and xbmcvfs.exists(playlist_path) and xbmcvfs.exists(
                working_path):
            return working_path

        start_time = time.time()
        database.connect()

        try:
            # Only show a progress dialog when the merge was user-triggered.
            progress = gui.progressbg() if self.forced else None

            playlists = list(Playlist.select().where(
                Playlist.enabled == True).order_by(Playlist.order))
            # Reset results on disabled playlists and drop non-custom
            # channels belonging to playlists no longer in the merge.
            Playlist.update({
                Playlist.results: []
            }).where(Playlist.enabled == False).execute()
            Channel.delete().where(
                Channel.custom == False,
                Channel.playlist.not_in(playlists)).execute()

            for count, playlist in enumerate(playlists):
                count += 1

                if progress:
                    progress.update(
                        int(count * (100 / len(playlists))),
                        'Merging Playlist ({}/{})'.format(
                            count, len(playlists)),
                        _(playlist.label, _bold=True))

                playlist_start = time.time()

                error = None
                try:
                    log.debug('Processing: {}'.format(playlist.path))

                    if playlist.source_type != Playlist.TYPE_CUSTOM:
                        self._process_source(playlist, METHOD_PLAYLIST,
                                             self.tmp_file)

                        # Import channels atomically so a mid-parse failure
                        # leaves the previous channels intact.
                        with database.db.atomic() as transaction:
                            try:
                                added = self._process_playlist(
                                    playlist, self.tmp_file)
                            except:
                                transaction.rollback()
                                raise
                    else:
                        added = len(playlist.channels)
                except AddonError as e:
                    # Expected/user-facing errors: record without a traceback.
                    error = e
                except Error as e:
                    error = e
                    log.exception(e)
                except Exception as e:
                    error = e
                    log.exception(e)
                else:
                    playlist.results.insert(0, [
                        int(time.time()), Playlist.OK,
                        '{} Channels ({:.2f}s)'.format(
                            added,
                            time.time() - playlist_start)
                    ])

                if error:
                    result = [int(time.time()), Playlist.ERROR, str(error)]
                    # Collapse consecutive error results so repeated failures
                    # don't push out the last success entry.
                    if playlist.results and playlist.results[0][
                            1] == Playlist.ERROR:
                        playlist.results[0] = result
                    else:
                        playlist.results.insert(0, result)

                remove_file(self.tmp_file)

                # Keep only the three most recent results per playlist.
                playlist.results = playlist.results[:3]
                playlist.save()

            count = 0
            starting_ch_no = settings.getInt('start_ch_no', 1)

            with codecs.open(working_path, 'w', encoding='utf8') as outfile:
                outfile.write(u'#EXTM3U')

                group_order = settings.get('group_order')
                if group_order:
                    outfile.write(u'\n\n#EXTGRP:{}'.format(group_order))

                # TV channels: auto-number any channel without an explicit
                # number, continuing from the last assigned one.
                chno = starting_ch_no
                tv_groups = []
                for channel in Channel.playlist_list(radio=False):
                    if channel.chno is None:
                        channel.chno = chno
                    chno = channel.chno + 1

                    tv_groups.extend(channel.groups)

                    outfile.write(u'\n\n')
                    outfile.write(channel.get_lines())
                    count += 1

                # Radio channels: numbering restarts; group names that clash
                # with TV groups are renamed via the RADIO_GROUP template.
                chno = starting_ch_no
                for channel in Channel.playlist_list(radio=True):
                    if channel.chno is None:
                        channel.chno = chno
                    chno = channel.chno + 1

                    new_groups = []
                    for group in channel.groups:
                        # BUG FIX: this inner loop previously reused `count`
                        # as its suffix counter, clobbering the total channel
                        # count used for the empty-playlist check and the
                        # summary log below.
                        attempt = 1
                        while group in tv_groups:
                            group = _(_.RADIO_GROUP, group=group)
                            if attempt > 1:
                                group = u'{} #{}'.format(group, attempt)
                            attempt += 1
                        new_groups.append(group)

                    channel.groups = new_groups

                    outfile.write(u'\n\n')
                    outfile.write(channel.get_lines())
                    count += 1

                if count == 0:
                    # IPTV Simple rejects an empty playlist; write a stub.
                    outfile.write(u'\n\n#EXTINF:-1,EMPTY PLAYLIST\nhttp')

            log.debug('Wrote {} Channels'.format(count))
            Playlist.after_merge()
            _safe_copy(working_path, playlist_path)
        finally:
            database.close()
            if progress: progress.close()
            remove_file(self.tmp_file)

        log.debug('Playlist Merge Time: {0:.2f}'.format(time.time() -
                                                        start_time))

        return working_path