def check_alerts():
    """Fire notifications (or offer playback) for games the user set alerts on."""
    alerts = userdata.get('alerts', [])
    if not alerts:
        return

    for game in Game.select().where(Game.id << alerts):
        if game.state == Game.LIVE:
            # Game went live: this alert is consumed either way.
            alerts.remove(game.id)

            seconds_to_start = game.start - arrow.utcnow().timestamp
            alert_when = settings.getInt('alert_when')

            if alert_when == Alert.STREAM_START:
                message = _.STREAM_STARTED
            elif alert_when == Alert.KICK_OFF and 0 < seconds_to_start <= SERVICE_TIME:
                message = _.KICKOFF
            else:
                continue

            if settings.getInt('alert_type') == Alert.NOTIFICATION:
                gui.notification(message, heading=game.title, time=5000, icon=game.image)
            elif gui.yes_no(message, heading=game.title, yeslabel=_.WATCH, nolabel=_.CLOSE):
                # User chose to watch: start playback immediately.
                _get_play_item(game, Game.FULL, play_type=settings.getEnum('live_play_type', PLAY_FROM_TYPES, default=PLAY_FROM_ASK)).play()
        elif game.state != Game.UPCOMING:
            # Game finished / cancelled: drop the stale alert.
            alerts.remove(game.id)

    userdata.set('alerts', alerts)
def _check_updates():
    """Check installed SlyGuy add-ons against the published versions and
    trigger a Kodi repo update when any are outdated.

    Rate-limited to once per UPDATES_CHECK_TIME, and short-circuits when the
    remote add-ons md5 has not changed since the last successful check.
    """
    _time = int(time())
    if _time < settings.getInt('_last_updates_check', 0) + UPDATES_CHECK_TIME:
        return

    settings.setInt('_last_updates_check', _time)

    new_md5 = session.get(ADDONS_MD5).text.split(' ')[0]
    # FIX: previously compared against 'addon_md5' but stored under
    # '_addon_md5', so this short-circuit could never match. Use one key
    # (the '_' prefix matches the other internal keys in this file).
    if new_md5 == settings.get('_addon_md5'):
        return

    settings.set('_addon_md5', new_md5)

    updates = []
    slyguy_addons = session.gz_json(ADDONS_URL)
    # Only consider installed+enabled add-ons that are ours.
    slyguy_installed = [x['addonid'] for x in kodi_rpc('Addons.GetAddons', {'installed': True, 'enabled': True})['addons'] if x['addonid'] in slyguy_addons]

    for addon_id in slyguy_installed:
        addon = get_addon(addon_id, install=False)
        if not addon:
            continue

        cur_version = addon.getAddonInfo('version')
        new_version = slyguy_addons[addon_id]['version']
        if LooseVersion(cur_version) < LooseVersion(new_version):
            updates.append([addon_id, cur_version, new_version])

    if not updates:
        return

    log.debug('Updating repos due to {} addon updates'.format(len(updates)))
    xbmc.executebuiltin('UpdateAddonRepos')
def search(query=None, page=1, **kwargs):
    """Search creators (first page only) and videos, with pagination."""
    page = int(page)

    if not query:
        query = gui.input(_.SEARCH, default=userdata.get('search', '')).strip()
        if not query:
            return
        userdata.set('search', query)

    folder = plugin.Folder(_(_.SEARCH_FOR, query=query))

    # Creator results are not paginated, so only show them on page one.
    if page == 1:
        folder.add_items(_parse_creators(api.creators(query=query)))

    data = api.videos(query=query, page=page, items_per_page=settings.getInt('page_size', 20))
    folder.add_items(_parse_videos(data['response']))

    if data['pagination']['pages'] > page:
        folder.add_item(
            label=_(_(_.NEXT_PAGE, page=page + 1), _bold=True),
            path=plugin.url_for(search, query=query, page=page + 1),
        )

    return folder
def play_channel(self, id):
    """Return the HLS stream url for the given live channel id."""
    endpoint = self._config()['LiveTV']['Livestream_endpoint']
    url = endpoint.format(
        state=STATES[settings.getInt('state')],
        channel=id,
    )
    # The endpoint returns a list; the first entry carries the stream url.
    return self._session.get(url).json()[0]['HLSURL']
def live_channels(self):
    """Return the live schedule for all channels in the configured state."""
    endpoint = self._config()['LiveTV']['Livestream_schedule']
    url = endpoint.format(
        state=STATES[settings.getInt('state')],
        channel='all',
    )
    return self._session.get(url).json()
def search_channel(query=None, radio=0, page=1, **kwargs):
    """Search channels by name, paginated.

    radio: 0 for TV channels, non-zero for radio channels.
    """
    radio = int(radio)
    page = int(page)

    if not query:
        query = gui.input(_.SEARCH, default=userdata.get('search', '')).strip()
        if not query:
            return
        userdata.set('search', query)

    folder = plugin.Folder(_(_.SEARCH_FOR, query=query))

    page_size = settings.getInt('page_size', 0)
    db_query = Channel.channel_list(radio=radio, page=page, search=query, page_size=page_size)
    items = _process_channels(db_query)
    folder.add_items(items)

    # FIX: when page_size is 0 (paging disabled) an empty result set made
    # len(items) == page_size true and added a bogus "Next Page" entry.
    if page_size and len(items) == page_size:
        folder.add_item(
            label = _(_.NEXT_PAGE, page=page+1, _bold=True),
            path = plugin.url_for(search_channel, query=query, radio=radio, page=page+1),
        )

    return folder
def playlist_channels(playlist_id, radio=0, page=1, **kwargs):
    """List the channels of one playlist, paginated.

    Custom playlists additionally get an "Add Channel" entry.
    """
    playlist_id = int(playlist_id)
    radio = int(radio)
    page = int(page)

    playlist = Playlist.get_by_id(playlist_id)
    folder = plugin.Folder(playlist.label)

    page_size = settings.getInt('page_size', 0)
    db_query = Channel.channel_list(playlist_id=playlist_id, radio=radio, page=page, page_size=page_size)
    items = _process_channels(db_query)
    folder.add_items(items)

    # FIX: when page_size is 0 (paging disabled) an empty result set made
    # len(items) == page_size true and added a bogus "Next Page" entry.
    if page_size and len(items) == page_size:
        folder.add_item(
            label = _(_.NEXT_PAGE, page=page+1, _bold=True),
            path = plugin.url_for(playlist_channels, playlist_id=playlist_id, radio=radio, page=page+1),
        )

    if playlist.source_type == Playlist.TYPE_CUSTOM:
        folder.add_item(
            label = _(_.ADD_CHANNEL, _bold=True),
            path = plugin.url_for(add_channel, playlist_id=playlist_id, radio=radio),
        )

    return folder
def epg(output, **kwargs):
    """Write an XMLTV guide for all channels to `output`."""
    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        for row in api.epg(days=settings.getInt('epg_days', 3)):
            channel = row['Channel']

            f.write(u'<channel id="{}"><display-name>{}</display-name><icon src="{}"/></channel>'.format(
                channel['Id'], escape(channel['Name']), escape(_get_logo(channel['Logo']))))

            for program in row['EpgList']:
                f.write(u'<programme channel="{}" start="{}" stop="{}"><title>{}</title><desc>{}</desc></programme>'.format(
                    channel['Id'],
                    arrow.get(program['StartTime']).format('YYYYMMDDHHmmss Z'),
                    arrow.get(program['EndTime']).format('YYYYMMDDHHmmss Z'),
                    escape(program['Name']),
                    escape(program['Description']),
                ))

        f.write(u'</tv>')
def search(query, page, **kwargs):
    """Combined search. Returns (items, next_page_token).

    Creators and podcasts are only searched on the first page; videos are
    searched on every page.
    """
    page_size = settings.getInt('page_size', 50)

    items = []
    if page == 1:
        items = _parse_creators(api.search_creators(query=query)['results'])
        items.extend(_parse_podcast_creators(api.search_podcasts(query=query)['results']))

    data = api.search_videos(query=query, page=page, page_size=page_size)
    items.extend(_parse_videos(data['results']))

    return items, data['next']
def my_creators(page=1, **kwargs):
    """List creators the user follows, paginated."""
    page = int(page)
    data = api.my_creators(page=page, page_size=settings.getInt('page_size', 50))

    folder = plugin.Folder(_.MY_CREATORS)
    folder.add_items(_parse_creators(data['results'], following=True))

    if data['next']:
        folder.add_item(
            label=_(_.NEXT_PAGE, page=page + 1),
            path=plugin.url_for(my_creators, page=page + 1),
        )

    return folder
def description(self):
    """Localised game description; includes the result only when both teams
    have a score, score display is enabled, and enough hours have passed
    since kick-off."""
    home = self.info['home']
    away = self.info['away']

    # -1 means "never show the result".
    show_hours = -1
    if settings.getBool('show_score'):
        show_hours = settings.getInt('show_hours')

    result = ''
    if show_hours != -1 and home['score'] and away['score']:
        if arrow.now() > arrow.get(self.start).shift(hours=show_hours):
            result = self.result

    return _(_.GAME_DESC, home_team=home['name'], away_team=away['name'], kick_off=self.kickoff, result=result)
def _check_news():
    """Fetch the remote news blob (rate-limited) and store it if unseen."""
    now = int(time())
    if now < settings.getInt('_last_news_check', 0) + NEWS_CHECK_TIME:
        return

    settings.setInt('_last_news_check', now)

    news = Session(timeout=15).gz_json(NEWS_URL)
    if not news:
        return

    # Skip malformed payloads and items we have already processed.
    if 'id' not in news or news['id'] == settings.get('_last_news_id'):
        return

    settings.set('_last_news_id', news['id'])
    settings.set('_news', json.dumps(news))
def epg(output, **kwargs):
    """Write an XMLTV guide for all live channels to `output`, scraping
    each channel's listings page by page until `epg_days` is covered."""
    channels = api.live_channels()
    now = arrow.now()
    until = now.shift(days=settings.getInt('epg_days', 3))

    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        # FIX: re-use the channel list fetched above; the original assigned
        # `channels` and then called api.live_channels() a second time.
        for channel in channels:
            # Skip channels with no listings that are also unplayable
            # (out of market and no live stream url).
            if not channel['currentListing'] or (not channel['dma'] and not channel['currentListing'][-1]['contentCANVideo'].get('liveStreamingUrl')):
                continue

            f.write(u'<channel id="{id}"></channel>'.format(id=channel['slug']))

            page = 1
            stop = now
            while stop < until:
                rows = api.epg(channel['slug'], rows=100, page=page)
                page += 1
                if not rows:
                    break

                for row in rows:
                    start = arrow.get(row['startTimestamp'])
                    stop = arrow.get(row['endTimestamp'])
                    icon = u'<icon src="{}"/>'.format(config.image(row['filePathThumb'])) if row['filePathThumb'] else ''
                    desc = u'<desc>{}</desc>'.format(escape(row['description'])) if row['description'] else ''

                    f.write(u'<programme channel="{id}" start="{start}" stop="{stop}"><title>{title}</title>{desc}{icon}</programme>'.format(
                        id=channel['slug'],
                        start=start.format('YYYYMMDDHHmmss Z'),
                        stop=stop.format('YYYYMMDDHHmmss Z'),
                        title=escape(row['title']),
                        desc=desc,
                        icon=icon,
                    ))

        f.write(u'</tv>')
def epg(output, **kwargs):
    """Write an XMLTV guide to `output`, fetching programme data for
    channels in small chunks over a -12h .. +epg_days window."""
    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        ids = []
        for row in api.channels():
            # Without a login, only free-to-air channels are available.
            if not api.logged_in and not row['isFta']:
                continue

            f.write(u'<channel id="{}"><display-name>{}</display-name><icon src="{}"/></channel>'.format(
                row['idChannel'], escape(row.get('epg_name', row['name'])), escape(row.get('logo'))))
            ids.append(row['idChannel'])

        start = arrow.utcnow().shift(hours=-12)
        end = arrow.utcnow().shift(days=settings.getInt('epg_days', 3))
        chunksize = 5

        def chunks(lst, n):
            # Yield successive n-sized slices of lst.
            for i in range(0, len(lst), n):
                yield lst[i:i + n]

        for chunk in chunks(ids, chunksize):
            data = api.epg(chunk, start, end)
            for channel in data:
                for event in data[channel]:
                    genre = event.get('genre')
                    f.write(u'<programme channel="{}" start="{}" stop="{}"><title>{}</title><desc>{}</desc>{}</programme>'.format(
                        event['id_channel'],
                        arrow.get(event['startutc']).format('YYYYMMDDHHmmss Z'),
                        arrow.get(event['endutc']).format('YYYYMMDDHHmmss Z'),
                        escape(event.get('title')),
                        escape(event.get('synopsis')),
                        u'<category>{}</category>'.format(escape(genre)) if genre else '',
                    ))

        f.write(u'</tv>')
def podcasts(slug, page=1, **kwargs):
    """List a podcast's episodes, paginated."""
    page = int(page)
    data = api.podcasts(slug, page=page, page_size=settings.getInt('page_size', 50))

    folder = plugin.Folder(data['details']['title'])
    folder.add_items(_parse_podcasts(data['episodes']['results']))

    if data['episodes']['next']:
        folder.add_item(
            label=_(_.NEXT_PAGE, page=page + 1),
            path=plugin.url_for(podcasts, slug=slug, page=page + 1),
        )

    return folder
def channels(radio=0, page=1, **kwargs):
    """List all channels, paginated.

    radio: 0 for TV channels, non-zero for radio channels.
    """
    folder = plugin.Folder(_.ALL_CHANNELS)

    radio = int(radio)
    page = int(page)
    page_size = settings.getInt('page_size', 0)

    query = Channel.channel_list(radio=radio, page=page, page_size=page_size)
    items = _process_channels(query)
    folder.add_items(items)

    # FIX: when page_size is 0 (paging disabled) an empty result set made
    # len(items) == page_size true and added a bogus "Next Page" entry.
    if page_size and len(items) == page_size:
        folder.add_item(
            label = _(_.NEXT_PAGE, page=page+1, _bold=True),
            path = plugin.url_for(channels, radio=radio, page=page+1),
        )

    return folder
def podcast_creators(category=None, title=None, page=1, **kwargs):
    """Podcast category picker (category=None) or the creators within one
    category, paginated. category='' means "everything"."""
    page = int(page)
    page_size = settings.getInt('page_size', 50)

    # No category chosen yet: show the picker.
    if category is None:
        folder = plugin.Folder(_.PODCASTS)

        folder.add_item(
            label=_.EVERYTHING,
            path=plugin.url_for(podcast_creators, category='', title=_.EVERYTHING),
        )

        for row in api.podcast_categories():
            folder.add_item(
                label=row['title'],
                art={'thumb': row['assets']['avatar-big-light']},
                path=plugin.url_for(podcast_creators, category=row['slug'], title=row['title']),
            )

        return folder

    folder = plugin.Folder(title)

    data = api.podcast_creators(category=category, page=page, page_size=page_size)
    folder.add_items(_parse_podcast_creators(data['results']))

    if data['next']:
        folder.add_item(
            label=_(_.NEXT_PAGE, page=page + 1),
            path=plugin.url_for(podcast_creators, category=category, title=title, page=page + 1),
        )

    return folder
def creator_list(slug, page=1, **kwargs):
    """List one creator's videos, paginated."""
    page = int(page)
    creator = _get_creator(slug)

    folder = plugin.Folder(creator['title'], fanart=creator['banner'])

    data = api.videos(creator['playlist_id'], page=page, items_per_page=settings.getInt('page_size', 20))
    folder.add_items(_parse_videos(data['response'], creator=creator))

    if data['pagination']['pages'] > page:
        folder.add_item(
            label=_(_(_.NEXT_PAGE, page=page + 1), _bold=True),
            path=plugin.url_for(creator_list, slug=slug, page=page + 1),
        )

    return folder
def creator_videos(slug, page=1, **kwargs):
    """List one creator's episodes, paginated."""
    page = int(page)
    data = api.creator(slug, page=page, page_size=settings.getInt('page_size', 50))

    folder = plugin.Folder(
        data['details']['title'],
        fanart=data['details']['assets']['avatar']['512']['original'],
    )
    folder.add_items(_parse_videos(data['episodes']['results'], creator_page=True))

    if data['episodes']['next']:
        folder.add_item(
            label=_(_.NEXT_PAGE, page=page + 1),
            path=plugin.url_for(creator_videos, slug=slug, page=page + 1),
        )

    return folder
def playlist(label, playlist_id=None, page=1, **kwargs):
    """List the videos of one playlist, paginated."""
    page = int(page)
    folder = plugin.Folder(label)

    data = api.videos(playlist_id, page=page, items_per_page=settings.getInt('page_size', 20))
    folder.add_items(_parse_videos(data['response']))

    if data['pagination']['pages'] > page:
        folder.add_item(
            label=_(_(_.NEXT_PAGE, page=page + 1), _bold=True),
            path=plugin.url_for(playlist, label=label, playlist_id=playlist_id, page=page + 1),
        )

    return folder
def my_library(**kwargs):
    """Followed creators plus their most recent videos, newest first."""
    folder = plugin.Folder(_.MY_LIBRARY, cacheToDisc=False, no_items_method='list')

    creators = api.creators()

    def get_creator(content_id):
        # Linear scan is fine; the creator list is small.
        for creator in creators:
            if creator['_id'] == content_id:
                return creator

    tasks = []
    _creators = []
    for row in api.following():
        _creator = get_creator(row['channel'])
        if not _creator:
            continue

        _creators.append(_creator)
        # Bind playlist_id as a default argument so each lambda captures its
        # own value (avoids the late-binding closure pitfall).
        task = lambda x=_creator['playlist_id']: api.videos(x, items_per_page=settings.getInt('my_library_vids_per_creator', 10))
        tasks.append(task)

    if settings.getBool('my_library_show_creators', True):
        folder.add_items(_parse_creators(_creators, following=True))

    videos = []
    for result in async_tasks(tasks, workers=10):
        videos.extend(result['response'])

    items = _parse_videos(videos, following=True)
    items.sort(key=lambda x: x.custom['published'], reverse=True)
    folder.add_items(items)

    return folder
def epg(output, **kwargs):
    """Write an XMLTV guide to `output`.

    Prefers the pre-built remote EPG for the region; falls back to scraping
    the guide in 6-hour windows when that download fails (or for LOCAL /
    CUSTOM regions, which have no remote file).
    """
    region = settings.getEnum('region', REGIONS, default=US)

    if region not in (LOCAL, CUSTOM):
        epg_url = MH_EPG_URL.format(region=region)
        try:
            Session().chunked_dl(epg_url, output)
            if epg_url.endswith('.gz'):
                gzip_extract(output)
            return True
        except Exception as e:
            log.exception(e)
            log.debug('Failed to get remote epg: {}. Fall back to scraping'.format(epg_url))

    def process_epg(channels):
        # Write every programme in `channels` to f; returns how many were written.
        count = 0
        for id in channels:
            channel = channels[id]
            for row in channel.get('programs', []):
                start = arrow.get(row['start']).to('utc')
                stop = arrow.get(row['stop']).to('utc')
                title = row['title']
                description = row['episode']['description']
                subtitle = row['episode']['name']
                category = row['episode']['genre']
                icon = None

                # Drop sub-titles that merely repeat the title.
                if subtitle.lower().strip() == title.lower().strip():
                    subtitle = None

                f.write(u'<programme channel="{}" start="{}" stop="{}"><title>{}</title><desc>{}</desc>{}{}{}</programme>'.format(
                    id,
                    start.format('YYYYMMDDHHmmss Z'),
                    stop.format('YYYYMMDDHHmmss Z'),
                    escape(title),
                    escape(description),
                    u'<icon src="{}"/>'.format(escape(icon)) if icon else '',
                    u'<sub-title>{}</sub-title>'.format(escape(subtitle)) if subtitle else '',
                    u'<category>{}</category>'.format(escape(category)) if category else '',
                ))
                count += 1
        return count

    HOUR_SHIFT = 6
    now = arrow.now()
    start = now.replace(minute=0, second=0, microsecond=0).to('utc')
    stop = start.shift(hours=HOUR_SHIFT)
    END_TIME = start.shift(days=settings.getInt('epg_days', 3))

    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        channels = api.epg(start, stop)
        for id in channels:
            f.write(u'<channel id="{id}"/>'.format(id=id))

        added = process_epg(channels)
        while stop < END_TIME:
            start = stop
            stop = start.shift(hours=HOUR_SHIFT)
            channels = api.epg(start, stop)
            added = process_epg(channels)
            # NOTE(review): presumably an end-of-data heuristic — stop once
            # a window yields no more programmes than it has channels.
            if added <= len(channels):
                break

        f.write(u'</tv>')
def get_region():
    """Return the currently selected region from the REGIONS table."""
    index = settings.getInt('region_index')
    return REGIONS[index]
def playlists(self, refresh=True):
    """Merge all enabled playlists into a single M3U and return its path.

    With refresh=False, returns the previously merged file if both the
    working and output copies still exist.
    """
    playlist_path = os.path.join(self.output_path, PLAYLIST_FILE_NAME)
    working_path = os.path.join(self.working_path, PLAYLIST_FILE_NAME)

    if not refresh and xbmcvfs.exists(playlist_path) and xbmcvfs.exists(working_path):
        return working_path

    start_time = time.time()
    database.connect()

    # FIX: define before the try so the finally block cannot hit a
    # NameError if the very first statement inside the try raises.
    progress = None
    try:
        if self.forced:
            progress = gui.progressbg()

        playlists = list(Playlist.select().where(Playlist.enabled == True).order_by(Playlist.order))
        # Clear results on disabled playlists and drop non-custom channels
        # whose playlist is no longer part of the merge.
        Playlist.update({Playlist.results: []}).where(Playlist.enabled == False).execute()
        Channel.delete().where(Channel.custom == False, Channel.playlist.not_in(playlists)).execute()

        for count, playlist in enumerate(playlists):
            count += 1
            if progress:
                progress.update(int(count * (100 / len(playlists))), 'Merging Playlist ({}/{})'.format(count, len(playlists)), _(playlist.label, _bold=True))

            playlist_start = time.time()
            error = None
            try:
                log.debug('Processing: {}'.format(playlist.path))

                if playlist.source_type != Playlist.TYPE_CUSTOM:
                    self._process_source(playlist, METHOD_PLAYLIST, self.tmp_file)
                    with database.db.atomic() as transaction:
                        try:
                            added = self._process_playlist(playlist, self.tmp_file)
                        except:
                            transaction.rollback()
                            raise
                else:
                    added = len(playlist.channels)
            except AddonError as e:
                error = e
            except Error as e:
                error = e
                log.exception(e)
            except Exception as e:
                error = e
                log.exception(e)
            else:
                playlist.results.insert(0, [int(time.time()), Playlist.OK, '{} Channels ({:.2f}s)'.format(added, time.time() - playlist_start)])
                error = None

            if error:
                result = [int(time.time()), Playlist.ERROR, str(error)]
                # Overwrite a previous consecutive error instead of stacking them.
                if playlist.results and playlist.results[0][1] == Playlist.ERROR:
                    playlist.results[0] = result
                else:
                    playlist.results.insert(0, result)

            remove_file(self.tmp_file)
            # Only keep the three most recent results per playlist.
            playlist.results = playlist.results[:3]
            playlist.save()

        count = 0
        starting_ch_no = settings.getInt('start_ch_no', 1)

        with codecs.open(working_path, 'w', encoding='utf8') as outfile:
            outfile.write(u'#EXTM3U')

            group_order = settings.get('group_order')
            if group_order:
                outfile.write(u'\n\n#EXTGRP:{}'.format(group_order))

            chno = starting_ch_no
            tv_groups = []
            for channel in Channel.playlist_list(radio=False):
                if channel.chno is None:
                    channel.chno = chno
                chno = channel.chno + 1

                tv_groups.extend(channel.groups)
                outfile.write(u'\n\n')
                outfile.write(channel.get_lines())
                count += 1

            chno = starting_ch_no
            for channel in Channel.playlist_list(radio=True):
                if channel.chno is None:
                    channel.chno = chno
                chno = channel.chno + 1

                new_groups = []
                for group in channel.groups:
                    # FIX: this de-dupe counter previously reused `count`,
                    # clobbering the running channel total above (which broke
                    # the "Wrote N Channels" log and the empty-playlist check).
                    suffix = 1
                    while group in tv_groups:
                        group = _(_.RADIO_GROUP, group=group)
                        if suffix > 1:
                            group = u'{} #{}'.format(group, suffix)
                        suffix += 1
                    new_groups.append(group)

                channel.groups = new_groups
                outfile.write(u'\n\n')
                outfile.write(channel.get_lines())
                count += 1

            if count == 0:
                # Keep the file non-empty so the PVR client accepts it.
                outfile.write(u'\n\n#EXTINF:-1,EMPTY PLAYLIST\nhttp')

        log.debug('Wrote {} Channels'.format(count))
        Playlist.after_merge()
        _safe_copy(working_path, playlist_path)
    finally:
        database.close()
        if progress:
            progress.close()
        remove_file(self.tmp_file)

    log.debug('Playlist Merge Time: {0:.2f}'.format(time.time() - start_time))
    return working_path
def epg(output, **kwargs):
    """Write an XMLTV guide to `output`.

    Tries the pre-built remote EPG for the user's country first; if that
    fails (or none exists), scrapes the guide per-channel with a small
    thread pool and a single XML-writer thread.
    """
    country = userdata.get('country', DEFAULT_COUNTRY)
    epg_url = EPG_URLS.get(country)
    if epg_url:
        try:
            Session().chunked_dl(epg_url, output)
            if epg_url.endswith('.gz'):
                gzip_extract(output)
            return True
        except Exception as e:
            log.exception(e)
            log.debug('Failed to get remote epg: {}. Fall back to scraping'.format(epg_url))

    with codecs.open(output, 'w', encoding='utf8') as f:
        f.write(u'<?xml version="1.0" encoding="utf-8" ?><tv>')

        def process_data(id, data):
            # Write one channel's events. Note the channel attribute comes
            # from each event's own channelTag, not the id argument.
            # FIX: removed dead local `program_count` (never incremented/used).
            for event in data:
                channel = event['channelTag']
                start = arrow.get(event['startDateTime']).to('utc')
                stop = arrow.get(event['endDateTime']).to('utc')
                title = event.get('title')
                subtitle = event.get('episodeTitle')
                series = event.get('seasonNumber')
                episode = event.get('episodeNumber')
                desc = event.get('longSynopsis')
                icon = event.get('thumbnailImagePaths', {}).get('THUMB')

                icon = u'<icon src="{}"/>'.format(icon) if icon else ''
                episode = u'<episode-num system="onscreen">S{}E{}</episode-num>'.format(series, episode) if series and episode else ''
                subtitle = u'<sub-title>{}</sub-title>'.format(escape(subtitle)) if subtitle else ''

                f.write(u'<programme channel="{id}" start="{start}" stop="{stop}"><title>{title}</title>{subtitle}{icon}{episode}<desc>{desc}</desc></programme>'.format(
                    id=channel, start=start.format('YYYYMMDDHHmmss Z'), stop=stop.format('YYYYMMDDHHmmss Z'),
                    title=escape(title), subtitle=subtitle, episode=episode, icon=icon, desc=escape(desc)))

        ids = []
        no_events = []
        for row in api.channels():
            f.write(u'<channel id="{id}"></channel>'.format(id=row['id']))
            ids.append(row['id'])
            if not row.get('events'):
                no_events.append(row['id'])

        log.debug('{} Channels'.format(len(ids)))
        log.debug('No Events: {}'.format(no_events))

        start = arrow.now('Africa/Johannesburg')
        EPG_DAYS = settings.getInt('epg_days', 3)
        WORKERS = 3

        queue_data = queue.Queue()
        queue_failed = queue.Queue()
        queue_tasks = queue.Queue()
        queue_errors = queue.Queue()

        for id in ids:
            queue_tasks.put(id)

        def xml_worker():
            # Single writer thread: serialises all f.write calls.
            while True:
                id, data = queue_data.get()
                try:
                    process_data(id, data)
                except Exception as e:
                    queue_errors.put(e)
                finally:
                    queue_data.task_done()

        def worker():
            # Fetch one channel's events; failures are retried serially later.
            while True:
                id = queue_tasks.get()
                try:
                    data = api.epg(id, start.shift(days=-1), start.shift(days=EPG_DAYS+1), attempts=1)
                    if not data:
                        # FIX: was a bare `raise Exception()` with no message.
                        raise Exception('No EPG data for channel: {}'.format(id))
                    queue_data.put([id, data])
                except Exception as e:
                    queue_failed.put(id)
                finally:
                    queue_tasks.task_done()

        for i in range(WORKERS):
            thread = threading.Thread(target=worker)
            thread.daemon = True
            thread.start()

        thread = threading.Thread(target=xml_worker)
        thread.daemon = True
        thread.start()

        queue_tasks.join()
        queue_data.join()

        if not queue_errors.empty():
            raise Exception('Error processing data')

        # Retry failures in the main thread: channels that advertised no
        # events get one attempt, everything else up to 10.
        while not queue_failed.empty():
            id = queue_failed.get_nowait()
            data = api.epg(id, start.shift(days=-1), start.shift(days=EPG_DAYS+1), attempts=1 if id in no_events else 10)
            if data:
                process_data(id, data)
            elif id in no_events:
                log.debug('Skipped {}: Expected 0 events'.format(id))
            else:
                raise Exception('Failed {}'.format(id))

        f.write(u'</tv>')
def videos(self, video_ids):
    """Fetch metadata for the given video ids in the configured state."""
    config = self._config()
    endpoints = config['endpoints']['videos']

    # The endpoint template uses [ids] / [state] placeholders.
    url = endpoints['server'] + endpoints['methods']['getVideobyIDs']
    url = url.replace('[ids]', ','.join(str(x) for x in video_ids))
    url = url.replace('[state]', STATES[settings.getInt('state')])

    return self._session.get(url).json()['items']
def start():
    """Service loop: periodically runs the playlist merge and, when enabled,
    restarts the IPTV Simple PVR client so it reloads the new files."""
    monitor = xbmc.Monitor()
    restart_queued = False
    boot_merge = settings.getBool('boot_merge', False)
    set_kodi_string('_iptv_merge_force_run')

    while not monitor.waitForAbort(1):
        forced = ADDON_DEV or get_kodi_string('_iptv_merge_force_run') or 0

        # A merge is due either when forced, on boot, or when the auto-merge
        # interval has elapsed since the last run.
        auto_due = settings.getBool('auto_merge', True) and time.time() - userdata.get('last_run', 0) > settings.getInt('reload_time_hours', 12) * 3600
        if forced or boot_merge or auto_due:
            set_kodi_string('_iptv_merge_force_run', '1')

            # The merge runs via a plugin:// directory listing; the single
            # returned "file" name carries the result message.
            url = router.url_for('service_merge', forced=forced)
            dirs, files = xbmcvfs.listdir(url)
            msg = unquote(files[0])

            if msg == 'ok':
                restart_queued = True

            userdata.set('last_run', int(time.time()))
            set_kodi_string('_iptv_merge_force_run')

        if restart_queued and settings.getBool('restart_pvr', False):
            if forced:
                progress = gui.progressbg(heading='Reloading IPTV Simple Client')

            if KODI_VERSION > 18:
                restart_queued = False
                try:
                    # Kodi 19+: poking a setting makes IPTV Simple reload.
                    xbmcaddon.Addon(IPTV_SIMPLE_ID).setSetting('m3uPathType', '0')
                except Exception as e:
                    pass
            elif forced or (not xbmc.getCondVisibility('Pvr.IsPlayingTv') and not xbmc.getCondVisibility('Pvr.IsPlayingRadio')):
                # Kodi 18: toggle the add-on off and on again (skipped while
                # PVR is playing unless the run was forced).
                restart_queued = False
                kodi_rpc('Addons.SetAddonEnabled', {'addonid': IPTV_SIMPLE_ID, 'enabled': False})

                wait_delay = 4
                for i in range(wait_delay):
                    if monitor.waitForAbort(1):
                        break
                    if forced:
                        progress.update((i + 1) * int(100 / wait_delay))

                kodi_rpc('Addons.SetAddonEnabled', {'addonid': IPTV_SIMPLE_ID, 'enabled': True})

            if forced:
                progress.update(100)
                progress.close()

        boot_merge = False
        if ADDON_DEV:
            break