def get_pagination(current_page, total_pages, action, params):
    """Build prev/next navigation items for a paginated listing.

    current_page/total_pages: 1-based page numbers (int or int-like str).
    action: plugin action name used to build the target URLs.
    params: request params; params.id is forwarded to the target pages.
    Returns a list of 0-2 item dicts.
    """
    items = []
    current_page = int(current_page)
    total_pages = int(total_pages)
    # clamp: never point past the last page
    if total_pages < current_page:
        current_page = total_pages
    if current_page > 1:
        items.append({
            'label': App.replace_html_codes(
                '%s Предыдущая страница'.decode('utf-8') % App.format_bold('<')),
            'icon': App.get_media('prev'),
            'url': P.get_url(action=action, id=params.id, page=current_page - 1)
        })
    if current_page < total_pages:
        items.append({
            'label': App.replace_html_codes(
                '%s Следующая страница'.decode('utf-8') % App.format_bold('>')),
            'icon': App.get_media('next'),
            'url': P.get_url(action=action, id=params.id, page=current_page + 1)
        })
    return items
def ns_serials_by_genre(params):
    """List serials of one genre (ns_* site) as a tvshows listing."""
    items = []
    # NOTE(review): logs on every call, not only on errors -- likely debug leftover
    P.log_error(params.url)
    content = App.http_request(URL + params.url)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_='serials-list')
        for serial in block.find_all('li'):
            link = serial.find('a')
            img = link.find('img')
            url = link.get('href')
            cover = img.get('src')
            label = img.get('title').strip()
            items.append({
                'label': label,
                'thumb': cover,
                'art': {
                    'poster': cover
                },
                'url': P.get_url(action='ns_serial_seasons', url=url)
            })
    return App.create_listing(items, content='tvshows')
def ts_index(params):
    """Root menu of the ts_* site: search, last added, then categories."""
    items = []
    content = App.http_request(URL + '/show')
    if content:
        items.append({
            'label': '[ Поиск ]',
            'url': P.get_url(action='ts_search'),
            'icon': App.get_media('find')
        })
        items.append({
            'label': App.format_bold('Последние поступления'),
            'url': P.get_url(action='ts_last_added'),
        })
        html = BeautifulSoup(content, 'html.parser')
        categories = html.find(id='filter-category')
        for option in categories.find_all('option'):
            id = option.get('value')
            # '0' is the "all categories" placeholder option -- skip it
            if id == '0':
                continue
            label = option.get_text()
            items.append({
                'label': label,
                'url': P.get_url(action='ts_category', id=id)
            })
    return App.create_listing(items)
def ns_index(params):
    """Root menu of the ns_* site; returns raw items (caller wraps them)."""
    items = [{
        'label': '[ Поиск ]',
        'icon': App.get_media('find'),
        'url': P.get_url(action='ns_search')
    }, {
        'label': App.format_bold('Популярные'),
        'url': P.get_url(action='ns_top')
    }, {
        'label': App.format_bold('Новые серии'),
        'url': P.get_url(action='ns_new_episodes')
    }, {
        'label': App.format_bold('Новые сериалы'),
        'url': P.get_url(action='ns_new_serials')
    }]
    content = App.http_request(URL)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_='categories-menu')
        if block is not None:
            for genre in block.find_all('a'):
                label = App.bs_get_text(genre)
                url = genre.get('href').split('=')
                # percent-encode the genre value (Cyrillic) in the query string
                url[1] = urllib.quote(url[1].encode('utf-8'))
                url = '='.join(url)
                items.append({
                    'label': label,
                    'url': P.get_url(action='ns_serials_by_genre', url=url)
                })
    return items
def ts_search(params):
    """Prompt for a query and list matching shows (ts_* JSON search)."""
    items = []
    query = App.keyboard(heading='Поиск')
    if query is None:
        # keyboard cancelled -- show nothing, no notification
        pass
    elif query != '':
        query = query.decode('utf-8').lower()
        tvshows = App.http_request('%s/show/search/%s' % (URL, query))
        if tvshows is not None:
            tvshows = sorted(json.loads(tvshows), key=itemgetter('name'))
            for tvshow in tvshows:
                try:
                    name = tvshow['name'].encode('utf-8')
                    url = tvshow['url']
                    items.append({
                        'label': name,
                        'url': P.get_url(action='ts_tvshow_seasons', href=url)
                    })
                except:
                    P.log_error(traceback.format_exc())
        if len(items) == 0:
            App.noty('no_search_results')
    else:
        # empty query entered
        App.noty('no_search_results')
    return App.create_listing(items, content='tvshows')
def ts_tvshow_season_episodes(params):
    """Fetch episode JSON for each id and play them as a playlist.

    params.episodes_ids: comma-separated episode ids. Shows the busy
    dialog while fetching; opens the video playlist window on success.
    """
    items = []
    xbmc.executebuiltin('ActivateWindow(busydialog)')
    for id in params.episodes_ids.split(','):
        try:
            (resp_headers, content) = H.request(
                '{0}/show/episode/episode.json?episode={1}'.format(URL, id),
                'GET',
                headers={
                    'X-Requested-With': 'XMLHttpRequest',
                    'User-Agent': App.USER_AGENT
                })
            if resp_headers.status == 200:
                episode = json.loads(content)
                label = episode['fullname']
                if episode['name'] is not None:
                    label += App.replace_html_codes(
                        ' ' + App.format_bold(episode['name']))
                duration = episode['duration']
                hd = episode['video']['files']['HD']['url']
                sd = episode['video']['files']['SD']['url']
                subtitles = episode['subtitles']
                items.append({
                    'label': label,
                    'stream_info': {
                        'video': {
                            'duration': duration
                        }
                    },
                    # API returns False (not None) when no subtitles exist
                    'subtitles': [URL + subtitles if subtitles is not False else ''],
                    'mime': episode['video']['mimetype'],
                    # prefer the HD stream when available
                    'url': hd if hd is not None else sd,
                    'is_playable': True
                })
        except:
            P.log_error(traceback.format_exc())
    xbmc.executebuiltin('Dialog.Close(busydialog)')
    if len(items) > 0:
        App.create_playlist(items)
        xbmc.executebuiltin('ActivateWindow(VideoPlaylist)')
    else:
        App.noty('playlist_empty')
def ns_serial_seasons(params):
    """List a serial's seasons plus a leading 'all seasons' entry (ns_*)."""
    items = []
    content = App.http_request(URL + params.url)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        name = App.bs_get_text(html.find('h1')).strip()
        description = html.find(class_='serial-description')
        year = ''
        plot = App.STR_NO_DATA
        cover = ''
        if description is not None:
            cover = description.find('img').get('src')
            description_text = App.bs_get_text(description.find(class_='text')).encode('utf-8')
            # description is plain text; pull labelled fields line by line
            year = re_compile_or_no_data('Год(.+?)\n', description_text)
            country = re_compile_or_no_data('Страна(.+?)\n', description_text)
            if country == App.STR_NO_DATA:
                country = re_compile_or_no_data('Производство(.+?)\n', description_text)
            genre = re_compile_or_no_data('Жанр(.+?)\n', description_text)
            director = re_compile_or_no_data('Режиссер(.+?)\n', description_text)
            plot = re_compile_or_no_data('\n(.+?)\n \n', description_text)
            plot = App.format_description(description=plot, country=country, genre=genre, director=director)
        season_headers = html.find_all(class_='panel-title')
        items.append({
            'label': App.replace_html_codes('%s Все сезоны'.decode('utf-8') % App.format_bold(name)),
            'thumb': cover,
            'art': {
                'poster': cover
            },
            'info': {
                'video': {
                    'plot': plot,
                    'year': year
                }
            },
            # season_number='-1' means "all seasons" downstream
            'url': P.get_url(action='ns_serial_season_episodes', url=params.url, season_number='-1')
        })
        for season_header in season_headers:
            label = App.bs_get_text(season_header.find('h2'))
            # header text looks like "Сезон N ..." -- second token is the number
            season_number = label.split(' ')[1]
            items.append({
                'label': label,
                'thumb': cover,
                'art': {
                    'poster': cover
                },
                'url': P.get_url(action='ns_serial_season_episodes', url=params.url, season_number=season_number)
            })
    return App.create_listing(items, content='tvshows')
def nm_movie(params):
    """Resolve a movie page (nm_*) into a playable item.

    Two layouts exist: a flash player (video id in flashvars) and a
    plain HTML5 <source> tag.
    """
    items = []
    content = App.http_request('%s/movie/watch.php?%s' %
                               (URL, urllib.urlencode({'id': params.id})))
    if content:
        html = BeautifulSoup(content, 'html.parser')
        cover = html.find(class_='movie-cover')
        if cover is not None:
            cover = cover.find('img').get('src')
        description = App.bs_get_text_with_newlines(
            html.find(class_='description-text'))
        flashvars = re.compile(
            '<param value="config=.+?__(.+?)" name="flashvars">').findall(
                content)
        if flashvars:
            video_id = flashvars[0]
            items.append(create_movie_item(video_id, cover))
        else:
            video = html.find('source', {'type': 'video/mp4'})
            if video is not None:
                title = html.find(class_='panel-title')
                if title is not None:
                    # panel title is "Russian / Original"; prefer the part
                    # after the first slash when present
                    title = title.get_text().split('/', 1)
                    P.log_error(title)
                    if len(title) == 2:
                        title = title[1]
                    else:
                        title = title[0]
                else:
                    title = 'Просмотр'
                items.append({
                    'label': App.remove_double_spaces(title),
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'info': {
                        'video': {
                            # Confluence renders plots poorly here -- suppress
                            'plot': '' if App.get_skin_name() == 'skin.confluence' else description,
                        }
                    },
                    'url': video.get('src'),
                    'is_playable': True,
                })
            else:
                # neither player layout found -- presumably "movie not found";
                # NOTE(review): nesting reconstructed from flattened source
                App.notification('Ошибка', 'Фильм не найден', 'info')
    return App.create_listing(items, content='movies')
def get_news_item_badge(item):
    """Return the colorized badge text of a news list item, '' if none.

    The badge's second CSS class selects the color via App.get_color.
    Any parsing failure is logged and yields ''.
    """
    badge = item.find(class_='label')
    if badge is None:
        return ''
    try:
        color = App.get_color(badge.get('class')[1])
        return App.format_color(badge.get_text(), color)
    except:
        P.log_error(traceback.format_exc())
        return ''
def nm_index(params):
    """Static root menu of the nm_* site: search, top, genres."""
    entries = (
        ('[ Поиск ]', 'nm_search', App.get_media('find')),
        ('Популярные', 'nm_top', None),
        ('По жанрам', 'nm_genres', None),
    )
    menu = []
    for label, action, icon in entries:
        item = {'label': label, 'url': P.get_url(action=action)}
        if icon is not None:
            item['icon'] = icon
        menu.append(item)
    return menu
def get_pagination(current, total, size, offset, params):
    """Build prev/next page items for co_movies listings.

    current: 1-based current page; total: total item count; size: page
    size; offset: numbering offset subtracted when building target pages.
    Returns a list of 0-2 navigation items.
    """
    items = []
    current = int(current)
    total = int(total)
    size = int(size)
    offset = int(offset)
    pages = {
        # NOTE: Python 2 integer division -- total / size floors first,
        # so this is effectively total // size + 1, not a true ceil.
        'total': total if (size == 0 and offset == 0) else int(
            math.ceil(total / size + 1)),
        'current': current if (current > 0) else 1
    }
    # clamp: never point past the last page
    if pages['total'] < pages['current']:
        pages['current'] = pages['total']
    if pages['current'] > 1:
        items.append({
            'label': App.replace_html_codes(
                '[B]<[/B] Предыдущая страница'.decode('utf-8')),
            'icon': App.get_media('prev'),
            'url': P.get_url(action='co_movies',
                             order_id=params.order_id,
                             genre_id=params.genre_id,
                             page=pages['current'] - 1 - offset)
        })
    if pages['current'] < pages['total']:
        items.append({
            'label': App.replace_html_codes(
                '[B]>[/B] Следующая страница'.decode('utf-8')),
            'icon': App.get_media('next'),
            'url': P.get_url(action='co_movies',
                             order_id=params.order_id,
                             genre_id=params.genre_id,
                             page=pages['current'] + 1 - offset)
        })
    # BUG FIX: the original recomputed `pages` here and returned immediately,
    # discarding the result (dead code; it also divided by `size` without the
    # size == 0 guard used above). Removed.
    return items
def co_bestsellers(params):
    """List bestseller movies grouped by API category (co_* site)."""
    items = []
    content = App.http_request(API, 'POST',
                               {'action[0]': 'Video.getBestsellers'})
    if content:
        data = json.loads(content)['json'][0]['response']
        for category in data['bestsellers']:
            for movie in category['movies']:
                cover = get_bigger_cover(movie['cover'])
                items.append({
                    'label': App.replace_html_codes('[B]%s[/B] %s' %
                                                    (movie['name'],
                                                     category['name'])),
                    'thumb': cover,
                    'fanart': cover,
                    'art': {
                        'poster': cover
                    },
                    'info': {
                        'video': {
                            'year': movie['year'],
                        }
                    },
                    'url': P.get_url(action='co_movie', id=movie['movie_id'])
                })
    return App.create_listing(items, content='movies')
def co_search(params):
    """Prompt for a query and list matching movies (co_* suggestion API)."""
    items = []
    query = App.keyboard(heading='Поиск')
    if query is None:
        # keyboard cancelled -- show nothing, no notification
        pass
    elif query != '':
        content = App.http_request(URL + '/suggestion.php?' +
                                   urllib.urlencode({'q': query}))
        if content:
            movies = json.loads(content)['json'][0]['response']['movies']
            for movie in movies:
                cover = get_bigger_cover(movie['cover'])
                items.append({
                    'label': movie['name'],
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'info': {
                        'video': {
                            'year': movie['year'],
                        }
                    },
                    'url': P.get_url(action='co_movie', id=movie['movie_id'])
                })
        if len(items) == 0:
            App.noty('no_search_results')
    else:
        # empty query entered
        App.noty('no_search_results')
    return App.create_listing(items, content='movies')
def nm_top(params):
    """List popular movies from the nm_* charts block."""
    items = []
    content = App.http_request(URL + '/movie/')
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_='charts-block')
        if block is not None:
            for movie in block.find_all(class_='changed'):
                link = movie.find('a')
                img = link.find('img')
                # movie id is the value after '=' in the href query
                id = link.get('href').split('=')[1]
                cover = img.get('src')
                label = img.get('title')
                items.append({
                    'label': label,
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'url': P.get_url(action='nm_movie', id=id)
                })
    return App.create_listing(items, content='movies')
def ns_search(params):
    """Prompt for a query and filter the cached serials list (ns_*)."""
    items = []
    query = App.keyboard(heading='Поиск')
    if query is None:
        # keyboard cancelled -- show nothing, no notification
        pass
    elif query != '':
        query = query.decode('utf-8').lower()
        serials = get_serials_list()
        for serial in serials:
            # simple case-insensitive substring match on the title
            if serial['title'].lower().find(query) > -1:
                cover = serial['cover']
                items.append({
                    'label': serial['title'],
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'url': P.get_url(action='ns_serial_seasons',
                                     url=serial['url'])
                })
        if len(items) == 0:
            App.noty('no_search_results')
    else:
        # empty query entered
        App.noty('no_search_results')
    return App.create_listing(items, content='tvshows')
def get_serials_from_index_page(block_class):
    """Scrape one named block of the ns_* index page into a listing.

    block_class: CSS class of the index-page block to read (e.g. the
    'popular' or 'new serials' section).
    """
    items = []
    content = App.http_request(URL)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_=block_class)
        if block is not None:
            for serial in block.find_all('li'):
                link = serial.find('a')
                img = link.find('img')
                url = link.get('href')
                cover = img.get('src')
                label = img.get('title')
                items.append({
                    'label': label,
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'url': P.get_url(action='ns_serial_seasons', url=url)
                })
    return App.create_listing(items, content='tvshows')
def get_pagination(current, total, size, offset, params):
    """Build prev/next page items for co_movies listings.

    current: 1-based current page; total: total item count; size: page
    size; offset: numbering offset subtracted when building target pages.
    Returns a list of 0-2 navigation items.
    """
    items = []
    current = int(current)
    total = int(total)
    size = int(size)
    offset = int(offset)
    pages = {
        # NOTE: Python 2 integer division -- total / size floors first,
        # so this is effectively total // size + 1, not a true ceil.
        'total': total if (size == 0 and offset == 0) else int(
            math.ceil(total / size + 1)),
        'current': current if (current > 0) else 1
    }
    # clamp: never point past the last page
    if pages['total'] < pages['current']:
        pages['current'] = pages['total']
    if pages['current'] > 1:
        items.append({
            'label': App.replace_html_codes(
                '[B]<[/B] Предыдущая страница'.decode('utf-8')),
            'icon': App.get_media('prev'),
            'url': P.get_url(
                action='co_movies',
                order_id=params.order_id,
                genre_id=params.genre_id,
                page=pages['current'] - 1 - offset
            )
        })
    if pages['current'] < pages['total']:
        items.append({
            'label': App.replace_html_codes(
                '[B]>[/B] Следующая страница'.decode('utf-8')),
            'icon': App.get_media('next'),
            'url': P.get_url(
                action='co_movies',
                order_id=params.order_id,
                genre_id=params.genre_id,
                page=pages['current'] + 1 - offset
            )
        })
    # BUG FIX: the original recomputed `pages` here and returned immediately,
    # discarding the result (dead code; it also divided by `size` without the
    # size == 0 guard used above). Removed.
    return items
def co_movie(params):
    """Movie details (co_*): one playable item per file.

    Directory entries ('is_dir') produce a single folder item pointing at
    co_movie_playlist and stop the loop.
    """
    items = []
    request_data = sorted([
        ('action[0]', 'Video.getMovie'),
        ('movie_id[0]', params.id)
    ])
    content = App.http_request(API, 'POST', request_data)
    if content:
        movie = json.loads(content)['json'][0]['response']['movie']
        cover = '{0}/{1}'.format(URL, movie['covers'][0]['thumbnail'])
        for file in movie['files']:
            duration = ''
            url = P.get_url(action='co_movie_playlist', id=movie['movie_id'])
            if file['is_dir']:
                label = App.replace_html_codes(movie['name'] + ' Плейлист'.decode('utf-8'))
            else:
                label = file['name'] if len(movie['files']) > 1 else movie['name']
                # rewrite the server-local path into a streamable URL
                url = file['path'].replace('/home/video/', 'http://p0.oc.kg:8080/')
                duration = file['metainfo']['playtime_seconds']
            items.append(
                {
                    'label': label,
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'stream_info': {
                        'video': {
                            'duration': duration
                        }
                    },
                    'info': {
                        'video': {
                            'plot': get_description(movie),
                            'year': movie['year'],
                            'mpaa': movie['mpaa'],
                            'title': movie['name'],
                            'originaltitle': movie['international_name'],
                        }
                    },
                    'url': url,
                    'is_playable': not(file['is_dir']),
                    'is_folder': not(file['is_dir']),
                }
            )
            if file['is_dir']:
                break
    return App.create_listing(items, content='movies')
def co_movie(params):
    """Movie details (co_*): one playable item per file.

    Directory entries ('is_dir') produce a single folder item pointing at
    co_movie_playlist and stop the loop.
    """
    items = []
    request_data = sorted([('action[0]', 'Video.getMovie'),
                           ('movie_id[0]', params.id)])
    content = App.http_request(API, 'POST', request_data)
    if content:
        movie = json.loads(content)['json'][0]['response']['movie']
        cover = '{0}/{1}'.format(URL, movie['covers'][0]['thumbnail'])
        for file in movie['files']:
            duration = ''
            url = P.get_url(action='co_movie_playlist', id=movie['movie_id'])
            if file['is_dir']:
                label = App.replace_html_codes(
                    movie['name'] + ' Плейлист'.decode('utf-8'))
            else:
                label = file['name'] if len(
                    movie['files']) > 1 else movie['name']
                # rewrite the server-local path into a streamable URL
                url = file['path'].replace('/home/video/',
                                           'http://p0.oc.kg:8080/')
                duration = file['metainfo']['playtime_seconds']
            items.append({
                'label': label,
                'thumb': cover,
                'art': {
                    'poster': cover
                },
                'stream_info': {
                    'video': {
                        'duration': duration
                    }
                },
                'info': {
                    'video': {
                        'plot': get_description(movie),
                        'year': movie['year'],
                        'mpaa': movie['mpaa'],
                        'title': movie['name'],
                        'originaltitle': movie['international_name'],
                    }
                },
                'url': url,
                'is_playable': not (file['is_dir']),
                'is_folder': not (file['is_dir']),
            })
            if file['is_dir']:
                break
    return App.create_listing(items, content='movies')
def ns_serial_season_episodes(params):
    """Build and play a playlist of episodes for one (or all) seasons.

    params.season_number == '-1' selects every season. Shows the busy
    dialog while scraping; opens the video playlist window on success.
    """
    items = []
    xbmc.executebuiltin('ActivateWindow(busydialog)')
    content = App.http_request(URL + params.url)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        serial_name = App.bs_get_text(html.find('h1')).strip()
        season_headers = html.find_all(class_='panel-title')
        for season_header in season_headers:
            try:
                season_name = App.bs_get_text(season_header.find('h2'))
                # header text looks like "Сезон N" -- second token is the number
                season_number = season_name.split(' ')[1]
                if params.season_number == '-1' or params.season_number == season_number:
                    episodes_block = season_header.parent.find(
                        class_='videos-pane')
                    for episode in episodes_block.find_all('li'):
                        try:
                            link = episode.find('a').get('href')
                            cover = episode.find('img').get('src')
                            episode_number = App.bs_get_text(
                                episode.find(class_='grey')).split(' ')[0]
                            label = App.replace_html_codes(
                                '%s %sx%s' %
                                (serial_name, season_number, episode_number))
                            items.append(create_movie_item(link, label, cover))
                        except:
                            # skip malformed episode entries but keep going
                            P.log_error(traceback.format_exc())
            except:
                P.log_error(traceback.format_exc())
    xbmc.executebuiltin('Dialog.Close(busydialog)')
    if len(items) > 0:
        App.create_playlist(items)
        xbmc.executebuiltin('ActivateWindow(VideoPlaylist)')
    else:
        App.noty('playlist_empty')
def co_index(params):
    """Static root menu of the co_* site."""
    # NOTE(review): disabled PHPSESSID cookie capture -- kept for reference
    # response = App.http_request(URL, response='headers')
    # if 'set-cookie' in response:
    #     for resp in response['set-cookie'].split(';'):
    #         keyvalue = resp.split('=')
    #         if (keyvalue[0] == 'PHPSESSID'):
    #             STORAGE['cookie'] = resp
    #             break
    return [{
        'label': '[ Поиск ]',
        'icon': App.get_media('find'),
        'url': P.get_url(action='co_search')
    }, {
        'label': App.format_bold('Новинки'),
        'url': P.get_url(action='co_movies', order_id='0')
    }, {
        'label': 'Популярное',
        'url': P.get_url(action='co_bestsellers')
    }, {
        'label': 'По жанрам',
        'url': P.get_url(action='co_genres')
    }, {
        'label': 'Лучшие по версии IMDB',
        'url': P.get_url(action='co_movies', order_id='2')
    }, {
        'label': 'Лучшие по версии КиноПоиск',
        'url': P.get_url(action='co_movies', order_id='9')
    }]
def get_bigger_cover(path):
    """Try to derive a 200px-wide cover URL from a thumbnail path.

    path looks like '<base>_<W>x<H>.<ext>'; several candidate heights are
    probed via get_cover_by_size, then a full-size fallback is verified
    with a HEAD-like GET. Returns the best URL found, or the original
    path appended to URL when nothing better exists.
    """
    url = ''
    if path != '':
        try:
            paths = path.split('_')
            # strip the 4-char extension to get 'WxH'
            nums = paths[1][:-4]
            nums = map(int, nums.split('x'))
            # scale height to a 200px width (Python 2 integer division)
            y = 200 * nums[1] / nums[0]
            url = get_cover_by_size(paths[0], 200, y)
            if url == '':
                url = get_cover_by_size(paths[0], 200, y - 1)
            if url == '':
                url = get_cover_by_size(paths[0], 200, y + 1)
            if url == '':
                # last resort: the full-size image; verify it actually exists
                url = '{0}/{1}/image.jpg'.format(URL, paths[0]).replace('thumbnails', 'images')
                (resp_headers, resp_content) = H.request(url, 'GET')
                if resp_headers.status != 200:
                    url = ''
        except Exception:
            # BUG FIX: format_exc was passed uncalled, logging the function
            # object's repr instead of the traceback text.
            P.log_error(traceback.format_exc())
    return url if url != '' else '{0}/{1}'.format(URL, path)
def ns_serial_season_episodes(params):
    """Build and play a playlist of episodes for one (or all) seasons.

    params.season_number == '-1' selects every season. Shows the busy
    dialog while scraping; opens the video playlist window on success.
    """
    items = []
    xbmc.executebuiltin('ActivateWindow(busydialog)')
    content = App.http_request(URL + params.url)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        serial_name = App.bs_get_text(html.find('h1')).strip()
        season_headers = html.find_all(class_='panel-title')
        for season_header in season_headers:
            try:
                season_name = App.bs_get_text(season_header.find('h2'))
                # header text looks like "Сезон N" -- second token is the number
                season_number = season_name.split(' ')[1]
                if params.season_number == '-1' or params.season_number == season_number:
                    episodes_block = season_header.parent.find(class_='videos-pane')
                    for episode in episodes_block.find_all('li'):
                        try:
                            link = episode.find('a').get('href')
                            cover = episode.find('img').get('src')
                            episode_number = App.bs_get_text(episode.find(class_='grey')).split(' ')[0]
                            label = App.replace_html_codes('%s %sx%s' % (serial_name, season_number, episode_number))
                            items.append(create_movie_item(link, label, cover))
                        except:
                            # skip malformed episode entries but keep going
                            P.log_error(traceback.format_exc())
            except:
                P.log_error(traceback.format_exc())
    xbmc.executebuiltin('Dialog.Close(busydialog)')
    if len(items) > 0:
        App.create_playlist(items)
        xbmc.executebuiltin('ActivateWindow(VideoPlaylist)')
    else:
        App.noty('playlist_empty')
def get_bigger_cover(path):
    """Try to derive a 200px-wide cover URL from a thumbnail path.

    path looks like '<base>_<W>x<H>.<ext>'; several candidate heights are
    probed via get_cover_by_size, then a full-size fallback is verified
    with a HEAD-like GET. Returns the best URL found, or the original
    path appended to URL when nothing better exists.
    """
    url = ''
    if path != '':
        try:
            paths = path.split('_')
            # strip the 4-char extension to get 'WxH'
            nums = paths[1][:-4]
            nums = map(int, nums.split('x'))
            # scale height to a 200px width (Python 2 integer division)
            y = 200 * nums[1] / nums[0]
            url = get_cover_by_size(paths[0], 200, y)
            if url == '':
                url = get_cover_by_size(paths[0], 200, y - 1)
            if url == '':
                url = get_cover_by_size(paths[0], 200, y + 1)
            if url == '':
                # last resort: the full-size image; verify it actually exists
                url = '{0}/{1}/image.jpg'.format(
                    URL, paths[0]).replace('thumbnails', 'images')
                (resp_headers, resp_content) = H.request(url, 'GET')
                if resp_headers.status != 200:
                    url = ''
        except Exception:
            # BUG FIX: format_exc was passed uncalled, logging the function
            # object's repr instead of the traceback text.
            P.log_error(traceback.format_exc())
    return url if url != '' else '{0}/{1}'.format(URL, path)
def co_genres(params):
    """List movie genres with their counts for the co_* site."""
    response = App.http_request(API, 'POST', {'action[0]': 'Video.getGenres'})
    entries = []
    if response:
        genres = json.loads(response)['json'][0]['response']['genres']
        entries = [{
            'label': App.replace_html_codes(
                '[B]%s[/B] %s' % (genre['name'], genre['count'])),
            'url': P.get_url(action='co_movies',
                             order_id='0',
                             genre_id=genre['id'])
        } for genre in genres]
    return App.create_listing(entries)
def co_genres(params):
    """List movie genres with their counts for the co_* site."""
    items = []
    content = App.http_request(API, 'POST', {'action[0]': 'Video.getGenres'})
    if content:
        data = json.loads(content)['json'][0]['response']['genres']
        for genre in data:
            label = App.replace_html_codes('[B]%s[/B] %s' %
                                           (genre['name'], genre['count']))
            items.append({
                'label': label,
                'url': P.get_url(action='co_movies',
                                 order_id='0',
                                 genre_id=genre['id'])
            })
    return App.create_listing(items)
def nm_genres(params):
    """List genres from the nm_* categories menu."""
    items = []
    content = App.http_request(URL + '/movie/')
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_='categories-menu')
        if block is not None:
            for genre in block.find_all('a'):
                label = App.bs_get_text(genre)
                # genre id is the value after '=' in the href query
                id = genre.get('href').split('=')[1]
                items.append({
                    'label': label,
                    'url': P.get_url(action='nm_movies_by_genre', id=id)
                })
    return App.create_listing(items)
def ts_selection(params):
    """List shows from a curated 'selection' page (ts_*)."""
    items = []
    content = App.http_request(URL + params.href)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        for show in html.find_all(class_='selection-show'):
            href = show.find('a').get('href')
            img = show.find('img')
            poster = URL + img.get('src')
            title = show.find('h4').find('a').get_text()
            title_original = App.bs_get_text(show.find('small'))
            description = App.bs_get_text(
                show.find(class_='selection-show-description'))
            # genres = App.STR_NO_DATA
            # country = App.STR_NO_DATA
            # collapse all whitespace runs in the scraped description
            plot = ' '.join(description.split())
            if title_original != '':
                plot = '%s\n\n%s' % (App.format_bold(title_original), plot)
            items.append({
                'label': title,
                'thumb': poster,
                'art': {
                    'poster': poster
                },
                'info': {
                    'video': {
                        'mediatype': 'tvshow',
                        'plot': plot
                    }
                },
                'url': P.get_url(action='ts_tvshow_seasons', href=href)
            })
    return App.create_listing(items, content='tvshows')
def nm_search(params):
    """Prompt for a query and list matching movies (nm_* JSON API)."""
    items = []
    query = App.keyboard(heading='Поиск')
    if query is None:
        # keyboard cancelled -- show nothing, no notification
        pass
    elif query != '':
        content = App.http_request(API + '/?%s' % urllib.urlencode({
            'service': 'home',
            'action': 'search',
            'type': 'movie',
            'query': query,
            'sort': 'desc'
        }))
        if content:
            movies = json.loads(content)['movies']
            for movie in movies:
                cover = movie['preview']
                items.append({
                    'label': movie['title'],
                    'thumb': cover,
                    'art': {
                        'poster': cover
                    },
                    'url': P.get_url(action='nm_movie', id=movie['id'])
                })
        if len(items) == 0:
            App.noty('no_search_results')
    else:
        # empty query entered
        App.noty('no_search_results')
    return App.create_listing(items, content='movies')
def ts_tvshow_seasons(params):
    """List a show's seasons plus a leading 'all seasons' entry (ts_*).

    Scrapes the show page's og: meta tags for title/plot/poster, the tag
    block for country/genres, and each data-season div for episode ids.
    Season items link straight to ts_tvshow_season_episodes (playlists),
    hence is_folder=False.
    """
    items = []
    content = App.http_request(URL + params.href)
    if content:
        # BUG FIX: moved inside the guard -- the original called .replace()
        # before checking `content`, crashing when the request returned None.
        content = content.replace(
            '</span></strong></span></a>',
            '</span></strong></a></span>')  # fix closing tags
        html = BeautifulSoup(content, 'html.parser')
        title = html.find('meta', {'property': 'og:title'}).get('content')
        description = html.find('meta', {
            'property': 'og:description'
        }).get('content').encode('utf-8')
        poster = html.find('meta', {'property': 'og:image'}).get('content')
        country = App.STR_NO_DATA
        genres = []
        tvshow_tags = html.find(class_='app-show-tags')
        if tvshow_tags is not None:
            country = tvshow_tags.find(class_='app-show-tags-flag')
            if country:
                country = country.get('data-original-title')
            else:
                country = App.STR_NO_DATA
            for a in tvshow_tags.find_all('a'):
                href = a.get('href').split('/')
                if href[1] == 'genre':
                    genres.append(a.get_text())
        genres = App.explode_info_string(genres)
        description = App.format_description(
            description=description,
            country=country,
            # Confluence renders genre strings poorly -- suppress there
            genre='' if App.get_skin_name() == 'skin.confluence' else genres)
        meta_premiered = ''
        year = html.find('h3')
        if year:
            year = App.bs_get_text(year.find('a'))
            if year:
                meta_premiered = '%s-01-01' % year
        else:
            year = ''
        meta_genres = App.clear_xbmc_tags(genres)
        episodes_ids_all_seasons = []
        episodes_all_seasons = 0
        seasons = html.find_all('div', {'data-season': True})
        for season in seasons:
            season_number = season.get('data-season')
            episodes = season.find_all(class_="text-primary")
            # assumes element ids carry a fixed 23-char prefix before the
            # numeric episode id -- TODO confirm against the site markup
            episodes_ids = [episode.get('id')[23:] for episode in episodes]
            # BUG FIX: the original concatenated per-season id strings with
            # `+=` and no separator, fusing the last id of one season with
            # the first id of the next; collect ids in a list instead.
            episodes_ids_all_seasons.extend(episodes_ids)
            episodes_ids = ','.join(episodes_ids)
            episodes_all_seasons += len(episodes)
            items.append({
                'label': 'Сезон {0}'.format(season_number),
                'icon': poster,
                'fanart': poster,
                'art': {
                    'poster': poster
                },
                'info': {
                    'video': {
                        'plot': description,
                        'year': year,
                        'genre': meta_genres,
                        'episode': str(len(episodes))
                    }
                },
                'url': P.get_url(action='ts_tvshow_season_episodes',
                                 season=season_number,
                                 episodes_ids=episodes_ids),
                'is_folder': False
            })
        items.insert(
            0, {
                'label': App.replace_html_codes('%s Все сезоны'.decode('utf-8')
                                                % App.format_bold(title)),
                'thumb': poster,
                'art': {
                    'poster': poster
                },
                'info': {
                    'video': {
                        'mediatype': 'tvshow',
                        'plot': description,
                        'premiered': meta_premiered,
                        'genre': meta_genres,
                        'episode': '10'
                    }
                },
                'properties': {
                    'TotalSeasons': str(len(seasons)),
                    'TotalEpisodes': str(episodes_all_seasons)
                },
                'url': P.get_url(action='ts_tvshow_season_episodes',
                                 season='all',
                                 episodes_ids=','.join(
                                     episodes_ids_all_seasons)),
                'is_folder': False
            })
    return App.create_listing(items, content='tvshows')
def ts_last_added(params):
    """List recently added episodes from the /news page (ts_*)."""
    items = []
    content = App.http_request(URL + '/news')
    if content:
        html = BeautifulSoup(content, 'html.parser')
        for block in html.find_all(class_='app-news-block'):
            date = App.bs_get_text(block.find(class_='app-news-date'))
            for list_item in block.find_all(class_='app-news-list-item'):
                badge = get_news_item_badge(list_item)
                link = list_item.find(class_='app-news-link')
                if not link:
                    continue
                label = App.bs_get_text(link)
                sub_label = App.bs_get_text(list_item.find('small'))
                genres = link.get('title').replace(
                    ', ', App.STR_LIST_DELIMITER).encode('utf-8')
                # label = "<day> <bold title> <subtitle> <bold badge>",
                # dropping any empty parts; date[:2] keeps the day-of-month
                label = [
                    date[:2],
                    App.format_bold(label), sub_label,
                    App.format_bold(badge)
                ]
                label = filter(None, label)
                label = App.replace_html_codes(' '.join(label))
                item_url = P.get_url(action='add_favorite')
                href = link.get('href')
                hrefs = href.split('/')
                href1 = hrefs[1]
                if href1 == 'selection':
                    item_url = P.get_url(action='ts_selection', href=href)
                elif href1 == 'show':
                    if len(hrefs) == 3:
                        item_url = P.get_url(action='ts_tvshow_seasons',
                                             href=href)
                    else:
                        # episode-level link: truncate to the show page
                        item_url = P.get_url(action='ts_tvshow_seasons',
                                             href='/'.join(hrefs[:3]))
                description = App.format_description(genre=genres)
                items.append({
                    'label': label,
                    'info': {
                        'video': {
                            'plot': '' if App.get_skin_name() == 'skin.confluence' else description,
                            'genre': App.clear_xbmc_tags(genres)
                        }
                    },
                    'url': item_url,
                })
    return App.create_listing(items, content='tvshows')
def co_movies(params):
    """Paginated movie catalog for the co_* site.

    params.page is an integer string or 'search' (prompt for a page
    number). Pages are 0-based internally, 1-based in the UI.
    """
    if 'genre_id' not in params:
        params.genre_id = ''
    # refresh the listing in place when navigating between pages
    params.refresh = 'page' in params
    items = []
    size = 40
    try:
        if params.page == 'search':
            last_page = int(params.last_page) - 1
            page = int(App.keyboard(heading='Перейти на страницу', numeric=True)) - 1
            if page > last_page:
                page = last_page
            if page < 0:
                page = 0
            params.page = str(page)
        else:
            page = int(params.page)
    except:
        # missing 'page' param or cancelled keyboard -> first page
        page = 0
    offset = page * size
    request_data = sorted([
        ('action[0]', 'Video.getCatalog'),
        ('offset[0]', offset),
        ('size[0]', size),
        ('order[0]', params.order_id),
        ('genre[0]', params.genre_id)
    ])
    content = App.http_request(API, 'POST', request_data)
    if content:
        data = json.loads(content)['json'][0]['response']
        for movie in data['movies']:
            plot = get_description(movie)
            id = movie['movie_id']
            cover = get_bigger_cover(movie['cover'])
            label = App.format_bold(movie['name'])
            items.append(
                {
                    'label': label,
                    'thumb': cover,
                    'fanart': cover,
                    'art': {
                        'poster': cover,
                    },
                    'info': {
                        'video': {
                            'plot': plot,
                            'year': movie['year'],
                            'genre': ' / '.join(movie['genres'][:2]) if 'genres' in movie else ''
                        }
                    },
                    'url': P.get_url(action='co_movie', id=id)
                }
            )
        movies_count = int(data['total'])
        add_pagination(
            items,
            get_pagination((offset / size + 1), movies_count, size, 1, params)
        )
        if movies_count > size:
            total_pages = int(math.ceil(movies_count / size + 1))
            items.insert(0, {
                'label': App.replace_html_codes('%s %d / %d' % ('[ Перейти на страницу ]'.decode('utf-8'), page + 1, total_pages)),
                'url': P.get_url(action='co_movies', order_id=params.order_id, genre_id=params.genre_id, page='search', last_page=total_pages)
            })
    return App.create_listing(items, content='movies', update_listing=params.refresh)
def co_movies(params):
    """Paginated movie catalog for the co_* site.

    params.page is an integer string or 'search' (prompt for a page
    number). Pages are 0-based internally, 1-based in the UI.
    """
    if 'genre_id' not in params:
        params.genre_id = ''
    # refresh the listing in place when navigating between pages
    params.refresh = 'page' in params
    items = []
    size = 40
    try:
        if params.page == 'search':
            last_page = int(params.last_page) - 1
            page = int(
                App.keyboard(heading='Перейти на страницу', numeric=True)) - 1
            if page > last_page:
                page = last_page
            if page < 0:
                page = 0
            params.page = str(page)
        else:
            page = int(params.page)
    except:
        # missing 'page' param or cancelled keyboard -> first page
        page = 0
    offset = page * size
    request_data = sorted([('action[0]', 'Video.getCatalog'),
                           ('offset[0]', offset), ('size[0]', size),
                           ('order[0]', params.order_id),
                           ('genre[0]', params.genre_id)])
    content = App.http_request(API, 'POST', request_data)
    if content:
        data = json.loads(content)['json'][0]['response']
        for movie in data['movies']:
            plot = get_description(movie)
            id = movie['movie_id']
            cover = get_bigger_cover(movie['cover'])
            label = App.format_bold(movie['name'])
            items.append({
                'label': label,
                'thumb': cover,
                'fanart': cover,
                'art': {
                    'poster': cover,
                },
                'info': {
                    'video': {
                        'plot': plot,
                        'year': movie['year'],
                        'genre': ' / '.join(movie['genres'][:2]) if 'genres' in movie else ''
                    }
                },
                'url': P.get_url(action='co_movie', id=id)
            })
        movies_count = int(data['total'])
        add_pagination(
            items,
            get_pagination((offset / size + 1), movies_count, size, 1, params))
        if movies_count > size:
            total_pages = int(math.ceil(movies_count / size + 1))
            items.insert(
                0, {
                    'label': App.replace_html_codes(
                        '%s %d / %d' %
                        ('[ Перейти на страницу ]'.decode('utf-8'), page + 1,
                         total_pages)),
                    'url': P.get_url(action='co_movies',
                                     order_id=params.order_id,
                                     genre_id=params.genre_id,
                                     page='search',
                                     last_page=total_pages)
                })
    return App.create_listing(items, content='movies',
                              update_listing=params.refresh)
def ns_serial_seasons(params):
    """List a serial's seasons plus a leading 'all seasons' entry (ns_*)."""
    items = []
    content = App.http_request(URL + params.url)
    if content:
        html = BeautifulSoup(content, 'html.parser')
        name = App.bs_get_text(html.find('h1')).strip()
        description = html.find(class_='serial-description')
        year = ''
        plot = App.STR_NO_DATA
        cover = ''
        if description is not None:
            cover = description.find('img').get('src')
            description_text = App.bs_get_text(
                description.find(class_='text')).encode('utf-8')
            # description is plain text; pull labelled fields line by line
            year = re_compile_or_no_data('Год(.+?)\n', description_text)
            country = re_compile_or_no_data('Страна(.+?)\n', description_text)
            if country == App.STR_NO_DATA:
                country = re_compile_or_no_data('Производство(.+?)\n',
                                                description_text)
            genre = re_compile_or_no_data('Жанр(.+?)\n', description_text)
            director = re_compile_or_no_data('Режиссер(.+?)\n',
                                             description_text)
            plot = re_compile_or_no_data('\n(.+?)\n \n', description_text)
            plot = App.format_description(description=plot,
                                          country=country,
                                          genre=genre,
                                          director=director)
        season_headers = html.find_all(class_='panel-title')
        items.append({
            'label': App.replace_html_codes('%s Все сезоны'.decode('utf-8')
                                            % App.format_bold(name)),
            'thumb': cover,
            'art': {
                'poster': cover
            },
            'info': {
                'video': {
                    'plot': plot,
                    'year': year
                }
            },
            # season_number='-1' means "all seasons" downstream
            'url': P.get_url(action='ns_serial_season_episodes',
                             url=params.url,
                             season_number='-1')
        })
        for season_header in season_headers:
            label = App.bs_get_text(season_header.find('h2'))
            # header text looks like "Сезон N" -- second token is the number
            season_number = label.split(' ')[1]
            items.append({
                'label': label,
                'thumb': cover,
                'art': {
                    'poster': cover
                },
                'url': P.get_url(action='ns_serial_season_episodes',
                                 url=params.url,
                                 season_number=season_number)
            })
    return App.create_listing(items, content='tvshows')
def get_shows_by_page(page):
    """Scrape one catalog page of shows, append to `items`, then recurse.

    NOTE(review): `params` and `items` are not defined in this scope --
    this looks like a closure originally nested inside the ts_category
    handler; verify before calling it standalone. Recursion stops when a
    page has no content or no '#shows' block.
    """
    content = App.http_request(URL + '/show?' + urllib.urlencode({
        'category': params.id,
        'sortby': 'a',
        'page': page
    }))
    if content:
        html = BeautifulSoup(content, 'html.parser')
        shows = html.find(id='shows')
        if shows is not None:
            for show in shows.find_all(class_='show'):
                tag_a = show.find('a')
                tag_img = tag_a.find('img')
                tag_p = tag_a.find('p')
                episodes = show.find(class_='ec').get_text()
                href = tag_a.get('href')
                poster = URL + tag_img.get('src')
                genres = tag_img.get('title')
                if genres is not None:
                    genres = genres.split(', ')
                    # first entry appears to be a non-genre prefix -- dropped
                    if (len(genres) > 1):
                        del genres[0]
                    genres = App.explode_info_string(genres)
                else:
                    genres = App.STR_NO_DATA
                country = tag_p.find('img')
                if country is not None:
                    country = App.get_country(country.get('alt'))
                else:
                    country = ''
                label = tag_p.get_text()
                description = App.format_description(
                    episodes=episodes,
                    country=country,
                    # Confluence renders genre strings poorly -- suppress there
                    genre='' if App.get_skin_name() == 'skin.confluence' else genres)
                items.append({
                    'label': label,
                    'icon': poster,
                    'fanart': poster,
                    'art': {
                        'poster': poster
                    },
                    'info': {
                        'video': {
                            'mediatype': 'tvshow',
                            'title': label,
                            'genre': App.clear_xbmc_tags(genres),
                            'episode': episodes,
                            'plot': description
                        }
                    },
                    'properties': {
                        'TotalEpisodes': episodes
                    },
                    'url': P.get_url(action='ts_tvshow_seasons', href=href)
                })
            # fetch the next page until an empty one ends the recursion
            get_shows_by_page(page + 1)
def nm_movies_by_genre(params):
    """Paginated movie list for one nm_* genre.

    params.page is an integer string or 'search' (prompt for a page
    number). The total page count is scraped from the paginator script.
    """
    # refresh the listing in place when navigating between pages
    params.refresh = 'page' in params
    items = []
    try:
        if params.page == 'search':
            page = int(
                App.keyboard(heading='Перейти на страницу', numeric=True))
            # BUG FIX: plugin params arrive as strings; the original compared
            # int > str, which is always False in Python 2, so the upper
            # clamp never applied. Coerce to int before comparing.
            total_pages = int(params.total_pages)
            if page > total_pages:
                page = total_pages
            if page < 1:
                page = 1
            params.page = str(page)
        else:
            page = int(params.page)
    except:
        # missing 'page' param or cancelled keyboard -> first page
        page = 1
    content = App.http_request(
        '%s/movie/category.php?%s' % (URL, urllib.urlencode({
            'id': params.id,
            'p': page
        })))
    if content:
        html = BeautifulSoup(content, 'html.parser')
        block = html.find(class_='result-block')
        for movie in block.find_all(class_='thumb'):
            link = movie.find('a')
            img = link.find('img')
            # movie id is the value after '=' in the href query
            id = link.get('href').split('=')[1]
            cover = img.get('src')
            label = img.get('title')
            items.append({
                'label': label,
                'thumb': cover,
                'art': {
                    'poster': cover
                },
                'url': P.get_url(action='nm_movie', id=id)
            })
        # total page count lives in the inline paginator JS call
        total_pages = int(
            re.compile('"paginator_container",\n (\d+?),').findall(
                str(html))[0])
        add_pagination(
            items,
            get_pagination(page, total_pages, 'nm_movies_by_genre', params))
        if total_pages > 1:
            items.insert(
                0, {
                    'label': App.replace_html_codes(
                        '%s %d / %d' %
                        ('[ Перейти на страницу ]'.decode('utf-8'), page,
                         total_pages)),
                    'url': P.get_url(action='nm_movies_by_genre',
                                     id=params.id,
                                     page='search',
                                     total_pages=total_pages)
                })
    return App.create_listing(items, content='movies',
                              update_listing=params.refresh)