def info_to_kodi(cls, data):
    """Convert a raw metadata dict into the Kodi-style layout, in place.

    Builds ``data['info']`` (title/studio/premiered/year/genre/plot/tagline/
    mpaa), flattens director and writer credits, resolves a trailer URL from
    the first ``extras`` entry, normalizes ``actor`` entries into
    ``data['cast']``, and converts ``'landscape'`` artwork aspects to
    ``'fanart'`` before sorting ``data['art']`` by descending score.

    Returns the same (mutated) dict.
    """
    data['info'] = {}
    data['info']['title'] = data['title']
    data['info']['studio'] = data['studio']
    data['info']['premiered'] = data['premiered']
    #if data['info']['premiered'] == '':
    #    data['info']['premiered'] = data['year'] + '-01-01'
    data['info']['year'] = data['year']
    data['info']['genre'] = data['genre']
    data['info']['plot'] = data['plot']
    data['info']['tagline'] = data['tagline']
    data['info']['mpaa'] = data['mpaa']
    if 'director' in data and len(data['director']) > 0:
        # Directors arrive either as dicts ({'name': ...}) or plain strings.
        # FIX: use isinstance instead of `type(x) == type({})`.
        if isinstance(data['director'][0], dict):
            data['info']['director'] = ', '.join(
                tmp['name'] for tmp in data['director']).strip()
        else:
            data['info']['director'] = data['director']
    if 'credits' in data and len(data['credits']) > 0:
        if isinstance(data['credits'][0], dict):
            data['info']['writer'] = [tmp['name'] for tmp in data['credits']]
        else:
            data['info']['writer'] = data['credits']
    if 'extras' in data and data['extras'] is not None and len(
            data['extras']) > 0:
        if data['extras'][0]['mode'] in ['naver', 'youtube']:
            # FIX: the query string previously read 'site={site}¶m={param}'
            # — mojibake of '&param=' ('&para' rendered as the pilcrow sign).
            url = '{ddns}/metadata/api/video?site={site}&param={param}&apikey={apikey}'.format(
                ddns=SystemModelSetting.get('ddns'),
                site=data['extras'][0]['mode'],
                param=data['extras'][0]['content_url'],
                apikey=SystemModelSetting.get('auth_apikey'))
            data['info']['trailer'] = url
        elif data['extras'][0]['mode'] == 'mp4':
            data['info']['trailer'] = data['extras'][0]['content_url']
    data['cast'] = []
    if 'actor' in data and data['actor'] is not None:
        for item in data['actor']:
            entity = {}
            entity['type'] = 'actor'
            entity['role'] = item['role']
            entity['name'] = item['name']
            entity['thumbnail'] = item['thumb']
            data['cast'].append(entity)
    # Prefer 'art'; fall back to 'thumb' (promoting it to 'art').
    if 'art' in data and data['art'] is not None:
        for item in data['art']:
            if item['aspect'] == 'landscape':
                item['aspect'] = 'fanart'
    elif 'thumb' in data and data['thumb'] is not None:
        for item in data['thumb']:
            if item['aspect'] == 'landscape':
                item['aspect'] = 'fanart'
        data['art'] = data['thumb']
    if 'art' in data:
        data['art'] = sorted(data['art'],
                             key=lambda k: k['score'],
                             reverse=True)
    return data
def make_klive(sub):
    """Build an iTunes-style podcast RSS feed of the non-TV (radio) KLive channels.

    Lazily loads the channel list, then emits one <item> per radio channel
    whose enclosure points at this server's /klive/api/url.m3u8 endpoint
    (with apikey appended when API-key auth is enabled).  Returns a Flask XML
    response, or None on error (the exception is logged).
    """
    try:
        from klive.logic_klive import LogicKlive
        from klive.model import ModelSetting as KliveModelSetting
        # Populate the source list on first use.
        if LogicKlive.source_list is None:
            tmp = LogicKlive.channel_load_from_site()
        instance = LogicKlive.source_list['wavve']
        from system.model import ModelSetting as SystemModelSetting
        # Plain element maker for the RSS skeleton...
        tmp = builder.ElementMaker(
            nsmap={'itunes': 'http://www.itunes.com/dtds/podcast-1.0.dtd'})
        root = tmp.rss(version="2.0")
        # ...and a namespaced maker for the itunes:* child elements.
        EE = builder.ElementMaker(
            namespace="http://www.itunes.com/dtds/podcast-1.0.dtd",
            nsmap={'itunes': 'http://www.itunes.com/dtds/podcast-1.0.dtd'})
        # NOTE(review): `E` is not defined in this function — presumably a
        # module-level lxml ElementMaker; confirm it is imported elsewhere.
        channel_tag = (E.channel(
            E.title('KLive Radio'),
            E.link(),
            E.description('KLive Radio'),
            E.language('ko-kr'),
            E.copyright(''),
            EE.subtitle(),
            EE.author(),
            EE.summary('KLive Radio'),
            EE.category('Radio'),
            EE.image(),
            EE.explicit('no'),
            EE.keywords('Radio'),
        ))
        root.append(channel_tag)
        for idx, c in enumerate(instance.get_channel_list()):
            # Only radio channels go into the feed.
            if not c.is_tv:
                logger.debug(c.title)
                logger.debug(c.current)
                logger.debug(c.source_id)
                logger.debug(c.source)
                url = '%s/klive/api/url.m3u8?m=url&s=%s&i=%s' % (
                    SystemModelSetting.get('ddns'), c.source, c.source_id)
                if SystemModelSetting.get_bool('auth_use_apikey'):
                    url += '&apikey=%s' % SystemModelSetting.get(
                        'auth_apikey')
                channel_tag.append(
                    E.item(
                        E.title(c.title),
                        EE.subtitle(c.current),
                        EE.summary(c.current),
                        E.guid(str(idx + 1)),
                        # Fixed +0900 offset (KST).
                        E.pubDate(datetime.now().strftime(
                            '%a, %d %b %Y %H:%M:%S') + ' +0900'),
                        #EE.duration(),
                        E.enclosure(
                            url=url
                        ),  #, length=item['file_size'], type='audio/mp3'),
                        E.description(c.current)))
        return app.response_class(ET.tostring(root,
                                              pretty_print=True,
                                              xml_declaration=True,
                                              encoding="utf-8"),
                                  mimetype='application/xml')
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def info(cls, code, title):
    """Scrape full show metadata from Daum TV search pages.

    Fetches both the tv-program page and the plain search (home) page for
    *code*/*title*, merges the results into an EntityShow (title, studio,
    plot, premiered, thumbnails, kakao extras, cast/staff, episode map,
    kakao/tving ids) and returns
    {'ret': 'success', 'data': show.as_dict()} or
    {'ret': 'exception', 'data': str(exception)}.
    """
    try:
        # Special-case: the bare query '모델' needs disambiguation.
        if title == '모델':
            title = '드라마 모델'
        ret = {}
        show = EntityShow(cls.site_name, code)
        # Ended and currently-airing shows are presented differently;
        # ended shows have no studio on this page.
        url = 'https://search.daum.net/search?w=tv&q=%s&irk=%s&irt=tv-program&DA=TVP' % (
            py_urllib.quote(str(title)), code[2:])
        show.home = url
        root = SiteUtil.get_tree(
            url,
            proxy_url=SystemModelSetting.get('site_daum_proxy'),
            headers=cls.default_headers,
            cookies=SystemLogicSite.get_daum_cookies())
        # The generic search page carries the data missing from the
        # tv-program page (studio, status, broadcast term, ...).
        home_url = 'https://search.daum.net/search?q=%s&irk=%s&irt=tv-program&DA=TVP' % (
            py_urllib.quote(str(title)), code[2:])
        #logger.debug(home_url)
        home_root = SiteUtil.get_tree(
            home_url,
            proxy_url=SystemModelSetting.get('site_daum_proxy'),
            headers=cls.default_headers,
            cookies=SystemLogicSite.get_daum_cookies())
        home_data = cls.get_show_info_on_home(home_root)
        #logger.debug('home_datahome_datahome_datahome_datahome_datahome_datahome_datahome_datahome_data')
        #logger.debug(home_data)
        tags = root.xpath('//*[@id="tv_program"]/div[1]/div[2]/strong')
        if len(tags) == 1:
            show.title = tags[0].text_content().strip()
            show.originaltitle = show.title
            show.sorttitle = show.title
            #unicodedata.normalize('NFKD', show.originaltitle)
            #logger.debug(show.sorttitle)
        """
        tags = root.xpath('//*[@id="tv_program"]/div[1]/div[3]/span')
        # 이 정보가 없다면 종영
        if tags:
            show.studio = tags[0].text_content().strip()
            summary = ''
            for tag in tags:
                entity.plot += tag.text.strip()
                entity.plot += ' '
            match = re.compile(r'(\d{4}\.\d{1,2}\.\d{1,2})~').search(entity.plot)
            if match:
                show.premiered = match.group(1)
        """
        show.studio = home_data['studio']
        show.plot = home_data['desc']
        # Parse 'YYYY.M.D' out of the broadcast term into premiered/year.
        match = re.compile(
            r'(?P<year>\d{4})\.(?P<month>\d{1,2})\.(?P<day>\d{1,2})'
        ).search(home_data['broadcast_term'])
        if match:
            show.premiered = match.group('year') + '-' + match.group(
                'month').zfill(2) + '-' + match.group('day').zfill(2)
            show.year = int(match.group('year'))
        try:
            if show.year == '' and home_data['year'] != 0:
                show.year = home_data['year']
        except:
            pass
        show.status = home_data['status']
        show.genre = [home_data['genre']]
        show.episode = home_data['episode']
        tmp = root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')
        #logger.debug(tmp)
        # Poster thumbnail; best-effort, the image may be absent.
        try:
            show.thumb.append(
                EntityThumb(
                    aspect='poster',
                    value=cls.process_image_url(
                        root.xpath(
                            '//*[@id="tv_program"]/div[1]/div[1]/a/img')
                        [0].attrib['src']),
                    site='daum',
                    score=-10))
        except:
            pass
        # Up to 10 kakao clips become extras ('예고' = teaser → Trailer).
        if True:
            tags = root.xpath('//ul[@class="col_size3 list_video"]/li')
            for idx, tag in enumerate(tags):
                if idx > 9:
                    break
                a_tags = tag.xpath('.//a')
                if len(a_tags) == 2:
                    thumb = cls.process_image_url(
                        a_tags[0].xpath('.//img')[0].attrib['src'])
                    video_url = a_tags[1].attrib['href'].split('/')[-1]
                    title = a_tags[1].text_content()
                    date = cls.change_date(
                        tag.xpath('.//span')[0].text_content().strip())
                    content_type = 'Featurette'
                    if title.find(u'예고') != -1:
                        content_type = 'Trailer'
                    show.extras.append(
                        EntityExtra(content_type,
                                    title,
                                    'kakao',
                                    video_url,
                                    premiered=date,
                                    thumb=thumb))
        # Casting: div[1] and div[2] hold cast and staff lists respectively.
        for i in range(1, 3):
            items = root.xpath('//*[@id="tv_casting"]/div[%s]/ul//li' % i)
            #logger.debug('CASTING ITEM LEN : %s' % len(items))
            for item in items:
                actor = EntityActor(None)
                cast_img = item.xpath('div//img')
                #cast_img = item.xpath('.//img')
                if len(cast_img) == 1:
                    actor.thumb = cls.process_image_url(
                        cast_img[0].attrib['src'])
                    #logger.debug(actor.thumb)
                span_tag = item.xpath('span')
                for span in span_tag:
                    span_text = span.text_content().strip()
                    tmp = span.xpath('a')
                    if len(tmp) == 1:
                        role_name = tmp[0].text_content().strip()
                        tail = tmp[0].tail.strip()
                        # A trailing '역' ("as <role>") marks an actor entry.
                        if tail == u'역':
                            actor.type = 'actor'
                            actor.role = role_name.strip()
                        else:
                            actor.name = role_name.strip()
                    else:
                        if span_text.endswith(u'역'):
                            actor.role = span_text.replace(u'역', '')
                        elif actor.name == '':
                            actor.name = span_text.strip()
                        else:
                            actor.role = span_text.strip()
                # Route by role keyword: 출연=cast, 감독/연출/제작/기획/
                # 책임프로듀서=director, 극본/각본=writer; '인물관계도'
                # ("relationship chart") is a navigation item, not a person.
                if actor.type == 'actor' or actor.role.find(u'출연') != -1:
                    show.actor.append(actor)
                elif actor.role.find(u'감독') != -1 or actor.role.find(
                        u'연출') != -1:
                    show.director.append(actor)
                elif actor.role.find(u'제작') != -1 or actor.role.find(
                        u'기획') != -1 or actor.role.find(u'책임프로듀서') != -1:
                    show.director.append(actor)
                elif actor.role.find(u'극본') != -1 or actor.role.find(
                        u'각본') != -1:
                    show.credits.append(actor)
                elif actor.name != u'인물관계도':
                    show.actor.append(actor)
        # Episodes
        items = root.xpath('//*[@id="clipDateList"]/li')
        #show.extra_info['episodes'] = {}
        # NOTE(review): the init above is commented out — this assumes
        # EntityShow.extra_info already contains an 'episodes' dict;
        # otherwise the assignment below raises KeyError. Confirm.
        for item in items:
            epi = {}
            a_tag = item.xpath('a')
            if len(a_tag) != 1:
                continue
            epi['url'] = 'https://search.daum.net/search%s' % a_tag[
                0].attrib['href']
            # data-clip holds the air date as YYYYMMDD...
            tmp = item.attrib['data-clip']
            epi['premiered'] = tmp[0:4] + '-' + tmp[4:6] + '-' + tmp[6:8]
            # '<n>회' = episode number n.
            match = re.compile(r'(?P<no>\d+)%s' % u'회').search(
                a_tag[0].text_content().strip())
            if match:
                epi['no'] = int(match.group('no'))
                show.extra_info['episodes'][epi['no']] = {
                    'daum': {
                        'code': cls.module_char + cls.site_char + epi['url'],
                        'premiered': epi['premiered']
                    }
                }
        tags = root.xpath(
            '//*[@id="tv_program"]//div[@class="clipList"]//div[@class="mg_expander"]/a'
        )
        show.extra_info['kakao_id'] = None
        if tags:
            tmp = tags[0].attrib['href']
            show.extra_info['kakao_id'] = re.compile(
                '/(?P<id>\d+)/').search(tmp).group('id')
        tags = root.xpath(
            "//a[starts-with(@href, 'http://www.tving.com/vod/player')]")
        #tags = root.xpath('//a[@contains(@href, "tving.com")')
        if tags:
            show.extra_info['tving_episode_id'] = tags[0].attrib[
                'href'].split('/')[-1]
        ret['ret'] = 'success'
        ret['data'] = show.as_dict()
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['data'] = str(exception)
    return ret
def episode_info(cls, episode_code, include_kakao=False, is_ktv=True):
    """Scrape a single Daum episode page into an EntityEpisode.

    *episode_code* carries a 2-char module/site prefix which is stripped
    before use (the remainder is the page URL).  When *include_kakao* is
    True, up to 10 kakao clips are attached as 'Featurette' extras.
    *is_ktv* controls whether the scraped sub-title is appended to the
    date-based title or replaces it.  Returns
    {'ret': 'success', 'data': entity.as_dict()} or
    {'ret': 'exception', 'data': str(exception)}.
    """
    try:
        ret = {}
        episode_code = episode_code[2:]
        root = SiteUtil.get_tree(
            episode_code,
            proxy_url=SystemModelSetting.get('site_daum_proxy'),
            headers=cls.default_headers,
            cookies=SystemLogicSite.get_daum_cookies())
        items = root.xpath('//div[@class="tit_episode"]')
        entity = EntityEpisode(cls.site_name, episode_code)
        if len(items) == 1:
            # '<n>회' in the <strong> → episode number.
            tmp = items[0].xpath('strong')
            if len(tmp) == 1:
                episode_frequency = tmp[0].text_content().strip()
                match = re.compile(r'(\d+)').search(episode_frequency)
                if match:
                    entity.episode = int(match.group(1))
            # NOTE: the trailing space in "txt_date " is intentional — the
            # page uses two distinct class strings for the two date spans.
            tmp = items[0].xpath('span[@class="txt_date "]')
            date1 = ''
            if len(tmp) == 1:
                date1 = tmp[0].text_content().strip()
                entity.premiered = cls.change_date(date1.split('(')[0])
                entity.title = date1
            tmp = items[0].xpath('span[@class="txt_date"]')
            if len(tmp) == 1:
                date2 = tmp[0].text_content().strip()
                entity.title = ('%s %s' % (date1, date2)).strip()
        items = root.xpath('//p[@class="episode_desc"]')
        if len(items) == 1:
            tmp = items[0].xpath('strong')
            if len(tmp) == 1:
                # The page renders a literal 'None' when no sub-title exists.
                title = tmp[0].text_content().strip()
                if title != 'None':
                    if is_ktv:
                        entity.title = '%s %s' % (entity.title, title)
                    else:
                        entity.title = title
                else:
                    if is_ktv == False:
                        entity.title = ''
        summary2 = '\r\n'.join(
            txt.strip()
            for txt in root.xpath('//p[@class="episode_desc"]/text()'))
        entity.plot = '%s\r\n%s' % (entity.title, summary2)
        items = root.xpath('//*[@id="tv_episode"]/div[2]/div[1]/div/a/img')
        if len(items) == 1:
            entity.thumb.append(
                EntityThumb(aspect='landscape',
                            value=cls.process_image_url(
                                items[0].attrib['src']),
                            site=cls.site_name,
                            score=-10))
        if include_kakao:
            # Up to 10 related kakao clips become extras.
            tags = root.xpath('//*[@id="tv_episode"]/div[3]/div/ul/li')
            for idx, tag in enumerate(tags):
                if idx > 9:
                    break
                a_tags = tag.xpath('.//a')
                if len(a_tags) == 2:
                    thumb = cls.process_image_url(
                        a_tags[0].xpath('.//img')[0].attrib['src'])
                    #video_url = cls.get_kakao_play_url(a_tags[1].attrib['href'])
                    video_url = a_tags[1].attrib['href'].split('/')[-1]
                    title = a_tags[1].text_content()
                    #logger.debug(video_url)
                    date = cls.change_date(
                        tag.xpath('.//span')[0].text_content().strip())
                    entity.extras.append(
                        EntityExtra('Featurette',
                                    title,
                                    'kakao',
                                    video_url,
                                    premiered=date,
                                    thumb=thumb))
        ret['ret'] = 'success'
        ret['data'] = entity.as_dict()
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['data'] = str(exception)
    return ret
def get_show_info_on_home(cls, root):
    """Parse the Daum TV search ("home") result page into a search entity.

    Extracts title/code/status/poster/studio/genre/episode-count/broadcast
    info, the series (season) list — following the "more series" page when
    present — and same-name programs, then returns ``entity.as_dict()``.
    Returns None when the page has no tv-program box or on exception
    (logged).
    """
    try:
        tags = root.xpath('//*[@id="tvpColl"]/div[2]/div/div[1]/span/a')
        # 2019-05-13
        # Some titles (e.g. "일밤- 미스터리 음악쇼 복면가왕") render two <a>
        # tags; use the last one.
        if len(tags) < 1:
            return
        tag_index = len(tags) - 1
        #entity = {}
        entity = EntitySearchItemTvDaum(cls.site_name)
        entity.title = tags[tag_index].text
        # Prefer the title from the href's q= parameter when present.
        match = re.compile(r'q\=(?P<title>.*?)&').search(
            tags[tag_index].attrib['href'])
        if match:
            entity.title = py_urllib.unquote(match.group('title'))
        entity.code = cls.module_char + cls.site_char + re.compile(
            r'irk\=(?P<id>\d+)').search(
                tags[tag_index].attrib['href']).group('id')
        # Status badge: '방송종료'/'완결' = ended(2), '방송예정' = upcoming(0).
        tags = root.xpath('//*[@id="tvpColl"]/div[2]/div/div[1]/span/span')
        if len(tags) == 1:
            if tags[0].text == u'방송종료' or tags[0].text == u'완결':
                entity.status = 2
            elif tags[0].text == u'방송예정':
                entity.status = 0
        #entity.image_url = 'https:' + root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')[0].attrib['src']
        # Poster may be missing (e.g. 악동탐정스 시즌2) — best effort.
        try:
            entity.image_url = cls.process_image_url(
                root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')
                [0].attrib['src'])
        except:
            entity.image_url = None
        #logger.debug('get_show_info_on_home status: %s', entity.status)
        tags = root.xpath('//*[@id="tvpColl"]/div[2]/div/div[1]/div')
        entity.extra_info = SiteUtil.change_html(
            tags[0].text_content().strip())
        #logger.debug('get_show_info_on_home extra_info: %s', entity.extra_info)
        # Studio: usually an <a>; otherwise the first <span>.
        tags = root.xpath('//*[@id="tvpColl"]/div[2]/div/div[1]/div/a')
        if len(tags) == 1:
            entity.studio = tags[0].text
        else:
            tags = root.xpath(
                '//*[@id="tvpColl"]/div[2]/div/div[1]/div/span[1]')
            if len(tags) == 1:
                entity.studio = tags[0].text
        #logger.debug('get_show_info_on_home studio: %s', entity.studio)
        tags = root.xpath('//*[@id="tvpColl"]/div[2]/div/div[1]/div/span')
        extra_infos = [tag.text_content() for tag in tags]
        logger.debug(extra_infos)
        #tmps = extra_infos[1].strip().split(' ')
        # 2021-11-03
        # Chinese broadcasters (e.g. 홍루몽) have no <a> tag, so the
        # broadcaster name ends up in the genre slot; swap when the second
        # field is a foreign-drama genre label.
        entity.genre = extra_infos[0]
        if extra_infos[1] in [
                '미국드라마', '중국드라마', '영국드라마', '일본드라마', '대만드라마', '기타국가드라마'
        ]:
            entity.genre = extra_infos[1]
            entity.studio = extra_infos[0]
        if entity.genre in [
                '미국드라마', '중국드라마', '영국드라마', '일본드라마', '대만드라마', '기타국가드라마'
        ]:
            entity.status = 1
        #logger.debug(tmps)
        #if len(tmps) == 2:
        # '<n>부' in the extra info = total episode count; -1 when absent.
        try:
            entity.episode = int(
                re.compile(r'(?P<epi>\d{1,4})%s' % u'부').search(
                    entity.extra_info).group('epi'))
        except:
            entity.episode = -1
        # NOTE(review): the two replace() calls look like whitespace
        # normalization — the first argument may be U+00A0 (non-breaking
        # space) in the original file; verify encoding.
        entity.broadcast_info = extra_infos[-2].strip().replace(
            ' ', ' ').replace(' ', ' ')
        entity.broadcast_term = extra_infos[-1].split(',')[-1].strip()
        try:
            entity.year = re.compile(r'(?P<year>\d{4})').search(
                extra_infos[-1]).group('year')
        except:
            entity.year = 0
        entity.desc = root.xpath(
            '//*[@id="tv_program"]/div[1]/dl[1]/dd/text()')[0]
        #logger.debug('get_show_info_on_home 1: %s', entity['status'])
        # Series (seasons): seed with this show itself.
        entity.series = []
        try:
            tmp = entity.broadcast_term.split('.')
            if len(tmp) == 2:
                entity.series.append({
                    'title': entity.title,
                    'code': entity.code,
                    'year': entity.year,
                    'status': entity.status,
                    'date': '%s.%s' % (tmp[0], tmp[1])
                })
            else:
                entity.series.append({
                    'title': entity.title,
                    'code': entity.code,
                    'year': entity.year,
                    'status': entity.status,
                    'date': '%s' % (entity.year)
                })
        except Exception as exception:
            logger.debug('Not More!')
            logger.debug(traceback.format_exc())
        tags = root.xpath('//*[@id="tv_series"]/div/ul/li')
        if tags:
            # 2019-03-05 Follow the "more series" (시리즈 더보기) page when
            # it exists so the full season list is scraped.
            try:
                more = root.xpath('//*[@id="tv_series"]/div/div/a')
                if more:
                    url = more[0].attrib['href']
                    if not url.startswith('http'):
                        url = 'https://search.daum.net/search%s' % url
                    #logger.debug('MORE URL : %s', url)
                    if more[0].xpath('span')[0].text == u'시리즈 더보기':
                        #more_root = HTML.ElementFromURL(url)
                        more_root = SiteUtil.get_tree(
                            url,
                            proxy_url=SystemModelSetting.get(
                                'site_daum_proxy'),
                            headers=cls.default_headers,
                            cookies=SystemLogicSite.get_daum_cookies())
                        tags = more_root.xpath('//*[@id="series"]/ul/li')
            except Exception as exception:
                logger.debug('Not More!')
                logger.debug(traceback.format_exc())
        find_1900 = False
        for tag in tags:
            dic = {}
            dic['title'] = tag.xpath('a')[0].text
            #logger.debug(dic['title'])
            dic['code'] = cls.module_char + cls.site_char + re.compile(
                r'irk\=(?P<id>\d+)').search(
                    tag.xpath('a')[0].attrib['href']).group('id')
            if tag.xpath('span'):
                # The year may be missing; '1900' is a sentinel forcing
                # code-based sorting below.
                dic['date'] = tag.xpath('span')[0].text
                if dic['date'] is None:
                    dic['date'] = '1900'
                    find_1900 = True
                else:
                    dic['year'] = re.compile(
                        r'(?P<year>\d{4})').search(
                            dic['date']).group('year')
            else:
                # NOTE(review): this path sets no 'date' key — the
                # item['date'].split below would raise KeyError for such
                # entries; presumably rare. Confirm.
                dic['year'] = None
            entity.series.append(dic)
        # A later season can have a smaller code (e.g. CSI Las Vegas).
        # 2021-03-29 전지적 짝사랑 시점
        if find_1900 or entity.year == 0:
            entity.series = sorted(entity.series,
                                   key=lambda k: int(k['code'][2:]))
        else:
            # 2021-06-06 Penthouse 3: season 2 shows '2021.2' but 3 only
            # '2021', so zero-padding pushed 3 above 2 — within the same
            # year, fall back to sorting by code instead.
            """
            for item in entity.series:
                tmp = item['date'].split('.')
                if len(tmp) == 2:
                    item['sort_value'] = int('%s%s' % (tmp[0],tmp[1].zfill(2)))
                elif len(tmp) == 1:
                    item['sort_value'] = int('%s00' % tmp[0])
            entity.series = sorted(entity.series, key=lambda k: k['sort_value'])
            """
            for item in entity.series:
                tmp = item['date'].split('.')
                if len(tmp) == 2:
                    item['sort_value'] = int(tmp[0])
                elif len(tmp) == 1:
                    item['sort_value'] = int(tmp[0])
            entity.series = sorted(
                entity.series,
                key=lambda k: (k['sort_value'], int(k['code'][2:])))
        # Same-name content ("동명 콘텐츠").
        entity.equal_name = []
        tags = root.xpath(
            u'//div[@id="tv_program"]//dt[contains(text(),"동명 콘텐츠")]//following-sibling::dd'
        )
        if tags:
            tags = tags[0].xpath('*')
            for tag in tags:
                if tag.tag == 'a':
                    dic = {}
                    dic['title'] = tag.text
                    dic['code'] = cls.module_char + cls.site_char + re.compile(
                        r'irk\=(?P<id>\d+)').search(
                            tag.attrib['href']).group('id')
                elif tag.tag == 'span':
                    # '(studio, year)' span; only '(동명프로그램)' entries
                    # (same-name programs) are kept, '(동명회차)' (same-name
                    # episodes) are skipped.
                    match = re.compile(
                        r'\((?P<studio>.*?),\s*(?P<year>\d{4})?\)').search(
                            tag.text)
                    if match:
                        dic['studio'] = match.group('studio')
                        dic['year'] = match.group('year')
                    elif tag.text == u'(동명프로그램)':
                        entity.equal_name.append(dic)
                    elif tag.text == u'(동명회차)':
                        continue
        #logger.debug(entity)
        return entity.as_dict()
    except Exception as exception:
        logger.debug('Exception get_show_info_by_html : %s', exception)
        logger.debug(traceback.format_exc())
def send_telegram_message(self, item):
    """Send a Telegram notification summarizing one bot-download decision.

    Builds a Korean status message from *item* (a download DB row: daum
    metadata, download_status, plex_key, filename, log) and hands it to the
    framework notifier.  Errors are logged, never raised.
    """
    # Exact-match suffixes for each download_status value.
    status_suffix = {
        'True_by_plex_in_lib_multi_epi': '에피소드 멀티파일',
        'False_by_plex_in_one_epi': '에피소드 이미 있음',
        'True_by_plex_in_lib_no_epi': '에피소드 없음',
        'True_blacklist': '블랙리스트에 없음',
        'False_whitelist': '화이트리스트에 없음',
        'False_except_program': '블랙리스트',
        'True_whitelist_program': '화이트리스트',
        'True_whitelist_first_epi': '첫번째 에피소드',
        'False_no_meta': 'Daum 검색 실패',
        'False_except_genre': '블랙리스트 장르',
        'True_whitelist_genre': '화이트리스트 장르',
        'False_not_allow_duplicate_episode': '중복 제외',
        'False_exist_download_quality': '동일 화질 받음',
        'False_not_match_condition_quality': '화질 조건 불일치',
        'False_not_match_condition_include_keyword': '단어 포함 조건',
        'False_match_condition_except_keyword': '단어 제외 조건',
    }
    try:
        lines = '😉 봇 다운로드 - TV\n'
        lines += '정보 : %s (%s), %s회, %s\n' % (
            item.daum_title, item.daum_genre, item.filename_number,
            item.filename_date)
        # Decision prefix from the True/False/other status family.
        status = item.download_status
        if status.startswith('True'):
            status_str = '✔요청 '
        elif status.startswith('False'):
            status_str = '⛔패스 '
        else:
            status_str = '🕛대기 '
        # Plex lookup result: P = program only, E = program + episode,
        # None = program not found.
        if item.plex_key is not None:
            if item.plex_key.startswith('P'):
                status_str += '(PLEX 프로그램⭕ 에피소드❌) '
            elif item.plex_key.startswith('E'):
                status_str += '(PLEX 프로그램⭕ 에피소드⭕) '
        else:
            status_str += '(PLEX 프로그램❌) '
        status_str += status_suffix.get(status, '')
        lines += '결과 : %s\n' % status_str
        lines += '파일명 : %s\n' % item.filename
        lines += '%s/%s/list\n' % (SystemModelSetting.get('ddns'),
                                   package_name)
        #telegram_log += item.download_status + '\n'
        lines += '로그\n' + item.log
        import framework.common.notify as Notify
        Notify.send_message(lines, message_id='bot_downloader_ktv_result')
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def xmltv_php(source):
    """Redirect to the EPG API endpoint for *source*, forwarding the apikey
    as a query parameter when API-key auth is enabled."""
    target = '/%s/%s/api/epg' % (package_name, source)
    if SystemModelSetting.get_bool('auth_use_apikey'):
        target += '?apikey=%s' % SystemModelSetting.get('auth_apikey')
    return redirect(target)
def api(sub):
    """Dispatch /klive/api/<sub> endpoints.

    sub values: 'url.m3u8' (resolve a channel stream; 'plex' mode pipes it
    through ffmpeg as MPEG-TS), 'm3uall'/'m3u'/'m3utvh' (playlist exports),
    'redirect' (proxy-aware fetch-and-return), 'url.mpd' (play info as
    JSON), 'url.strm' (play info as a downloadable .strm file),
    'sinaplayer' (playlist for the Sina player).
    """
    if sub == 'url.m3u8':
        try:
            mode = request.args.get('m')
            source = request.args.get('s')
            source_id = request.args.get('i')
            quality = request.args.get('q')
            #logger.debug('m:%s, s:%s, i:%s', mode, source, source_id)
            action, ret = LogicKlive.get_url(source, source_id, quality,
                                             mode)
            #logger.debug('action:%s, url:%s', action, ret)
            if mode == 'plex':
                # Plex cannot play the raw source; re-request ourselves in
                # 'url' mode and transcode audio through ffmpeg.
                #new_url = '%s/klive/api/url.m3u8?m=web_play&s=%s&i=%s&q=%s' % (SystemModelSetting.get('ddns'), source, source_id, quality)
                new_url = '%s/klive/api/url.m3u8?m=url&s=%s&i=%s&q=%s' % (
                    SystemModelSetting.get('ddns'), source, source_id,
                    quality)
                #logger.debug(SystemModelSetting.get_bool('auth_use_apikey'))
                if SystemModelSetting.get_bool('auth_use_apikey'):
                    new_url += '&apikey=%s' % SystemModelSetting.get(
                        'auth_apikey')

                def generate():
                    # Stream ffmpeg stdout to the client; buffers ~1s, then
                    # bursts the backlog once before settling into a
                    # chunk-at-a-time drip.
                    startTime = time.time()
                    buffer = []
                    sentBurst = False
                    if platform.system() == 'Windows':
                        path_ffmpeg = os.path.join(path_app_root, 'bin',
                                                   platform.system(),
                                                   'ffmpeg.exe')
                    else:
                        path_ffmpeg = 'ffmpeg'
                    #ffmpeg_command = [path_ffmpeg, "-i", new_url, "-c", "copy", "-f", "mpegts", "-tune", "zerolatency", "pipe:stdout"]
                    #ffmpeg_command = [path_ffmpeg, "-i", new_url, "-c:v", "copy", "-c:a", "aac", "-b:a", "128k", "-f", "mpegts", "-tune", "zerolatency", "pipe:stdout"]
                    # 2020-12-17 by 잠자
                    ffmpeg_command = [
                        path_ffmpeg, "-loglevel", "quiet", "-i", new_url,
                        "-c:v", "copy", "-c:a", "aac", "-b:a", "128k",
                        "-f", "mpegts", "-tune", "zerolatency",
                        "pipe:stdout"
                    ]
                    #logger.debug('command : %s', ffmpeg_command)
                    process = subprocess.Popen(ffmpeg_command,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.STDOUT,
                                               bufsize=-1)
                    # Track the child so it can be cleaned up elsewhere.
                    global process_list
                    process_list.append(process)
                    while True:
                        line = process.stdout.read(1024)
                        buffer.append(line)
                        if sentBurst is False and time.time(
                        ) > startTime + 1 and len(buffer) > 0:
                            sentBurst = True
                            for i in range(0, len(buffer) - 2):
                                yield buffer.pop(0)
                        elif time.time() > startTime + 1 and len(
                                buffer) > 0:
                            yield buffer.pop(0)
                        process.poll()
                        # returncode becomes an int once ffmpeg exits.
                        if isinstance(process.returncode, int):
                            if process.returncode > 0:
                                logger.debug('FFmpeg Error :%s',
                                             process.returncode)
                            break

                return Response(stream_with_context(generate()),
                                mimetype="video/MP2T")
            if action == 'redirect':
                return redirect(ret, code=302)
            elif action == 'return_after_read':
                data = LogicKlive.get_return_data(source, source_id, ret,
                                                  mode)
                #logger.debug('Data len : %s', len(data))
                return data, 200, {
                    'Content-Type': 'application/vnd.apple.mpegurl'
                }
            elif action == 'return':
                return ret
            if ret == None:
                return
            if mode == 'url.m3u8':
                return redirect(ret, code=302)
            elif mode == 'lc':
                return ret
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
    elif sub == 'm3uall':
        return LogicKlive.get_m3uall()
    elif sub == 'm3u':
        data = LogicKlive.get_m3u(m3u_format=request.args.get('format'),
                                  group=request.args.get('group'),
                                  call=request.args.get('call'))
        # file=true → download as an attachment instead of inline text.
        if request.args.get('file') == 'true':
            import framework.common.util as CommonUtil
            basename = 'klive_custom.m3u'
            filename = os.path.join(path_data, 'tmp', basename)
            CommonUtil.write_file(data, filename)
            return send_file(filename,
                             as_attachment=True,
                             attachment_filename=basename)
        else:
            return data
    elif sub == 'm3utvh':
        return LogicKlive.get_m3u(for_tvh=True,
                                  m3u_format=request.args.get('format'),
                                  group=request.args.get('group'))
    elif sub == 'redirect':
        # Fetch an arbitrary (URL-encoded) target, optionally through a
        # proxy, and relay body + content type.
        try:
            url = request.args.get('url')
            proxy = request.args.get('proxy')
            proxies = None
            if proxy is not None:
                proxy = py_urllib.unquote(proxy)
                proxies = {"https": proxy, 'http': proxy}
            url = py_urllib.unquote(url)
            #logger.debug('REDIRECT:%s', url)
            res = requests.get(url, proxies=proxies)
            data = res.content
            return data, 200, {'Content-Type': res.headers['Content-Type']}
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
    elif sub == 'url.mpd':
        try:
            mode = request.args.get('m')
            source = request.args.get('s')
            source_id = request.args.get('i')
            quality = request.args.get('q')
            return_format = 'json'
            data = LogicKlive.get_play_info(source,
                                            source_id,
                                            quality,
                                            mode=mode,
                                            return_format=return_format)
            return jsonify(data)
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
    elif sub == 'url.strm':
        try:
            mode = request.args.get('m')
            source = request.args.get('s')
            source_id = request.args.get('i')
            quality = request.args.get('q')
            return_format = 'strm'
            data = LogicKlive.get_play_info(source,
                                            source_id,
                                            quality,
                                            mode=mode,
                                            return_format=return_format)
            #return data
            import framework.common.util as CommonUtil
            from .model import ModelCustom
            # Name the .strm after the custom channel title when known.
            db_item = ModelCustom.get(source, source_id)
            if db_item is not None:
                basename = '%s.strm' % db_item.title
            else:
                basename = '%s.strm' % source_id
            filename = os.path.join(path_data, 'tmp', basename)
            CommonUtil.write_file(data, filename)
            return send_file(filename,
                             as_attachment=True,
                             attachment_filename=basename)
            #return data
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
    elif sub == 'sinaplayer':
        data = LogicKlive.get_m3u_for_sinaplayer()
        return data
def first_menu(sub):
    """Render the plugin's top-level pages.

    sub: 'setting' (settings page, with all API/endpoint URLs precomputed),
    'list', 'custom_create', 'custom_edit' (plain template pages), 'proxy'
    (redirect to the HDHomeRun discover endpoint), 'log', anything else →
    the generic sample page.  Returns None on exception (logged).
    """
    #logger.debug('DETAIL %s %s', package_name, sub)
    try:
        arg = ModelSetting.to_dict()
        arg['package_name'] = package_name
        arg['ddns'] = SystemModelSetting.get('ddns')
        arg['auth_use_apikey'] = str(
            SystemModelSetting.get_bool('auth_use_apikey'))
        arg['auth_apikey'] = SystemModelSetting.get('auth_apikey')
        if sub == 'setting':
            arg['scheduler'] = str(scheduler.is_include(package_name))
            arg['is_running'] = str(scheduler.is_running(package_name))
            ddns = SystemModelSetting.get('ddns')
            # Endpoint URLs shown on the settings page.
            endpoints = {
                'api_m3u': '{ddns}/{package_name}/api/m3u',
                'api_m3utvh': '{ddns}/{package_name}/api/m3utvh',
                'api_m3uall': '{ddns}/{package_name}/api/m3uall',
                'xmltv': '{ddns}/epg/api/klive',
                'plex_proxy': '{ddns}/{package_name}/proxy',
                'wavve_vod': '{ddns}/{package_name}/wavve/api/m3u',
                'tving_vod': '{ddns}/{package_name}/tving/api/m3u',
            }
            for key, template in endpoints.items():
                arg[key] = template.format(ddns=ddns,
                                           package_name=package_name)
            # Append the apikey to the externally-consumed URLs only.
            if SystemModelSetting.get_bool('auth_use_apikey'):
                apikey = SystemModelSetting.get('auth_apikey')
                for tmp in [
                        'api_m3u', 'api_m3uall', 'api_m3utvh', 'xmltv',
                        'wavve_vod', 'tving_vod'
                ]:
                    arg[tmp] += '?apikey={apikey}'.format(apikey=apikey)
            from .source_streamlink import SourceStreamlink
            arg['is_streamlink_installed'] = 'Installed' if SourceStreamlink.is_installed(
            ) else 'Not Installed'
            from .source_youtubedl import SourceYoutubedl
            arg['is_youtubedl_installed'] = 'Installed' if SourceYoutubedl.is_installed(
            ) else 'Not Installed'
        # These four pages all render '<package>_<sub>.html' with arg.
        if sub in ('setting', 'list', 'custom_create', 'custom_edit'):
            return render_template('{package_name}_{sub}.html'.format(
                package_name=package_name, sub=sub),
                                   arg=arg)
        if sub == 'proxy':
            return redirect('/klive/proxy/discover.json')
        if sub == 'log':
            return render_template('log.html', package=package_name)
        return render_template('sample.html',
                               title='%s - %s' % (package_name, sub))
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())