def search(cls, keyword, daum_id=None, year=None, image_mode='0'):
    """Search Daum for a TV show and return a {'ret': ..., 'data': ...} dict.

    ret is 'success' (data holds the parsed show info), 'empty' (no usable
    result) or 'exception' (data holds the error text).
    """
    # BUG FIX: ret used to be assigned inside the try AFTER
    # get_search_name_from_original(); if that call raised, the except
    # handler hit a NameError on ret['ret'].
    ret = {}
    try:
        keyword = cls.get_search_name_from_original(keyword)
        if daum_id is None:
            url = 'https://search.daum.net/search?q=%s' % (py_urllib.quote(str(keyword)))
        else:
            url = 'https://search.daum.net/search?q=%s&irk=%s&irt=tv-program&DA=TVP' % (py_urllib.quote(str(keyword)), daum_id)
        root = SiteUtil.get_tree(url, proxy_url=SystemModelSetting.get('site_daum_proxy'), headers=cls.default_headers, cookies=SystemLogicSite.get_daum_cookies())
        data = cls.get_show_info_on_home(root)
        #logger.debug(data)
        # KD58568 ("Behind the Show"): known bad Daum entry — treat as no result.
        if data is not None and data['code'] in ['KD58568']:
            data = None
        if data is None:
            ret['ret'] = 'empty'
        else:
            ret['ret'] = 'success'
            ret['data'] = data
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['data'] = str(exception)
    return ret
def get_show_info_on_home_title(title, daum_id=None):
    """Fetch the Daum search-result page for a show title as an lxml tree.

    Returns None (implicitly) when the request/parse fails.
    """
    try:
        # Drop the "[ended]" marker Daum appends to finished shows.
        cleaned = title.replace(u'[종영]', '')
        quoted = py_urllib.quote(cleaned.encode('utf8'))
        if daum_id is None:
            url = 'https://search.daum.net/search?q=%s' % quoted
        else:
            url = 'https://search.daum.net/search?q=%s&irk=%s&irt=tv-program&DA=TVP' % (quoted, daum_id)
        return Logic.get_lxml_by_url(url)
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def trans_name(name):
    """Romanize a Korean *name* via the Naver krdict romanization API.

    Tries each configured 'client_id,client_secret' pair in order and
    returns the decoded JSON of the first HTTP-200 response, or None when
    no configured key succeeds.
    """
    trans_papago_key = ModelSetting.get_list('trans_papago_key')
    for tmp in trans_papago_key:
        client_id, client_secret = tmp.split(',')
        try:
            # BUG FIX: an empty/invalid pair used to `return` and abort the
            # whole loop; skip it and try the next configured key instead.
            if client_id == '' or client_id is None or client_secret == '' or client_secret is None:
                continue
            logger.debug(name)
            encText = py_urllib.quote(str(name))
            logger.debug(encText)
            url = "https://openapi.naver.com/v1/krdict/romanization?query=" + encText
            requesturl = py_urllib2.Request(url)
            requesturl.add_header("X-Naver-Client-Id", client_id)
            requesturl.add_header("X-Naver-Client-Secret", client_secret)
            response = py_urllib2.urlopen(requesturl)
            # Python 2's json.load still accepted an encoding argument.
            if sys.version_info[0] == 2:
                data = json.load(response, encoding='utf8')
            else:
                data = json.load(response)
            rescode = response.getcode()
            logger.debug(data)
            if rescode == 200:
                return data
            else:
                continue
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
    return
def plungin_command(req):
    """Proxy a plugin command from the web UI to the Plex SJVA channel plugin.

    'get_plugin_list' is answered from a static GitHub JSON; every other
    command is forwarded to the Plex server's command endpoint and the JSON
    reply is returned. Returns None on failure.
    """
    try:
        command = req.form['cmd']
        if command == 'get_plugin_list':
            url = 'https://raw.githubusercontent.com/soju6jan/sjva_support/master/plex_install_plugin_list.json'
            return {'ret':'success', 'data':requests.get(url).json()['list']}
            #return json.loads(requests.get(url).text)
        else:
            param1 = req.form['param1'] if 'param1' in req.form else ''
            param2 = req.form['param2'] if 'param2' in req.form else ''
            server_url = db.session.query(ModelSetting).filter_by(key='server_url').first().value
            server_token = db.session.query(ModelSetting).filter_by(key='server_token').first().value
            if param1 != '':
                param1 = py_urllib.quote(param1)
            # BUG FIX: the query string contained U+00B6 pilcrow characters
            # ("¶m1=", "¶m2=") — an HTML-entity mangling of '&param1=' /
            # '&param2=' — so Plex never received either parameter.
            url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/command?cmd=%s&param1=%s&param2=%s&X-Plex-Token=%s' % (server_url, command, param1, param2, server_token)
            logger.debug('URL:%s', url)
            request = py_urllib2.Request(url)
            response = py_urllib2.urlopen(request)
            data = response.read()
            data = json.loads(data)
            return data
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def write_blob(self, data, info):
    """Cache uploaded *data* in memcache under a content-derived key.

    Returns (key, thumbnail_key); thumbnail_key is None unless the payload
    is an image and a thumbnail could be generated and cached. Returns
    (None, None) if the blob itself cannot be cached.
    """
    quoted_type = py_urllib.quote(info['type'].encode('utf-8'), '')
    quoted_name = py_urllib.quote(info['name'].encode('utf-8'), '')
    key = quoted_type + '/' + str(hash(data)) + '/' + quoted_name
    try:
        memcache.set(key, data, time=EXPIRATION_TIME)
    except:
        # Best effort: without the blob in cache there is nothing to return.
        return (None, None)
    thumbnail_key = None
    if IMAGE_TYPES.match(info['type']):
        try:
            img = images.Image(image_data=data)
            img.resize(width=THUMB_MAX_WIDTH, height=THUMB_MAX_HEIGHT)
            thumbnail_data = img.execute_transforms()
            thumbnail_key = key + THUMB_SUFFIX
            memcache.set(thumbnail_key, thumbnail_data, time=EXPIRATION_TIME)
        except:
            # Thumbnailing is optional; fall back to no thumbnail.
            thumbnail_key = None
    return (key, thumbnail_key)
def post(self):
    """Handle an upload POST.

    Supports the '_method=DELETE' override, an optional validated redirect
    (with '%s' replaced by the quoted JSON result), and a JSON response
    when the client accepts application/json.
    """
    if self.request.get('_method') == 'DELETE':
        return self.delete()
    s = self.json_stringify({'files': self.handle_upload()})
    redirect = self.request.get('redirect')
    if self.validate_redirect(redirect):
        # Substitute only the first '%s' placeholder with the quoted payload.
        return self.redirect(str(redirect.replace('%s', py_urllib.quote(s, ''), 1)))
    if 'application/json' in self.request.headers.get('Accept'):
        self.response.headers['Content-Type'] = 'application/json'
    self.response.write(s)
def api(sub):
    """API dispatch endpoint; the 'decrypt' sub-route resolves a Tving episode.

    Returns the second element of Tving.get_episode_json()'s result, or the
    stringified exception on failure.
    """
    if sub == 'decrypt':
        try:
            code = request.args.get('c')
            quality = request.args.get('q')
            token = request.args.get('t')
            # NOTE(review): token is built here but never passed to
            # get_episode_json — presumably consumed via shared state or
            # simply dead code; confirm before removing.
            token = '_tving_token=%s' % py_urllib.quote(token)
            ret = Tving.get_episode_json(code, quality)
            return ret[1]
        except Exception as e:
            logger.error('Exception:%s', e)
            logger.error(traceback.format_exc())
            return str(e)
def get_filename_encoding_for_plex(filename):
    """URL-quote *filename* (as UTF-8 bytes) for use in a Plex scanner URL.

    Returns the percent-encoded string; on an encode failure the raw value
    is quoted instead.
    """
    try:
        ret = filename.encode('utf8')
    except Exception as exception:
        # BUG FIX: the old fallback re-ran the identical encode('utf8')
        # (a leftover of a removed cp949 attempt) and left `ret` unbound
        # when that failed too, causing a NameError at return. Quote the
        # original value instead.
        logger.error('Exception1:%s', exception)
        ret = filename
    return py_urllib.quote(ret)
def get_show_info(title, no=None, date=None):
    """Collect Daum 'home' search info plus the detailed TV page for *title*.

    Returns {'home': ..., 'tv': ...} or None (implicitly) on failure.
    """
    try:
        search_name = DaumTV.get_search_name_from_original(title)
        url = 'https://search.daum.net/search?q=%s' % py_urllib.quote(search_name.encode('utf8'))
        root = lxml.html.fromstring(DaumTV.get_html(url))
        return {
            'home': DaumTV.get_show_info_on_home(root),
            'tv': DaumTV.get_daum_tv_info(search_name),
        }
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def daum_get_ratings_list(keyword):
    """Scrape the Daum search page for a TV-schedule keyword (e.g. a weekday
    drama/variety category) and return a list of per-program dicts with
    title, air_time, provider, image, scheduled and ratings. Returns None
    (implicitly) on failure.
    """
    try:
        # drama_keywords = {'월화드라마', '수목드라마', '금요/주말드라마', '일일/아침드라마'}
        # ent_keywords = {'월요일예능', '화요일예능', '수요일예능', '목요일예능', '금요일예능', '토요일예능', '일요일예능'}
        from framework.common.daum import headers, session
        from system.logic_site import SystemLogicSite
        url = 'https://search.daum.net/search?w=tot&q=%s' % py_urllib.quote(keyword.encode('utf8'))
        res = session.get(url, headers=headers, cookies=SystemLogicSite.get_daum_cookies())
        html = res.content
        root = lxml.html.fromstring(html)
        list_program = root.xpath('//ol[@class="list_program item_cont"]/li')
        data = []
        for item in list_program:
            data_item = {}
            # Mandatory fields: IndexError here is caught by the outer except.
            data_item['title'] = item.xpath('./div/strong/a/text()')[0]
            data_item['air_time'] = item.xpath('./div/span[1]/text()')[0]
            data_item['provider'] = item.xpath('./div/span[@class="txt_subinfo"][2]/text()')[0]
            # Optional fields: keep as (possibly empty) lists, unwrapped below.
            data_item['image'] = item.xpath('./a/img/@src')
            data_item['scheduled'] = item.xpath('./div/span[@class="txt_subinfo"]/span[@class="txt_subinfo"]/text()')
            data_item['ratings'] = item.xpath('./div/span[@class="txt_subinfo"][2]/span[@class="f_red"]/text()')
            if len(data_item['image']):
                data_item['image'] = data_item['image'][0]
            else:
                # Placeholder thumbnail when the program has no image.
                data_item['image'] = 'http://www.okbible.com/data/skin/okbible_1/images/common/noimage.gif'
                # data_item['image'] = 'https://search1.daumcdn.net/search/statics/common/pi/thumb/noimage_151203.png'
            if len(data_item['scheduled']):
                data_item['scheduled'] = data_item['scheduled'][0]
            if len(data_item['ratings']):
                data_item['ratings'] = data_item['ratings'][0]
            data.append(data_item)
        return data
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def process_telegram_data(self, data, target=None):
    """Handle a Telegram bot payload describing a new TV torrent.

    Persists it via ModelBotDownloaderKtvItem, optionally notifies the user
    with a download link (or raw magnet on the server), then re-invokes the
    download pipeline and forwards the item to the torrent index on servers.
    """
    try:
        ret = ModelBotDownloaderKtvItem.process_telegram_data(data)
        #logger.debug(ret)
        if ret is not None:
            if ModelSetting.get_bool('receive_info_send_telegram'):
                msg = '😉 TV 정보 수신\n'
                msg += '제목 : %s\n' % data['filename']
                # NOTE(review): this branch is unreachable — the outer
                # `if ret is not None` guarantees ret is set. The duplicate-
                # magnet message presumably belonged at the outer level.
                if ret is None:
                    msg += '중복 마그넷입니다.'
                    #TelegramHandle.sendMessage(msg)
                else:
                    url = '%s/%s/api/%s/add_download?url=%s' % (SystemModelSetting.get('ddns'), package_name, self.name, ret.magnet)
                    if SystemModelSetting.get_bool('auth_use_apikey'):
                        url += '&apikey=%s' % SystemModelSetting.get('auth_apikey')
                    # On the server itself, send the raw magnet; elsewhere a
                    # clickable add-download link.
                    if app.config['config']['is_server']:
                        msg += '\n' + ret.magnet + '\n'
                    else:
                        msg += '\n➕ 다운로드 추가\n<%s>\n' % url
                    try:
                        # Append a Daum info link when the item was matched.
                        if ret.daum_id is not None:
                            url = 'https://search.daum.net/search?w=tv&q=%s&irk=%s&irt=tv-program&DA=TVP' % (py_urllib.quote(ret.daum_title.encode('utf8')), ret.daum_id)
                            msg += '\n● Daum 정보\n%s' % url
                    except Exception as e:
                        logger.error('Exception:%s', e)
                        logger.error(traceback.format_exc())
                ToolBaseNotify.send_message(msg, image_url=ret.daum_poster_url, message_id='bot_downloader_ktv_receive')
            self.invoke()
            try:
                # Server-only: feed the new item into the torrent index.
                if app.config['config']['is_server']:
                    from tool_expand import TorrentProcess
                    TorrentProcess.receive_new_data(ret, package_name)
            except:
                pass
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def get_play_info(source, source_id, quality, mode='url', return_format='json'):
    """Resolve play info for a channel and return it as JSON or a Kodi strm.

    For 'strm', DRM request headers are percent-encoded and joined into an
    inputstream.adaptive license_key line.
    """
    try:
        from .model import ModelCustom
        db_item = ModelCustom.get(source, source_id)
        # 2020-12-18: caching of play info was removed — the URL expires
        # (about a month later playback failed), so always re-resolve.
        #if db_item is not None and db_item.json is not None and quality in db_item.json:
        #    data = db_item.json[quality]
        #else:
        #    data = LogicKlive.get_url(source, source_id, quality, mode)['play_info']
        #    if db_item is not None:
        #        db_item.set_play_info(quality, data)
        data = LogicKlive.get_url(source, source_id, quality, mode)['play_info']
        if db_item is not None:
            db_item.set_play_info(quality, data)
        if return_format == 'json':
            return data
        elif return_format == 'strm':
            headers = []
            for key, value in data['drm_key_request_properties'].items():
                headers.append('{key}={value}'.format(key=key, value=py_urllib.quote(value)))
            # NOTE(review): db_item.title below raises AttributeError when
            # db_item is None — confirm strm is only requested for DB-backed
            # channels.
            tmp = """#EXTM3U
#KODIPROP:inputstreamaddon=inputstream.adaptive
#KODIPROP:inputstream.adaptive.license_type=com.widevine.alpha
#KODIPROP:inputstream.adaptive.manifest_type=mpd
#KODIPROP:inputstream.adaptive.license_key={drm_license_uri}|{headers}|R{{SSM}}|
#EXTINF:-1,{ch_name}
{uri}""".format(uri=data['uri'], drm_license_uri=data['drm_license_uri'], headers='&'.join(headers), ch_name=db_item.title)
            return tmp
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def get_channel_list(cls):
    """Build a list of ModelChannel entries from Wavve's live channel feed.

    Returns an empty list on failure instead of raising.
    """
    # BUG FIX: ret was assigned inside the try after the network call; an
    # early exception made the final `return ret` raise NameError.
    ret = []
    try:
        data = Wavve.live_all_channels()
        for item in data['list']:
            img = 'https://' + item['tvimage'] if item['tvimage'] != '' else ''
            if img != '':
                # Quote only the last path segment (the filename may be Korean).
                tmp = img.split('/')
                tmp[-1] = py_urllib.quote(tmp[-1].encode('utf8'))
                img = '/'.join(tmp)
            c = ModelChannel(cls.source_name, item['channelid'], item['channelname'], img, (item['type'] == 'video'))
            c.current = item['title']
            ret.append(c)
            #logger.debug('%s - %s', item['channelname'], item['tvimage'])
            #if item['channelname'] in ['MBC', 'SBS']:
            #    logger.debug(item)
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
    return ret
def change_redirect_data(cls, data, proxy=None):
    """Rewrite every http... URL in *data* (e.g. an m3u8 body) so it goes
    through this server's /api/redirect endpoint, optionally adding the
    apikey and a proxy parameter. Returns the rewritten text, or None
    (implicitly) on failure.
    """
    try:
        #logger.debug(data)
        from system.model import ModelSetting as SystemModelSetting
        # One match per line: '.*?' is lazy, but the MULTILINE '$' anchor
        # stretches each match to its end-of-line.
        tmp = re.compile(r'http(.*?)$', re.MULTILINE).finditer(data)
        for m in tmp:
            u = m.group(0)
            u2 = '{ddns}/{package_name}/api/redirect?url={url}'.format(ddns=SystemModelSetting.get('ddns'), package_name=package_name, url=py_urllib.quote(u))
            if SystemModelSetting.get_bool('auth_use_apikey'):
                u2 += '&apikey={apikey}'.format(apikey=SystemModelSetting.get('auth_apikey'))
            if proxy is not None:
                u2 += '&proxy=%s' % proxy
            # Note: str.replace rewrites every occurrence of this URL, not
            # just the matched position.
            data = data.replace(u, u2)
        #logger.debug(data)
        return data
    except Exception as e:
        logger.error('Exception:%s', e)
        logger.error(traceback.format_exc())
def send_scan_command(modelfile, plugin_name):
    """Ask the Plex SJVA bundle to wait for *modelfile* and trigger a scan.

    Sends a WaitFile request to the main Plex server (with a completion
    callback back to this plugin) and best-effort copies to any extra
    'scan_server' entries configured as 'url&token' pairs.
    """
    entity = modelfile
    logger.debug('send_scan_command')
    try:
        server_url = db.session.query(ModelSetting).filter_by(key='server_url').first().value
        server_token = db.session.query(ModelSetting).filter_by(key='server_token').first().value
        if server_url == '':
            logger.debug('server_url is empty!')
            return
        callback_url = '%s/%s/api/scan_completed' % (SystemModelSetting.get('ddns'), plugin_name)
        filename = entity.plex_abspath if entity.plex_abspath is not None else os.path.join(entity.scan_abspath, entity.filename)
        logger.debug('send_scan_command PATH:%s ID:%s', entity.plex_abspath, entity.plex_section_id)
        encode_filename = Logic.get_filename_encoding_for_plex(filename)
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/WaitFile?section_id=%s&filename=%s&callback=%s&callback_id=%s&type_add_remove=ADD&call_from=FILE_MANAGER&X-Plex-Token=%s' % (server_url, entity.plex_section_id, encode_filename, py_urllib.quote(callback_url), entity.id, server_token)
        logger.debug('URL:%s', url)
        request = py_urllib2.Request(url)
        response = py_urllib2.urlopen(request)
        data = response.read()
        logger.debug(url)
        logger.debug('_send_scan_command ret:%s', data)
        entity.send_command_time = datetime.now()
        scan_server = db.session.query(ModelSetting).filter_by(key='scan_server').first().value
        if scan_server != '':
            servers = scan_server.split(',')
            for s in servers:
                try:
                    s = s.strip()
                    s_url, s_token = s.split('&')
                    # Secondary servers get no callback — fire and forget.
                    url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/WaitFile?section_id=&filename=%s&callback=&callback_id=&type_add_remove=ADD&call_from=FILE_MANAGER&X-Plex-Token=%s' % (s_url.strip(), encode_filename, s_token.strip())
                    #request = py_urllib2.Request(url)
                    #response = py_urllib2.urlopen(request)
                    #data = response.read()
                    logger.debug(url)
                    res = requests.get(url, timeout=30)
                    # BUG FIX: the format string has two %s placeholders but
                    # only one argument was supplied.
                    logger.debug('scan_server : %s status_code:%s', s, res.status_code)
                except Exception as exception:
                    logger.debug('Exception:%s', exception)
                    logger.debug(traceback.format_exc())
        #DBManager.update_status_download_korea_tv(entity)
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def search_movie(keyword):
    """Search Wavve movies by keyword.

    Returns the cell list from the search API on success; any failure or
    empty result falls through to search2(keyword, 'all').
    """
    try:
        quoted = py_urllib.quote(str(keyword))
        url = 'https://apis.wavve.com/cf/search/list.js?keyword=%s&limit=20&offset=0&orderby=score&type=movie' % quoted
        response = session.get(url, headers=config['headers'])
        data = response.json()
        #logger.debug(url)
        if response.status_code == 200:
            if 'celllist' in data['cell_toplist']:
                return data['cell_toplist']['celllist']
            # No result: surface the API's message if present, then fall back.
            if 'resultcode' in data:
                logger.debug(data['resultmessage'])
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
    return search2(keyword, 'all')
def get_daum_tv_info(search_name, daum_id=None, on_home=False, force_update=False):
    """Scrape the Daum TV page for a show and update/return its
    ModelDaumTVShow entity (title, status, studio, genre, summary, poster
    and the per-date episode list). Cached entities of ended shows are
    returned as-is unless force_update. Returns None when the page has no
    program block, or (implicitly) on exception.
    """
    try:
        logger.debug('get_daum_tv_info 1 %s', search_name)
        #search_name = search_name.replace(u'[종영]', '')
        search_name = Logic.get_search_name_from_original(search_name)
        logger.debug('get_daum_tv_info 2 %s', search_name)
        # 2019-05-14: handles weekday-drama style keywords.
        if not force_update:
            if daum_id is not None:
                entity = ModelDaumTVShow.get(daum_id)
                # status == 1 means the show has ended — cached data is final.
                if entity.update_time is not None and entity.status == 1:
                    return entity
        if daum_id is not None:
            url = 'https://search.daum.net/search?w=tv&q=%s&irk=%s&irt=tv-program&DA=TVP' % (py_urllib.quote(search_name.encode('utf8')), daum_id)
        else:
            url = 'https://search.daum.net/search?w=tv&q=%s' % (py_urllib.quote(search_name.encode('utf8')))
        from system import SystemLogicSite
        data = SystemLogicSite.get_text_daum(url)
        #logger.debug(data)
        logger.error(url)
        # Recover the canonical daum id from the page itself.
        match = re.compile(r'irk\=(?P<id>\d+)').search(data)
        root = lxml.html.fromstring(data)
        daum_id = match.group('id') if match else ''
        entity = ModelDaumTVShow.get(daum_id)
        if not force_update:
            if entity.update_time is not None and entity.status == 1:
                return entity
        items = root.xpath('//*[@id="tv_program"]/div[1]/div[2]/strong')
        logger.error(items)
        if not items:
            return None
        if len(items) == 1:
            entity.title = items[0].text.strip()
            # Characters that break downstream filename matching.
            entity.title = entity.title.replace('?', '').replace(':', '')
        # Status: 0 = airing, 1 = ended ('방송종료'), 2 = upcoming ('방송예정').
        entity.status = 0
        items = root.xpath('//*[@id="tv_program"]/div[1]/div[2]/span')
        if items:
            if items[0].text.strip() == u'방송종료':
                entity.status = 1
            elif items[0].text.strip() == u'방송예정':
                entity.status = 2
        items = root.xpath('//*[@id="tv_program"]/div[1]/div[3]/span')
        # 2019-02-25: this info block disappears once a show has ended.
        if items:
            entity.studio = items[0].text.strip()
            try:
                entity.broadcast_info = items[1].text.strip()
            except:
                pass
            try:
                entity.broadcast_term = items[2].text.strip()
            except:
                pass
            try:
                items = root.xpath('//*[@id="tv_program"]/div[1]/div[2]/span')
            except:
                pass
        else:
            # Fallback: pull studio/term from the generic home search page.
            if on_home:
                logger.debug('on_home : %s', search_name)
                xml_root = Logic.get_show_info_on_home_title(search_name, daum_id=daum_id)
                home_ret = Logic.get_show_info_on_home(xml_root)
                if home_ret:
                    entity.studio = home_ret['studio']
                    entity.broadcast_info = home_ret['broadcast_info']
                    entity.broadcast_term = home_ret['broadcast_term']
                    # Upcoming shows have a truthy items block, so they never
                    # reach this branch.
                    match = re.compile(r'(\d{4}\.\d{1,2}\.\d{1,2})~').search(entity.broadcast_term)
                    if match:
                        entity.start_date = match.group(1)
        items = root.xpath('//*[@id="tv_program"]/div[1]/dl[1]/dd')
        if len(items) == 1:
            entity.genre = items[0].text.strip().split(' ')[0]
            entity.genre = entity.genre.split('(')[0].strip()
        items = root.xpath('//*[@id="tv_program"]/div[1]/dl[2]/dd')
        if len(items) == 1:
            entity.summary = items[0].text.replace(' ', ' ')
        items = root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')
        if len(items) == 1:
            # Protocol-relative poster URLs get an https: prefix.
            entity.poster_url = items[0].attrib['src'] if items[0].attrib['src'].startswith('http') else f"https:{items[0].attrib['src']}"
        items = root.xpath('//*[@id="clipDateList"]/li')
        entity.episode_list = {}
        # Cap the episode history at the most recent 300 entries.
        if len(items) > 300:
            items = items[len(items) - 300:]
        today = int(datetime.now().strftime('%Y%m%d'))
        for item in items:
            try:
                a_tag = item.xpath('a')
                if len(a_tag) == 1:
                    span_tag = a_tag[0].xpath('span[@class="txt_episode"]')
                    if len(span_tag) == 1:
                        if item.attrib['data-clip'] in entity.episode_list:
                            # The same date/episode pair occasionally appears
                            # twice (looks like a Daum bug, e.g. EBS 초대석,
                            # 2010912 ep.28) — skip exact duplicates.
                            if entity.episode_list[item.attrib['data-clip']][0] == span_tag[0].text.strip().replace(u'회', ''):
                                pass
                            else:
                                # Accept only when the episode number is close
                                # to the last recorded one for that date.
                                idx = len(entity.episode_list[item.attrib['data-clip']]) - 1
                                _ = abs(int(entity.episode_list[item.attrib['data-clip']][idx]) - int(span_tag[0].text.strip().replace(u'회', '')))
                                # 2019-06-24: e.g. eps 21, 22 on one date.
                                if _ <= 4:
                                    if item.attrib['data-clip'] != '' and today >= int(item.attrib['data-clip']):
                                        entity.last_episode_date = item.attrib['data-clip']
                                        entity.last_episode_no = span_tag[0].text.strip().replace(u'회', '')
                                    entity.episode_list[item.attrib['data-clip']].append(span_tag[0].text.strip().replace(u'회', ''))
                                else:
                                    # Daum sometimes interleaves an unrelated
                                    # episode — ignore it.
                                    pass
                        else:
                            if item.attrib['data-clip'] != '' and today >= int(item.attrib['data-clip']):
                                entity.last_episode_date = item.attrib['data-clip']
                                entity.last_episode_no = span_tag[0].text.strip().replace(u'회', '')
                            entity.episode_list[item.attrib['data-clip']] = [span_tag[0].text.strip().replace(u'회', '')]
            except Exception as exception:
                logger.error('Exception:%s', exception)
                logger.error(traceback.format_exc())
        # episodes-per-day heuristic: total clip count / distinct dates
        # (exactly half => two broadcasts a day, quarter => four, ...).
        # 2019-06-24 switched from exact division to rounding:
        #if len(entity.episode_list) != 0 and len(items) % len(entity.episode_list) == 0:
        #    entity.episode_count_one_day = len(items) / len(entity.episode_list)
        try:
            if len(entity.episode_list):
                entity.episode_count_one_day = int(round(float(len(items)) / len(entity.episode_list)))
                if entity.episode_count_one_day == 0:
                    entity.episode_count_one_day = 1
            else:
                entity.episode_count_one_day = 1
        except:
            entity.episode_count_one_day = 1
        entity.episode_list_json = json.dumps(entity.episode_list)
        entity.save()
        logger.debug('daum tv len(entity.episode_list) : %s %s %s', len(items), len(entity.episode_list), entity.episode_count_one_day)
        #logger.debug(entity.episode_list)
        #logger.debug(items)
        return entity
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def get_library_key_using_bundle(filepath, section_id=-1):
    """Ask the Plex SJVA bundle which metadata id owns *filepath*.

    Returns the raw response text, or None (implicitly) on failure.
    """
    try:
        quoted_path = py_urllib.quote(filepath.encode('utf8'))
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/db_handle?action=get_metadata_id_by_filepath&args=%s&X-Plex-Token=%s' % (ModelSetting.get('server_url'), quoted_path, ModelSetting.get('server_token'))
        return requests.get(url).text
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def info(cls, code, title):
    """Scrape full metadata for a Daum TV show code into an EntityShow.

    Combines the detailed TV page (w=tv) with the generic home search page,
    collects title/plot/premiere/genre, poster, video extras, cast/crew and
    the dated episode list, plus kakao/tving cross-ids. Returns
    {'ret': 'success'|'exception', 'data': ...}.
    """
    try:
        # Known ambiguous keyword workaround.
        if title == '모델':
            title = '드라마 모델'
        ret = {}
        show = EntityShow(cls.site_name, code)
        # Ended and airing shows expose different markup; ended shows have
        # no studio block, hence the extra home-page scrape below.
        url = 'https://search.daum.net/search?w=tv&q=%s&irk=%s&irt=tv-program&DA=TVP' % (py_urllib.quote(str(title)), code[2:])
        show.home = url
        root = SiteUtil.get_tree(url, proxy_url=SystemModelSetting.get('site_daum_proxy'), headers=cls.default_headers, cookies=SystemLogicSite.get_daum_cookies())
        home_url = 'https://search.daum.net/search?q=%s&irk=%s&irt=tv-program&DA=TVP' % (py_urllib.quote(str(title)), code[2:])
        #logger.debug(home_url)
        home_root = SiteUtil.get_tree(home_url, proxy_url=SystemModelSetting.get('site_daum_proxy'), headers=cls.default_headers, cookies=SystemLogicSite.get_daum_cookies())
        home_data = cls.get_show_info_on_home(home_root)
        #logger.debug(home_data)
        tags = root.xpath('//*[@id="tv_program"]/div[1]/div[2]/strong')
        if len(tags) == 1:
            show.title = tags[0].text_content().strip()
            show.originaltitle = show.title
            show.sorttitle = show.title
            #unicodedata.normalize('NFKD', show.originaltitle)
            #logger.debug(show.sorttitle)
        # Old in-page studio/premiere extraction, superseded by home_data:
        """
        tags = root.xpath('//*[@id="tv_program"]/div[1]/div[3]/span')
        # 이 정보가 없다면 종영
        if tags:
            show.studio = tags[0].text_content().strip()
            summary = ''
            for tag in tags:
                entity.plot += tag.text.strip()
                entity.plot += ' '
            match = re.compile(r'(\d{4}\.\d{1,2}\.\d{1,2})~').search(entity.plot)
            if match:
                show.premiered = match.group(1)
        """
        show.studio = home_data['studio']
        show.plot = home_data['desc']
        match = re.compile(r'(?P<year>\d{4})\.(?P<month>\d{1,2})\.(?P<day>\d{1,2})').search(home_data['broadcast_term'])
        if match:
            show.premiered = match.group('year') + '-' + match.group('month').zfill(2) + '-' + match.group('day').zfill(2)
            show.year = int(match.group('year'))
        try:
            if show.year == '' and home_data['year'] != 0:
                show.year = home_data['year']
        except:
            pass
        show.status = home_data['status']
        show.genre = [home_data['genre']]
        show.episode = home_data['episode']
        tmp = root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')
        #logger.debug(tmp)
        try:
            show.thumb.append(EntityThumb(aspect='poster', value=cls.process_image_url(root.xpath('//*[@id="tv_program"]/div[1]/div[1]/a/img')[0].attrib['src']), site='daum', score=-10))
        except:
            pass
        # Video extras: first 10 clips; '예고' (preview) becomes a Trailer.
        if True:
            tags = root.xpath('//ul[@class="col_size3 list_video"]/li')
            for idx, tag in enumerate(tags):
                if idx > 9:
                    break
                a_tags = tag.xpath('.//a')
                if len(a_tags) == 2:
                    thumb = cls.process_image_url(a_tags[0].xpath('.//img')[0].attrib['src'])
                    video_url = a_tags[1].attrib['href'].split('/')[-1]
                    title = a_tags[1].text_content()
                    date = cls.change_date(tag.xpath('.//span')[0].text_content().strip())
                    content_type = 'Featurette'
                    if title.find(u'예고') != -1:
                        content_type = 'Trailer'
                    show.extras.append(EntityExtra(content_type, title, 'kakao', video_url, premiered=date, thumb=thumb))
        # Cast/crew: the role suffix '역' marks an actor's character name;
        # other role keywords route people to director/credits lists.
        for i in range(1, 3):
            items = root.xpath('//*[@id="tv_casting"]/div[%s]/ul//li' % i)
            #logger.debug('CASTING ITEM LEN : %s' % len(items))
            for item in items:
                actor = EntityActor(None)
                cast_img = item.xpath('div//img')
                #cast_img = item.xpath('.//img')
                if len(cast_img) == 1:
                    actor.thumb = cls.process_image_url(cast_img[0].attrib['src'])
                    #logger.debug(actor.thumb)
                span_tag = item.xpath('span')
                for span in span_tag:
                    span_text = span.text_content().strip()
                    tmp = span.xpath('a')
                    if len(tmp) == 1:
                        role_name = tmp[0].text_content().strip()
                        tail = tmp[0].tail.strip()
                        if tail == u'역':
                            actor.type = 'actor'
                            actor.role = role_name.strip()
                        else:
                            actor.name = role_name.strip()
                    else:
                        if span_text.endswith(u'역'):
                            actor.role = span_text.replace(u'역', '')
                        elif actor.name == '':
                            actor.name = span_text.strip()
                        else:
                            actor.role = span_text.strip()
                if actor.type == 'actor' or actor.role.find(u'출연') != -1:
                    show.actor.append(actor)
                elif actor.role.find(u'감독') != -1 or actor.role.find(u'연출') != -1:
                    show.director.append(actor)
                elif actor.role.find(u'제작') != -1 or actor.role.find(u'기획') != -1 or actor.role.find(u'책임프로듀서') != -1:
                    show.director.append(actor)
                elif actor.role.find(u'극본') != -1 or actor.role.find(u'각본') != -1:
                    show.credits.append(actor)
                elif actor.name != u'인물관계도':
                    show.actor.append(actor)
        # Episode list keyed by episode number, dated from data-clip (YYYYMMDD).
        items = root.xpath('//*[@id="clipDateList"]/li')
        # NOTE(review): the episodes dict initialization is commented out —
        # presumably EntityShow pre-creates extra_info['episodes']; confirm,
        # otherwise the assignment below raises KeyError (swallowed by the
        # outer except).
        #show.extra_info['episodes'] = {}
        for item in items:
            epi = {}
            a_tag = item.xpath('a')
            if len(a_tag) != 1:
                continue
            epi['url'] = 'https://search.daum.net/search%s' % a_tag[0].attrib['href']
            tmp = item.attrib['data-clip']
            epi['premiered'] = tmp[0:4] + '-' + tmp[4:6] + '-' + tmp[6:8]
            match = re.compile(r'(?P<no>\d+)%s' % u'회').search(a_tag[0].text_content().strip())
            if match:
                epi['no'] = int(match.group('no'))
                show.extra_info['episodes'][epi['no']] = {'daum': {'code': cls.module_char + cls.site_char + epi['url'], 'premiered': epi['premiered']}}
        # Cross-site ids: kakao channel id and tving episode id when linked.
        tags = root.xpath('//*[@id="tv_program"]//div[@class="clipList"]//div[@class="mg_expander"]/a')
        show.extra_info['kakao_id'] = None
        if tags:
            tmp = tags[0].attrib['href']
            show.extra_info['kakao_id'] = re.compile('/(?P<id>\d+)/').search(tmp).group('id')
        tags = root.xpath("//a[starts-with(@href, 'http://www.tving.com/vod/player')]")
        #tags = root.xpath('//a[@contains(@href, "tving.com")')
        if tags:
            show.extra_info['tving_episode_id'] = tags[0].attrib['href'].split('/')[-1]
        ret['ret'] = 'success'
        ret['data'] = show.as_dict()
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        ret['ret'] = 'exception'
        ret['data'] = str(exception)
    return ret
def os_path_exists(filepath):
    """Check file existence on the remote Plex host via the SJVA bundle.

    Returns True only when the bundle answers the literal string 'True';
    any error yields False.
    """
    try:
        quoted_path = py_urllib.quote(filepath.encode('utf8'))
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/os_path_exists?filepath=%s&X-Plex-Token=%s' % (ModelSetting.get('server_url'), quoted_path, ModelSetting.get('server_token'))
        return requests.get(url).text == 'True'
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
        return False
def normalize(self, str):
    """Canonicalize percent-encoding: fully unquote *str*, then re-quote
    with no safe characters (so even '/' becomes %2F)."""
    decoded = py_urllib.unquote(str)
    return py_urllib.quote(decoded, '')
def search_movie_web(movie_list, movie_name, movie_year):
    """Search Daum for a movie and append scored candidates to *movie_list*.

    Stage 1: suggest-bar autocomplete (scores 10-95 by rank/year match).
    Stage 2: the '영화 <name>' home search page; an exact-year hit scores
    100, otherwise related ('other works' / series) entries are mined for a
    better-matching page. Returns movie_list sorted by descending score.
    """
    try:
        #movie_list = []
        url = 'https://suggest-bar.daum.net/suggest?id=movie&cate=movie&multiple=1&mod=json&code=utf_in_out&q=%s' % (py_urllib.quote(movie_name.encode('utf8')))
        from system import SystemLogicSite
        data = SystemLogicSite.get_response_daum(url).json()
        #data = get_json(url)
        # Each suggest item is 'title|id|?|year'-style pipe-separated text.
        for index, item in enumerate(data['items']['movie']):
            tmps = item.split('|')
            score = 85 - (index * 5)
            if tmps[0].find(movie_name) != -1 and tmps[3] == movie_year:
                score = 95
            elif tmps[3] == movie_year:
                score = score + 5
            if score < 10:
                score = 10
            MovieSearch.movie_append(movie_list, {'id': tmps[1], 'title': tmps[0], 'year': tmps[3], 'score': score})
    except Exception as exception:
        log_error('Exception:%s', exception)
        log_error(traceback.format_exc())
    try:
        # '%EC%98%81%ED%99%94+' is the pre-quoted prefix '영화 ' (movie).
        url = 'https://search.daum.net/search?nil_suggest=btn&w=tot&DA=SBC&q=%s%s' % ('%EC%98%81%ED%99%94+', py_urllib.quote(movie_name.encode('utf8')))
        ret = MovieSearch.get_movie_info_from_home(url)
        if ret is not None:
            # The title is deliberately NOT compared (subtitles vary);
            # a matching year on the home hit means no further search.
            if ret['year'] == movie_year:
                score = 100
                need_another_search = False
            else:
                score = 90
                need_another_search = True
            MovieSearch.movie_append(movie_list, {'id': ret['daum_id'], 'title': ret['title'], 'year': ret['year'], 'score': score, 'country': ret['country'], 'more': ret['more']})
            log_debug('need_another_search : %s' % need_another_search)
            movie = ret['movie']
            if need_another_search:
                # "Other works" sidebar: entries look like 'Title(Year)'.
                tmp = movie.find('div[@class="coll_etc"]')
                if tmp is not None:
                    tag_list = tmp.findall('.//a')
                    first_url = None
                    for tag in tag_list:
                        match = re.compile(r'(.*?)\((.*?)\)').search(tag.text_content())
                        if match:
                            daum_id = tag.attrib['href'].split('||')[1]
                            score = 80
                            if match.group(1) == movie_name and match.group(2) == movie_year:
                                first_url = 'https://search.daum.net/search?%s' % tag.attrib['href']
                            # NOTE(review): 'first_url is not None' looks
                            # inverted — as written a year-only match can only
                            # overwrite an existing first_url, never set it.
                            # Probably meant 'is None'; confirm before changing.
                            elif match.group(2) == movie_year and first_url is not None:
                                first_url = 'https://search.daum.net/search?%s' % tag.attrib['href']
                            MovieSearch.movie_append(movie_list, {'id': daum_id, 'title': match.group(1), 'year': match.group(2), 'score': score})
                            #results.Append(MetadataSearchResult(id=daum_id, name=match.group(1), year=match.group(2), score=score, lang=lang))
                    log_debug('first_url : %s' % first_url)
                    if need_another_search and first_url is not None:
                        new_ret = MovieSearch.get_movie_info_from_home(first_url)
                        MovieSearch.movie_append(movie_list, {'id': new_ret['daum_id'], 'title': new_ret['title'], 'year': new_ret['year'], 'score': 100, 'country': new_ret['country'], 'more': new_ret['more']})
                # Series box (e.g. franchise entries).
                tmp = movie.find('.//ul[@class="list_thumb list_few"]')
                log_debug('SERIES:%s' % tmp)
                if tmp is not None:
                    tag_list = tmp.findall('.//div[@class="wrap_cont"]')
                    first_url = None
                    score = 80
                    for tag in tag_list:
                        a_tag = tag.find('a')
                        daum_id = a_tag.attrib['href'].split('||')[1]
                        daum_name = a_tag.text_content()
                        span_tag = tag.find('span')
                        year = span_tag.text_content()
                        log_debug('daum_id:%s %s %s' % (daum_id, year, daum_name))
                        if daum_name == movie_name and year == movie_year:
                            first_url = 'https://search.daum.net/search?%s' % a_tag.attrib['href']
                        # NOTE(review): same suspicious 'is not None' as above,
                        # and this branch reads tag.attrib['href'] where the
                        # sibling branch uses a_tag — likely a typo; confirm.
                        elif year == movie_year and first_url is not None:
                            first_url = 'https://search.daum.net/search?%s' % tag.attrib['href']
                        MovieSearch.movie_append(movie_list, {'id': daum_id, 'title': daum_name, 'year': year, 'score': score})
                    log_debug('first_url : %s' % first_url)
                    if need_another_search and first_url is not None:
                        new_ret = MovieSearch.get_movie_info_from_home(first_url)
                        MovieSearch.movie_append(movie_list, {'id': new_ret['daum_id'], 'title': new_ret['title'], 'year': new_ret['year'], 'score': 100, 'country': new_ret['country'], 'more': new_ret['more']})
    except Exception as exception:
        log_error('Exception:%s', exception)
        log_error(traceback.format_exc())
    movie_list = list(reversed(sorted(movie_list, key=lambda k: k['score'])))
    return movie_list
def is_exist_in_library_using_bundle(filepath):
    """Return True when the Plex library already contains *filepath*.

    Asks the SJVA bundle for a count; True only for a parseable count > 0,
    False for '0', unparseable replies, or any request failure.
    """
    try:
        quoted_path = py_urllib.quote(filepath.encode('utf8'))
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/count_in_library?filename=%s&X-Plex-Token=%s' % (ModelSetting.get('server_url'), quoted_path, ModelSetting.get('server_token'))
        count_text = requests.get(url).text
        if count_text == '0':
            return False
        try:
            if int(count_text) > 0:
                return True
        except:
            return False
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
    return False
def send_scan_command2(plugin_name, section_id, filename, callback_id, type_add_remove, call_from, callback_url=None):
    """Tell the Plex SJVA bundle to watch *filename* and scan it into *section_id*.

    Sends the ``WaitFile`` command to the main Plex server, then to every
    extra server listed in the ``scan_server`` setting (comma-separated
    "url&token" pairs).  Per-extra-server failures are logged and skipped.

    Returns the main server's raw response body, or None when the main
    request itself fails.
    """
    logger.debug('send_scan_command2')
    try:
        server_url = db.session.query(ModelSetting).filter_by(key='server_url').first().value
        server_token = db.session.query(ModelSetting).filter_by(key='server_token').first().value
        if callback_url is None:
            callback_url = '%s/%s/api/scan_completed' % (SystemModelSetting.get('ddns'), plugin_name)
        logger.debug('send_scan_command PATH:%s ID:%s', filename, section_id)
        # filename is pre-URL-encoded for Plex by the helper (cp949/utf8
        # handling lives there), so it is NOT quoted again here.
        encode_filename = Logic.get_filename_encoding_for_plex(filename)
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/WaitFile?section_id=%s&filename=%s&callback=%s&callback_id=%s&type_add_remove=%s&call_from=%s&X-Plex-Token=%s' % (server_url, section_id, encode_filename, py_urllib.quote(callback_url), callback_id, type_add_remove, call_from, server_token)
        logger.debug('URL:%s', url)
        response = py_urllib2.urlopen(py_urllib2.Request(url))
        try:
            data = response.read()
        finally:
            # BUGFIX: the response handle was previously leaked.
            response.close()
        logger.debug('_send_scan_command ret:%s', data)
        scan_server = db.session.query(ModelSetting).filter_by(key='scan_server').first().value
        if scan_server != '':
            for s in scan_server.split(','):
                try:
                    s = s.strip()
                    s_url, s_token = s.split('&')
                    url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/WaitFile?section_id=&filename=%s&callback=&callback_id=&type_add_remove=%s&call_from=%s&X-Plex-Token=%s' % (s_url.strip(), encode_filename, type_add_remove, call_from, s_token.strip())
                    response = py_urllib2.urlopen(py_urllib2.Request(url))
                    try:
                        s_data = response.read()
                    finally:
                        response.close()
                    logger.debug('scan_server2 : %s ret:%s', s_url, s_data)
                except Exception as exception:
                    logger.debug('Exception:%s', exception)
                    logger.debug(traceback.format_exc())
        return data
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def search_tv(keyword):
    """Search Wavve for TV programs matching *keyword*.

    Delegates to ``search2(keyword, 'program')``; returns its result, or
    None on failure.

    NOTE: a legacy direct-API path (band.js request) that sat after the
    unconditional return was unreachable dead code — it also carried a
    mojibake '®ion=' parameter — and has been removed.
    """
    try:
        return search2(keyword, 'program')
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
def search_api(cls, keyword):
    """Query the Naver movie-search API for *keyword*.

    Iterates over the configured "client_id,client_secret" key pairs until
    one returns HTTP 200; returns the decoded JSON response, or None when
    no key pair succeeds.
    """
    trans_papago_key = SystemModelSetting.get_list('trans_papago_key')
    for tmp in trans_papago_key:
        client_id, client_secret = tmp.split(',')
        try:
            if client_id == '' or client_id is None or client_secret == '' or client_secret is None:
                # BUGFIX: was `return text` — NameError, `text` was never
                # defined.  Bare return (None) matches trans_name().
                return
            url = "https://openapi.naver.com/v1/search/movie.json?query=%s&display=100" % py_urllib.quote(str(keyword))
            requesturl = py_urllib2.Request(url)
            requesturl.add_header("X-Naver-Client-Id", client_id)
            requesturl.add_header("X-Naver-Client-Secret", client_secret)
            response = py_urllib2.urlopen(requesturl)
            # json.load lost its `encoding` parameter in py3.
            if sys.version_info[0] == 2:
                data = json.load(response, encoding='utf8')
            else:
                data = json.load(response)
            rescode = response.getcode()
            if rescode == 200:
                return data
            else:
                # Try the next configured key pair.
                continue
        except Exception as exception:
            logger.error('Exception:%s', exception)
            logger.error(traceback.format_exc())
def find_by_filename_part(keyword):
    """Look up Plex media whose file path contains *keyword*.

    Runs two ``db_query`` calls against the SJVA Plex bundle — one for
    media/file details, one for external subtitle streams (stream_type_id 3)
    — and merges each subtitle URL into its matching item under ``'sub'``.

    Returns ``{'ret': True, 'list': [...], 'metadata_id': [...]}`` where
    each list item carries metadata_id/media_id/filepath/dir/filename/
    duration/bitrate/created_at/size/size_str/width/height/video_codec/
    audio_codec, or None on any error.
    """
    try:
        # SECURITY NOTE(review): keyword is interpolated straight into the
        # SQL LIKE clause (no escaping of quotes or %).  Callers must not
        # pass untrusted input; left as-is to preserve the bundle API call.
        query = "SELECT metadata_items.id, media_items.id, file, media_items.duration, media_items.bitrate, media_parts.created_at, media_items.size, media_items.width, media_items.height, media_items.video_codec, media_items.audio_codec FROM media_parts, media_items, metadata_items WHERE media_parts.media_item_id = media_items.id and media_items.metadata_item_id = metadata_items.id and LOWER(media_parts.file) LIKE '%{keyword}%' and media_items.width > 0 ORDER BY media_items.bitrate DESC".format(keyword=keyword)
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/db_query?query=%s&X-Plex-Token=%s' % (ModelSetting.get('server_url'), py_urllib.quote(query.encode('utf8')), ModelSetting.get('server_token'))
        data1 = requests.get(url).json()
        query = "SELECT metadata_items.id, media_items.id, file, media_streams.url FROM media_parts, media_items, metadata_items, media_streams WHERE media_streams.media_item_id = media_items.id and media_parts.media_item_id = media_items.id and media_items.metadata_item_id = metadata_items.id and media_streams.stream_type_id = 3 and media_parts.file LIKE '%{keyword}%' ORDER BY media_items.bitrate DESC".format(keyword=keyword)
        url = '%s/:/plugins/com.plexapp.plugins.SJVA/function/db_query?query=%s&X-Plex-Token=%s' % (ModelSetting.get('server_url'), py_urllib.quote(query.encode('utf8')), ModelSetting.get('server_token'))
        data2 = requests.get(url).json()
        ret = {'ret': True, 'list': [], 'metadata_id': []}
        # Rows come back as '|'-joined columns; an empty row marks padding.
        for tmp in data1['data']:
            if tmp == '':
                continue
            tmp = tmp.split('|')
            item = {}
            item['metadata_id'] = '/library/metadata/%s' % tmp[0]
            item['media_id'] = tmp[1]
            item['filepath'] = tmp[2]
            # Split dir/filename on the last separator: '/' for POSIX-style
            # paths (leading '/'), '\' for Windows-style ones.
            if tmp[2][0] == '/':
                lastindex = tmp[2].rfind('/')
            else:
                lastindex = tmp[2].rfind('\\')
            item['dir'] = tmp[2][:lastindex]
            item['filename'] = tmp[2][lastindex+1:]
            item['duration'] = int(tmp[3])
            item['bitrate'] = int(tmp[4])
            item['created_at'] = tmp[5]
            item['size'] = int(tmp[6])
            item['size_str'] = Util.sizeof_fmt(item['size'], suffix='B')
            item['width'] = int(tmp[7])
            item['height'] = int(tmp[8])
            item['video_codec'] = tmp[9]
            item['audio_codec'] = tmp[10]
            ret['list'].append(item)
            if item['metadata_id'] not in ret['metadata_id']:
                ret['metadata_id'].append(item['metadata_id'])
        # Attach each subtitle-stream URL to its matching media entry.
        for tmp in data2['data']:
            if tmp == '':
                continue
            tmp = tmp.split('|')
            for item in ret['list']:
                if item['media_id'] == tmp[1] and item['filepath'] == tmp[2]:
                    item['sub'] = tmp[3]
                    break
        logger.debug(ret)
        return ret
    except Exception as exception:
        logger.error('Exception:%s', exception)
        logger.error(traceback.format_exc())
    return None