def show_process(data, con, cur):
    """Analyze one TV show's Plex metadata bundle and prune unused files.

    Walks the show's ``_combined`` Info.xml plus every season/episode XML,
    optionally repoints artwork columns at remote URLs via a batched SQL
    update, then deletes metadata files the analysis did not mark as used.

    data : mutable state dict carrying the show DB row under 'db', the
           stage under 'command' ('start0'..'start4'), a 'dryrun' flag and
           running counters ('file_count', 'remove_count', ...).  Mutated
           in place ('meta', 'seasons', 'media', 'use_filepath',
           'remove_filepath' are added); returns None.
    con  : sqlite3 connection into the Plex library database.
    cur  : unused here; kept for signature compatibility with callers.

    NOTE(review): stage semantics ('start21', 'start22', ...) are taken
    as-is from the branch tests below — confirm against the caller.
    """
    data['meta'] = {'remove': 0}
    # Plex stores bundles as <base>/<first hash char>/<rest of hash>.bundle
    data['meta']['metapath'] = os.path.join(
        ModelSetting.get('base_path_metadata'), 'TV Shows',
        data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
    data['meta']['total'] = ToolBaseFile.size(start_path=data['meta']['metapath'])
    if data['command'] == 'start0':
        return  # start0: size scan only

    combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                    '_combined', 'Info.xml')
    if os.path.exists(combined_xmlpath) == False:
        return

    data['use_filepath'] = []
    data['remove_filepath'] = []
    data['seasons'] = {}
    data['media'] = {'total': 0, 'remove': 0}

    ret = Task.xml_analysis(combined_xmlpath, data, data)
    if ret == False:
        logger.warning(f"{data['db']['title']} 쇼 분석 실패")
        return

    query = ""

    def _unuse(item):
        # Best-effort: a file replaced by a remote URL no longer needs to be
        # protected from deletion, so drop its paths from use_filepath.
        for key in ('localpath', 'realpath'):
            try:
                data['use_filepath'].remove(item[key])
            except Exception:
                pass

    def _artwork_update(process, db_id, fields):
        # Build one UPDATE for the given (process key, column, drop_local)
        # triples and append it to the outer `query` batch.
        nonlocal query
        sql = 'UPDATE metadata_items SET '
        for key, column, drop_local in fields:
            if process[key]['url'] != '':
                sql += ' {} = "{}", '.format(column, process[key]['url'])
                if drop_local:
                    _unuse(process[key])
        if sql != 'UPDATE metadata_items SET ':
            sql = sql.strip().rstrip(',')
            sql += ' WHERE id = {} ;\n'.format(db_id)
            query += sql

    # metadata_type 3 = season, 4 = episode.
    season_cs = con.execute(
        'SELECT * FROM metadata_items WHERE metadata_type = 3 and parent_id = ? ORDER BY "index"',
        (data['db']['id'], ))
    season_cs.row_factory = dict_factory
    for season in season_cs.fetchall():
        episode_cs = con.execute(
            'SELECT * FROM metadata_items WHERE metadata_type = 4 and parent_id = ? ORDER BY "index"',
            (season['id'], ))
        episode_cs.row_factory = dict_factory
        for episode in episode_cs.fetchall():
            season_index = season['index']
            episode_index = episode['index']
            if episode['index'] == -1:
                # Date-based episode: key it by its air date (YYYY-MM-DD).
                if episode['available_at'] is not None:
                    episode_index = episode['available_at'].split(' ')[0]
                else:
                    episode_index = episode['originally_available_at'].split(' ')[0]
            if season_index not in data['seasons']:
                data['seasons'][season_index] = {'db': season}
                combined_xmlpath = os.path.join(
                    data['meta']['metapath'], 'Contents', '_combined',
                    'seasons', f"{season_index}.xml")
                ret = Task.xml_analysis(combined_xmlpath,
                                        data['seasons'][season_index], data)
                if ret == False:
                    # Season analysis failure is logged but not fatal.
                    logger.warning(combined_xmlpath)
                    logger.warning(
                        f"{data['db']['title']} 시즌 분석 실패 : season_index - {season_index}"
                    )
                data['seasons'][season_index]['episodes'] = {}
            data['seasons'][season_index]['episodes'][episode_index] = {'db': episode}
            combined_xmlpath = os.path.join(
                data['meta']['metapath'], 'Contents', '_combined', 'seasons',
                f"{season_index}", "episodes", f"{episode_index}.xml")
            ret = Task.xml_analysis(
                combined_xmlpath,
                data['seasons'][season_index]['episodes'][episode_index],
                data, is_episode=True)
            if ret == False:
                logger.warning(combined_xmlpath)
                logger.warning(f"{data['db']['title']} 에피소드 분석 실패")

    if data['command'] in ['start22', 'start3', 'start4']:
        # Repoint show-level artwork at remote (http) URLs.
        _artwork_update(data['process'], data['db']['id'], [
            ('poster', 'user_thumb_url', True),
            ('art', 'user_art_url', True),
            ('banner', 'user_banner_url', True),
            ('theme', 'user_music_url', False),  # theme keeps its local file
        ])
        for season_index, season in data['seasons'].items():
            if 'process' not in season:
                continue
            _artwork_update(season['process'], season['db']['id'], [
                ('poster', 'user_thumb_url', True),
                ('art', 'user_art_url', True),
                ('banner', 'user_banner_url', True),
            ])

    if data['command'] in ['start21', 'start22', 'start3', 'start4']:
        for season_index, season in data['seasons'].items():
            for episode_index, episode in season['episodes'].items():
                # Collect this episode's generated media thumbnails.
                media_item_cs = con.execute(
                    'SELECT * FROM media_items WHERE metadata_item_id = ? ORDER BY id',
                    (episode['db']['id'], ))
                media_item_cs.row_factory = dict_factory
                episode['media_list'] = []
                for media_item in media_item_cs.fetchall():
                    media_part_cs = con.execute(
                        'SELECT * FROM media_parts WHERE media_item_id = ? ORDER BY id',
                        (media_item['id'], ))
                    media_part_cs.row_factory = dict_factory
                    for media_part in media_part_cs.fetchall():
                        media_hash = media_part['hash']
                        mediapath = os.path.join(
                            ModelSetting.get('base_path_media'), 'localhost',
                            media_hash[0], f"{media_hash[1:]}.bundle",
                            'Contents', 'Thumbnails', 'thumb1.jpg')
                        if os.path.exists(mediapath):
                            episode['media_list'].append(mediapath)
                            # FIX: accumulate the total; '=' overwrote it on
                            # every thumbnail found.
                            data['media']['total'] += os.path.getsize(mediapath)
                # 2021-11-01 stage 4: upload the media thumbnail to discord
                # and substitute that URL.
                if data['command'] == 'start4' and episode['process']['thumb']['db_type'] == 'media':
                    localpath = os.path.join(
                        ModelSetting.get('base_path_media'), 'localhost',
                        episode['process']['thumb']['db'].replace('media://', ''))
                    if localpath[0] != '/':
                        # Non-POSIX path: assume Windows separators.
                        localpath = localpath.replace('/', '\\')
                    if os.path.exists(localpath):
                        if data['dryrun'] == False:
                            discord_url = ToolExpandDiscord.discord_proxy_image_localfile(localpath)
                            if discord_url is not None:
                                episode['process']['thumb']['url'] = discord_url
                                logger.warning(discord_url)
                    else:
                        logger.warning(f"썸네일 없음 1: {episode['db']['id']}")
                        PlexWebHandle.analyze_by_id(episode['db']['id'])
                if data['command'] == 'start4' and episode['process']['thumb']['db'] == '':
                    logger.warning(f"썸네일 없음 분석 2: {episode['db']['id']}")
                    PlexWebHandle.analyze_by_id(episode['db']['id'])
                if episode['process']['thumb']['url'] != '':
                    query += f'UPDATE metadata_items SET user_thumb_url = "{episode["process"]["thumb"]["url"]}" WHERE id = {episode["db"]["id"]};\n'
                    _unuse(episode['process']['thumb'])
                    if data['command'] in ['start3', 'start4']:
                        for mediafilepath in episode['media_list']:
                            # FIX: the original tested/sized/removed the stale
                            # `mediapath` left over from the loop above instead
                            # of the current `mediafilepath`.
                            if os.path.exists(mediafilepath):
                                data['media']['remove'] += os.path.getsize(mediafilepath)
                                if data['dryrun'] == False:
                                    os.remove(mediafilepath)
                elif episode['process']['thumb']['db'] == '':
                    if len(episode['media_list']) > 0:
                        # No metadata thumb: fall back to the first media thumb.
                        tmp = f"media://{episode['media_list'][0].split('localhost/')[1]}"
                        query += f'UPDATE metadata_items SET user_thumb_url = "{tmp}" WHERE id = {episode["db"]["id"]};\n'
                if data['dryrun'] == False and data['command'] in ['start3', 'start4']:
                    # Drop now-empty thumbnail folders inside the media bundle.
                    for mediafilepath in episode['media_list']:
                        content_folder = os.path.dirname(os.path.dirname(mediafilepath))
                        for base, folders, files in os.walk(content_folder):
                            if not folders and not files:
                                os.removedirs(base)

    if query != '' and data['dryrun'] == False:
        PlexDBHandle.execute_query(query)

    # Delete every metadata file the analysis did not mark as used.
    for base, folders, files in os.walk(data['meta']['metapath']):
        for f in files:
            data['file_count'] += 1
            filepath = os.path.join(base, f)
            if filepath not in data['use_filepath']:
                if os.path.exists(filepath):
                    data['remove_count'] += 1
                    if filepath not in data['remove_filepath']:
                        data['remove_filepath'].append(filepath)
                    if os.path.islink(filepath) == False:
                        data['meta']['remove'] += os.path.getsize(filepath)
                    if data['dryrun'] == False:
                        os.remove(filepath)
    # Prune directories emptied by the removals above.
    for base, folders, files in os.walk(data['meta']['metapath']):
        if not folders and not files:
            os.removedirs(base)
def baseapi(sub):
    """Image/video helper endpoint; dispatches on ``sub``.

    'image'         : rotate or letterbox a landscape poster according to
                      the 'javdb_landscape_poster' setting and serve it.
    'image_proxy'   : download an image with curl (optionally through a
                      caller-supplied proxy) and serve the resulting file.
    'discord_proxy' : redirect to the discord-proxied image URL.
    'video'         : resolve a streaming URL (naver/youtube/kakao) and
                      redirect to it.
    Returns a Flask response, or None after a logged exception.
    """
    try:
        if sub == 'image':
            from PIL import Image
            # 2020-06-02: poster handling when a proxy is in use.
            image_url = request.args.get('url')
            logger.debug(image_url)
            method = ModelSetting.get('javdb_landscape_poster')
            if method == '0':
                # No transformation requested.
                if FileProcess.Vars.proxies is None:
                    return redirect(image_url)
                else:
                    # Still have to fetch through the proxy ourselves.
                    im = Image.open(
                        requests.get(image_url, stream=True,
                                     proxies=FileProcess.Vars.proxies).raw)
                    filename = os.path.join(path_data, 'tmp', 'rotate.jpg')
                    im.save(filename)
                    return send_file(filename, mimetype='image/jpeg')
            im = Image.open(
                requests.get(image_url, stream=True,
                             proxies=FileProcess.Vars.proxies).raw)
            width, height = im.size
            logger.debug(width)
            logger.debug(height)
            if height > width * 1.5:
                # Already portrait enough — serve as-is.
                return redirect(image_url)
            if method == '1':
                if width > height:
                    im = im.rotate(-90, expand=True)
            elif method == '2':
                if width > height:
                    im = im.rotate(90, expand=True)
            elif method == '3':
                # Letterbox onto a 2:3 canvas instead of rotating.
                new_height = int(width * 1.5)
                new_im = Image.new('RGB', (width, new_height))
                new_im.paste(im, (0, int((new_height - height) / 2)))
                im = new_im
            filename = os.path.join(path_data, 'tmp', 'rotate.jpg')
            im.save(filename)
            return send_file(filename, mimetype='image/jpeg')
        elif sub == 'image_proxy':
            from PIL import Image
            image_url = py_urllib.unquote_plus(request.args.get('url'))
            proxy_url = request.args.get('proxy_url')
            if proxy_url is not None:
                # FIX: unquote_plus() was called without its argument, which
                # raised TypeError whenever a proxy_url was supplied.
                proxy_url = py_urllib.unquote_plus(proxy_url)
            logger.debug('image_url : %s', image_url)
            # 2020-09-21: fetch with curl to dodge TLS handshake errors.
            from system.logic_command import SystemLogicCommand
            filename = os.path.join(path_data, 'tmp',
                                    'proxy_%s.jpg' % str(time.time()))
            if proxy_url is not None and proxy_url != '':
                # Alpine docker's wget ignores -e, so curl is used instead.
                command = ['curl', '-o', filename, image_url, '-x', proxy_url]
                logger.debug(' '.join(command))
                ret = SystemLogicCommand.execute_command_return(command)
            else:
                ret = SystemLogicCommand.execute_command_return(
                    ['curl', '-o', filename, image_url])
            return send_file(filename, mimetype='image/jpeg')
        elif sub == 'discord_proxy':
            from tool_expand import ToolExpandDiscord
            image_url = py_urllib.unquote_plus(request.args.get('url'))
            ret = ToolExpandDiscord.discord_proxy_image(image_url)
            # Unreachable re-serving code that followed this return was removed.
            return redirect(ret)
        elif sub == 'video':
            site = request.args.get('site')
            param = request.args.get('param')
            if site == 'naver':
                from lib_metadata import SiteNaverMovie
                ret = SiteNaverMovie.get_video_url(param)
            elif site == 'youtube':
                command = [
                    'youtube-dl', '-f', 'best', '-g',
                    'https://www.youtube.com/watch?v=%s' % request.args.get('param')
                ]
                from system.logic_command import SystemLogicCommand
                ret = SystemLogicCommand.execute_command_return(command).strip()
            elif site == 'kakao':
                # FIX: '&sect' had been mangled into the '§' character;
                # restored the intended '&section=channel' query argument.
                url = 'https://tv.kakao.com/katz/v2/ft/cliplink/{}/readyNplay?player=monet_html5&profile=HIGH&service=kakao_tv&section=channel&fields=seekUrl,abrVideoLocationList&startPosition=0&tid=&dteType=PC&continuousPlay=false&contentType=&{}'.format(
                    param, int(time.time()))
                data = requests.get(url).json()
                ret = data['videoLocation']['url']
            logger.debug(ret)
            return redirect(ret)
    except Exception as e:
        logger.debug('Exception:%s', e)
        logger.debug(traceback.format_exc())
def discord_proxy_image_localfile(cls, filepath):
    # Thin pass-through: hand the local file to the shared Discord proxy
    # helper and return whatever URL it produces.
    from tool_expand import ToolExpandDiscord
    upload = ToolExpandDiscord.discord_proxy_image_localfile
    return upload(filepath)
def api(sub):
    """Plugin API endpoint; dispatches on ``sub``.

    'search'/'update'  : run FileProcess search/update and jsonify the result.
    'image'            : rotate/letterbox a landscape poster per the
                         'javdb_landscape_poster' setting and serve it.
    'image_proxy'      : download an image with curl (through the configured
                         proxy when 'use_proxy' is set) and serve the file.
    'discord_proxy'    : re-serve a discord-proxied image through Pillow.
    Returns a Flask response, or None after a logged exception.
    NOTE(review): closely mirrors ``baseapi`` but reads its proxy/webhook
    configuration from ModelSetting instead of request arguments.
    """
    try:
        if sub == 'search':
            arg = request.args.get('code')
            ret = FileProcess.search(arg)
            # Newest results first.
            ret = list(reversed(ret))
        elif sub == 'update':
            arg = request.args.get('code')
            ret = FileProcess.update(
                arg,
                use_discord_proxy=ModelSetting.get_bool('use_discord_proxy'))
        elif sub == 'image':
            from PIL import Image
            import requests
            # 2020-06-02: poster handling when a proxy is in use.
            image_url = request.args.get('url')
            logger.debug(image_url)
            method = ModelSetting.get('javdb_landscape_poster')
            if method == '0':
                # No transformation requested.
                if FileProcess.Vars.proxies is None:
                    return redirect(image_url)
                else:
                    # Still have to fetch through the proxy ourselves.
                    im = Image.open(
                        requests.get(image_url, stream=True,
                                     proxies=FileProcess.Vars.proxies).raw)
                    filename = os.path.join(path_data, 'tmp', 'rotate.jpg')
                    im.save(filename)
                    return send_file(filename, mimetype='image/jpeg')
            im = Image.open(
                requests.get(image_url, stream=True,
                             proxies=FileProcess.Vars.proxies).raw)
            width, height = im.size
            logger.debug(width)
            logger.debug(height)
            if height > width * 1.5:
                # Already portrait enough — serve as-is.
                return redirect(image_url)
            if method == '1':
                if width > height:
                    im = im.rotate(-90, expand=True)
            elif method == '2':
                if width > height:
                    im = im.rotate(90, expand=True)
            elif method == '3':
                # Letterbox onto a 2:3 canvas instead of rotating.
                new_height = int(width * 1.5)
                new_im = Image.new('RGB', (width, new_height))
                new_im.paste(im, (0, int((new_height - height) / 2)))
                im = new_im
            filename = os.path.join(path_data, 'tmp', 'rotate.jpg')
            im.save(filename)
            return send_file(filename, mimetype='image/jpeg')
        elif sub == 'image_proxy':
            from PIL import Image
            import requests
            #requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'DES-CBC3-SHA'
            image_url = request.args.get('url')
            logger.debug('image_url : %s', image_url)
            # 2020-09-21: fetch with curl to dodge TLS handshake errors.
            from system.logic_command import SystemLogicCommand
            filename = os.path.join(path_data, 'tmp',
                                    'proxy_%s.jpg' % str(time.time()))
            #im = Image.open(requests.get(image_url, stream=True, verify=False, proxies=FileProcess.Vars.proxies).raw)
            #im.save(filename)
            if ModelSetting.get_bool('use_proxy'):
                # Alpine docker's wget ignores the -e option, so curl is used.
                #tmp = image_url.split('//')
                #if len(tmp) == 2:
                #    image_url = tmp[1]
                #command = ['wget', '-O', filename, image_url, '-e', 'use_proxy=yes', '-e', 'http_proxy=%s' % ModelSetting.get('proxy_url').replace('https://', '').replace('http://', '')]
                command = [
                    'curl', '-o', filename, image_url, '-x',
                    ModelSetting.get('proxy_url').replace('https://', '').replace(
                        'http://', '')
                ]
                logger.debug(' '.join(command))
                ret = SystemLogicCommand.execute_command_return(command)
            else:
                # Strip the scheme before handing the URL to curl.
                tmp = image_url.split('//')
                if len(tmp) == 2:
                    image_url = tmp[1]
                ret = SystemLogicCommand.execute_command_return(
                    ['curl', '-o', filename, image_url])
            return send_file(filename, mimetype='image/jpeg')
        elif sub == 'discord_proxy':
            from tool_expand import ToolExpandDiscord
            image_url = request.args.get('url')
            ret = ToolExpandDiscord.discord_proxy_image(
                image_url,
                webhook_url=ModelSetting.get('discord_proxy_webhook_url'))
            #logger.debug(ret)
            #return redirect(ret)
            from PIL import Image
            import requests
            # Re-serve the proxied image locally instead of redirecting.
            im = Image.open(requests.get(ret, stream=True, verify=False).raw)
            filename = os.path.join(path_data, 'tmp', 'proxy.jpg')
            im.save(filename)
            return send_file(filename, mimetype='image/jpeg')
        return jsonify(ret)
    except Exception as e:
        logger.debug('Exception:%s', e)
        logger.debug(traceback.format_exc())
def discord_proxy_image(cls, image_url):
    # Delegate straight to the shared Discord image proxy helper.
    from tool_expand import ToolExpandDiscord
    proxy = ToolExpandDiscord.discord_proxy_image
    return proxy(image_url)
def discord_proxy_set_target_poster(cls, source, target):
    # Register `target` under the poster-specific key derived from `source`.
    from tool_expand import ToolExpandDiscord
    poster_key = source + 'av_poster'
    return ToolExpandDiscord.discord_proxy_set_target(poster_key, target)
def discord_proxy_get_target_poster(cls, image_url):
    # Look up the proxied poster registered under the derived key.
    from tool_expand import ToolExpandDiscord
    poster_key = image_url + 'av_poster'
    return ToolExpandDiscord.discord_proxy_get_target(poster_key)
def artist_process(data, con, cur):
    """Analyze one music artist's Plex metadata bundle (plus each album
    bundle) and prune files the analysis did not mark as used.

    data : mutable state dict carrying the artist DB row under 'db', the
           stage under 'command', a 'dryrun' flag and running counters;
           mutated in place, returns None.
    con  : sqlite3 connection into the Plex library database.
    cur  : unused here; kept for signature compatibility with callers.
    """
    data['meta'] = {'remove': 0}
    # Plex stores bundles as <base>/<first hash char>/<rest of hash>.bundle
    data['meta']['metapath'] = os.path.join(
        ModelSetting.get('base_path_metadata'), 'Artists',
        data['db']['hash'][0], f"{data['db']['hash'][1:]}.bundle")
    data['meta']['total'] = ToolBaseFile.size(
        start_path=data['meta']['metapath'])
    if data['command'] == 'start0':
        # start0: size scan only.
        return
    combined_xmlpath = os.path.join(data['meta']['metapath'], 'Contents',
                                    '_combined', 'Info.xml')
    if os.path.exists(combined_xmlpath) == False:
        return
    data['use_filepath'] = []
    data['remove_filepath'] = []
    # NOTE(review): initialized as a dict here but reassigned to a list
    # below — the dict form looks like leftover code.
    data['albums'] = {}
    ret = Task.xml_analysis(combined_xmlpath, data)
    if ret == False:
        logger.warning(f"{data['db']['title']} 아티스트 분석 실패")
        return
    # 2022-05-11: albums all carry index 1; tracks are already in order,
    # so the ORDER BY "index" variant below was dropped.
    #album_cs = con.execute('SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ? ORDER BY "index"', (data['db']['id'],))
    album_cs = con.execute(
        'SELECT * FROM metadata_items WHERE metadata_type = 9 and parent_id = ?',
        (data['db']['id'], ))
    album_cs.row_factory = dict_factory
    data['albums'] = []
    for album in album_cs.fetchall():
        #album_index = album['index']
        #logger.warning(album_index)
        #if album_index not in data['albums']:
        #data['albums'][album_index] = {'db':album, 'use_filepath':[], 'remove_filepath':[]}
        album_data = {
            'db': album,
            'use_filepath': [],
            'remove_filepath': []
        }
        album_data['meta'] = {'remove': 0}
        album_data['meta']['metapath'] = os.path.join(
            ModelSetting.get('base_path_metadata'), 'Albums',
            album_data['db']['hash'][0],
            f"{album_data['db']['hash'][1:]}.bundle")
        # Album bundle sizes count toward the artist's total.
        data['meta']['total'] += ToolBaseFile.size(
            start_path=album_data['meta']['metapath'])
        combined_xmlpath = os.path.join(album_data['meta']['metapath'],
                                        'Contents', '_combined', 'Info.xml')
        ret = Task.xml_analysis(combined_xmlpath, album_data)
        if ret == False:
            # Failed albums are logged and skipped (not appended).
            logger.warning(combined_xmlpath)
            logger.warning(f"{album_data['db']['title']} 앨범 분석 실패")
        else:
            data['albums'].append(album_data)
    query = ""
    #logger.debug(d(data))
    if data['command'] == 'start2':
        # Repoint artwork columns at remote (http) URLs.
        sql = 'UPDATE metadata_items SET '
        if data['process']['poster']['url'] != '':
            sql += ' user_thumb_url = "{}", '.format(
                data['process']['poster']['url'])
            # Files replaced by a URL no longer need protecting (best-effort).
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['poster']['realpath'])
            except:
                pass
        if data['process']['art']['url'] != '':
            sql += ' user_art_url = "{}", '.format(
                data['process']['art']['url'])
            try:
                data['use_filepath'].remove(
                    data['process']['art']['localpath'])
            except:
                pass
            try:
                data['use_filepath'].remove(
                    data['process']['art']['realpath'])
            except:
                pass
        if sql != 'UPDATE metadata_items SET ':
            sql = sql.strip().rstrip(',')
            sql += ' WHERE id = {} ;\n'.format(data['db']['id'])
            query += sql
        for album in data['albums']:
            if 'process' not in album:
                continue
            sql = 'UPDATE metadata_items SET '
            # Files created by localmedia already have a URL set.
            #if album['process']['poster']['url'] != '':
            # 2022-05-11: for tag-created albums (non-http URL), upload the
            # local poster to discord and use that URL instead.
            if album['process']['poster']['url'] != '' and album[
                    'process']['poster']['url'].startswith(
                        'http') == False:
                if 'localpath' in album['process']['poster'] and album[
                        'process']['poster']['localpath'] != '':
                    localpath = album['process']['poster']['localpath']
                    if localpath[0] != '/':
                        # Non-POSIX path: assume Windows separators.
                        localpath = localpath.replace('/', '\\')
                    if os.path.exists(localpath):
                        if data['dryrun'] == False:
                            discord_url = ToolExpandDiscord.discord_proxy_image_localfile(
                                localpath)
                            if discord_url is not None:
                                album['process']['poster'][
                                    'url'] = discord_url
                                logger.warning(discord_url)
            if album['process']['poster']['url'].startswith('http'):
                sql += ' user_thumb_url = "{}", '.format(
                    album['process']['poster']['url'])
                try:
                    data['use_filepath'].remove(
                        album['process']['poster']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        album['process']['poster']['realpath'])
                except:
                    pass
            if album['process']['art']['url'] != '':
                sql += ' user_art_url = "{}", '.format(
                    album['process']['art']['url'])
                try:
                    data['use_filepath'].remove(
                        album['process']['art']['localpath'])
                except:
                    pass
                try:
                    data['use_filepath'].remove(
                        album['process']['art']['realpath'])
                except:
                    pass
            if sql != 'UPDATE metadata_items SET ':
                sql = sql.strip().rstrip(',')
                sql += ' WHERE id = {} ;\n'.format(album['db']['id'])
                query += sql
    #logger.error(data['command'])
    #logger.error(query)
    if query != '' and data['dryrun'] == False:
        PlexDBHandle.execute_query(query)
    #logger.warning(data['meta']['remove'] )
    # Delete artist-bundle files the analysis did not mark as used.
    for base, folders, files in os.walk(data['meta']['metapath']):
        for f in files:
            data['file_count'] += 1
            filepath = os.path.join(base, f)
            if filepath not in data['use_filepath']:
                if os.path.islink(filepath) and os.path.exists(
                        filepath) == False:
                    # Dangling symlink: remove regardless of dryrun.
                    os.remove(filepath)
                elif os.path.exists(filepath):
                    data['remove_count'] += 1
                    if filepath not in data['remove_filepath']:
                        data['remove_filepath'].append(filepath)
                    if os.path.islink(filepath) == False:
                        data['meta']['remove'] += os.path.getsize(filepath)
                    if data['dryrun'] == False:
                        os.remove(filepath)
    # Same sweep over each album bundle.
    for album in data['albums']:
        for base, folders, files in os.walk(album['meta']['metapath']):
            for f in files:
                #logger.warning(data['file_count'])
                #logger.warning(f)
                data['file_count'] += 1
                filepath = os.path.join(base, f)
                if filepath not in album['use_filepath']:
                    if os.path.islink(filepath) and os.path.exists(
                            filepath) == False:
                        os.remove(filepath)
                    elif os.path.exists(filepath):
                        data['remove_count'] += 1
                        if filepath not in album['remove_filepath']:
                            data['remove_filepath'].append(filepath)
                        if os.path.islink(filepath) == False:
                            data['meta']['remove'] += os.path.getsize(
                                filepath)
                        if data['dryrun'] == False:
                            os.remove(filepath)
                else:
                    # NOTE(review): album-level used files are appended to
                    # the artist-level use_filepath here — confirm intended.
                    data['use_filepath'].append(filepath)
    # Prune directories emptied by the removals above.
    for base, folders, files in os.walk(data['meta']['metapath']):
        if not folders and not files:
            os.removedirs(base)
    for album in data['albums']:
        for base, folders, files in os.walk(album['meta']['metapath']):
            if not folders and not files:
                os.removedirs(base)
def __get_download_list(html, tree, site_instance, item):
    """Extract torrent/subtitle download entries from a board page.

    Scans ``html`` with the site's DOWNLOAD_REGEX (named groups 'url' and
    'filename'), normalizes each link/filename via the optional
    DOWNLOAD_URL_SUB / FILENAME_SUB rules, de-duplicates by link, and —
    when running as server with a magnet present — mirrors subtitle files
    (.smi/.srt/.ass) to a Discord CDN URL in entity['direct_url'].

    html          : raw page HTML.
    tree          : unused here; kept for signature compatibility.
    site_instance : site descriptor; its ``info`` dict drives the regexes.
    item          : the feed item ({'magnet', 'url', 'title', ...}).
    Returns a list of {'link', 'filename'[, 'direct_url']} dicts; errors
    are logged and an empty/partial list is returned.
    """
    download_list = []
    try:
        if 'DOWNLOAD_REGEX' not in site_instance.info:
            return download_list
        #logger.debug(html)
        #tmp = html.find('a href="https://www.rgtorrent.me/bbs/download.php')
        #if tmp != -1:
        #    logger.debug(html[tmp-300:tmp+300])
        #logger.debug(site_instance.info['DOWNLOAD_REGEX'])
        tmp = re.compile(site_instance.info['DOWNLOAD_REGEX'],
                         re.MULTILINE).finditer(html)
        for t in tmp:
            #logger.debug(t.group('url'))
            #logger.debug(t.group('filename'))
            if t.group('filename').strip() == '':
                continue
            entity = {}
            # URL-decode then HTML-unescape both captured fields.
            entity['link'] = py_urllib.unquote(
                t.group('url').strip()).strip()
            entity['link'] = unescape(entity['link'])
            logger.debug(entity['link'])
            entity['filename'] = py_urllib.unquote(
                t.group('filename').strip())
            entity['filename'] = unescape(entity['filename'])
            if 'DOWNLOAD_URL_SUB' in site_instance.info:
                # Site-specific link rewrite; the replacement may embed the
                # site base URL via {URL}.
                logger.debug(entity['link'])
                entity['link'] = re.sub(
                    site_instance.info['DOWNLOAD_URL_SUB'][0],
                    site_instance.info['DOWNLOAD_URL_SUB'][1].format(
                        URL=site_instance.info['TORRENT_SITE_URL']),
                    entity['link']).strip()
            if not entity['link'].startswith('http'):
                # Relative link: prefix the site base URL.
                form = '%s%s' if entity['link'].startswith(
                    '/') else '%s/%s'
                entity['link'] = form % (
                    site_instance.info['TORRENT_SITE_URL'],
                    entity['link'])
            if 'FILENAME_SUB' in site_instance.info:
                entity['filename'] = re.sub(
                    site_instance.info['FILENAME_SUB'][0],
                    site_instance.info['FILENAME_SUB'][1],
                    entity['filename']).strip()
            # De-duplicate by link.
            exist = False
            for tt in download_list:
                if tt['link'] == entity['link']:
                    exist = True
                    break
            if not exist:
                if app.config['config']['is_server'] and len(
                        item['magnet']) > 0:  # or True:
                    try:
                        ext = os.path.splitext(
                            entity['filename'])[1].lower()
                        #item['magnet']
                        if ext in ['.smi', '.srt', '.ass']:
                            #if True:
                            import io
                            if 'USE_SELENIUM' in site_instance.info[
                                    'EXTRA']:
                                # Browser-gated downloads: drive selenium and
                                # pick the file up from the download folder.
                                from system import SystemLogicSelenium
                                driver = SystemLogicSelenium.get_driver()
                                driver.get(entity['link'])
                                import time
                                # NOTE(review): fixed 10s wait for the
                                # download to finish — no completion check.
                                time.sleep(10)
                                files = SystemLogicSelenium.get_downloaded_files(
                                )
                                logger.debug(files)
                                # Locate the downloaded file by name prefix.
                                filename_no_ext = os.path.splitext(
                                    entity['filename'].split('/')[-1])
                                file_index = 0
                                for idx, value in enumerate(files):
                                    if value.find(
                                            filename_no_ext[0]) != -1:
                                        file_index = idx
                                        break
                                logger.debug('fileindex : %s', file_index)
                                content = SystemLogicSelenium.get_file_content(
                                    files[file_index])
                                byteio = io.BytesIO()
                                byteio.write(content)
                            else:
                                # Plain HTTP download, streamed into memory.
                                data = LogicFromSite.get_html(
                                    entity['link'],
                                    referer=item['url'],
                                    stream=True)
                                byteio = io.BytesIO()
                                for chunk in data.iter_content(1024):
                                    byteio.write(chunk)
                            from tool_expand import ToolExpandDiscord
                            # Mirror the subtitle to Discord and keep its CDN URL.
                            entity[
                                'direct_url'] = ToolExpandDiscord.discord_cdn(
                                    byteio=byteio,
                                    filename=entity['filename'],
                                    webhook_url=app.config['config']
                                    ['rss_subtitle_webhook'],
                                    content='%s\n<%s>' %
                                    (item['title'], item['url']))
                    except Exception as e:
                        # Mirroring is best-effort; the entry is kept anyway.
                        logger.debug('Exception:%s', e)
                        logger.debug(traceback.format_exc())
                download_list.append(entity)
        return download_list
    except Exception as e:
        logger.debug('Exception:%s', e)
        logger.debug(traceback.format_exc())
    return download_list