def _set_params(self):
    """Apply the settings dialog.

    Validates the "ip:port" stream address, the default download
    directory, the background and image-fit options, rewrites the
    managed keys in ``other_options.txt`` and pushes the resulting
    values onto ``self.ui`` before hiding the dialog.
    """
    new_ip_val = None
    new_ip_port = None
    torrent_ip = None
    try:
        # Field is expected as "ip:port"; any parse or validation
        # failure falls through to the except block below.
        if ':' in self.set_ip.text():
            new_ip_val, new_ip_port1 = self.set_ip.text().split(':')
            new_ip_port = int(new_ip_port1)
        if ipaddress.ip_address(new_ip_val):
            # raises when new_ip_val is None or not a valid address
            ip = 'LOCAL_STREAM_IP=' + new_ip_val + ':' + str(new_ip_port)
            torrent_ip = 'TORRENT_STREAM_IP=' + new_ip_val + ':' + str(
                self.ui.local_port)
    except Exception as err_val:
        print(err_val, '--ip--find--error--')
        # Fall back to the currently configured stream IP and port 9001.
        ip = 'LOCAL_STREAM_IP=' + self.ui.local_ip_stream
        new_ip_val = self.ui.local_ip_stream
        new_ip_port = 9001
    if os.path.exists(self.set_default_download.text()):
        location = 'DEFAULT_DOWNLOAD_LOCATION=' + self.set_default_download.text()
        location_val = self.set_default_download.text()
    else:
        # Entered path does not exist: keep the previous location.
        location = 'DEFAULT_DOWNLOAD_LOCATION=' + self.ui.default_download_location
        location_val = self.ui.default_download_location
    # Combo box text carries the full "KEEP_BACKGROUND_CONSTANT=..." string
    # (it is split on '=' further below).
    backg = self.backg.currentText()
    img_val = self.img_opt.currentIndex()
    if img_val == 0:
        # index 0 is treated as unset; minimum stored option is 1
        img_val = 1
    img_opt_str = 'IMAGE_FIT_OPTION=' + str(img_val)
    config_file = os.path.join(self.ui.home_folder, 'other_options.txt')
    lines = open_files(config_file, lines_read=True)
    new_lines = []
    # Rewrite only the four managed keys; all other lines are kept as-is.
    for i in lines:
        i = i.strip()
        if i.startswith('LOCAL_STREAM_IP='):
            i = ip
        elif i.startswith('DEFAULT_DOWNLOAD_LOCATION='):
            i = location
        elif i.startswith('KEEP_BACKGROUND_CONSTANT='):
            i = backg
        elif i.startswith('IMAGE_FIT_OPTION='):
            i = img_opt_str
        new_lines.append(i)
    write_files(config_file, new_lines, line_by_line=True)
    self.ui.local_ip_stream = new_ip_val
    self.ui.local_port_stream = new_ip_port
    self.ui.default_download_location = location_val
    self.ui.image_fit_option_val = img_val
    back_g = backg.split('=')[1]
    if back_g == 'no':
        self.ui.keep_background_constant = False
    else:
        self.ui.keep_background_constant = True
    if torrent_ip:
        # IP validated successfully: propagate it to the torrent config too.
        config_file_torrent = os.path.join(self.ui.home_folder,
                                           'torrent_config.txt')
        change_opt_file(config_file_torrent, 'TORRENT_STREAM_IP=', torrent_ip)
        self.ui.local_ip = new_ip_val
    self.hide()
def _set_password(self):
    """Store the media-server credentials.

    Builds an HTTP Basic auth token from the name/password fields,
    hashes it with SHA-256, writes the hash into ``other_options.txt``
    (replacing the existing ``AUTH=`` line) and resets the client
    authorization list to the local addresses.
    """
    text_val = self.text_name.text()
    pass_val = self.text_pass.text()
    if not text_val:
        text_val = ''
    if not pass_val:
        pass_val = ''
    # "user:pass" -> base64 -> "Basic <b64>" (standard Basic-auth header value)
    new_combine = bytes(text_val + ':' + pass_val, 'utf-8')
    new_txt = base64.b64encode(new_combine)
    new_txt_str = 'Basic ' + str(new_txt, 'utf-8')
    #print(new_txt, new_txt_str)
    new_txt_bytes = bytes(str(new_txt_str), 'utf-8')
    #print(new_txt_bytes)
    # Only the SHA-256 of the header value is ever stored.
    h = hashlib.sha256(new_txt_bytes)
    h_digest = h.hexdigest()
    # NOTE(review): the replacement line starts with '******', not 'AUTH=',
    # so the AUTH= key disappears from the file after this substitution.
    # Looks like a redacted/incorrect prefix — confirm against the code
    # that reads AUTH= back.
    new_pass = '******' + h_digest
    config_file = os.path.join(self.ui.home_folder, 'other_options.txt')
    content = open_files(config_file, lines_read=False)
    content = re.sub('AUTH=[^\n]*', new_pass, content)
    write_files(config_file, content, line_by_line=False)
    self.hide()
    self.ui.media_server_key = h_digest
    # Reset the allow-list to loopback/any plus the known local addresses.
    self.ui.client_auth_arr[:] = []
    self.ui.client_auth_arr = ['127.0.0.1', '0.0.0.0']
    if self.ui.local_ip not in self.ui.client_auth_arr:
        self.ui.client_auth_arr.append(self.ui.local_ip)
    if self.ui.local_ip_stream not in self.ui.client_auth_arr:
        self.ui.client_auth_arr.append(self.ui.local_ip_stream)
def import_video(self, video_file, video_file_bak):
    """Collect video file paths under the roots listed in <home>/local.txt.

    Each non-comment line of local.txt names a root directory; the root
    and all its non-hidden sub-directories are scanned and files whose
    extension is in ``self.ui.video_type_arr`` are collected.
    ``video_file``/``video_file_bak`` are part of the call interface but
    are not used in this body.  Returns a de-duplicated list of paths.
    """
    m = []
    o = []  # hidden sub-directories, recorded but never scanned
    music = []  # directories to scan, rebuilt for every local.txt line
    p = []
    m_files = []
    if os.path.isfile(os.path.join(self.home, 'local.txt')):
        lines_dir = open_files(os.path.join(self.home, 'local.txt'), True)
        for lines_d in lines_dir:
            if not lines_d.startswith('#'):
                music[:] = []
                lines_d = lines_d.strip()
                lines_d = os.path.normpath(lines_d)
                dirn = lines_d
                self.logger.debug('checking::dirn={0}'.format(dirn))
                if os.path.exists(dirn):
                    self.logger.debug('exists::dirn={0}'.format(dirn))
                    music.append(dirn)
                    for r, d, f in os.walk(dirn):
                        for z in d:
                            if not z.startswith('.'):
                                music.append(os.path.join(r, z))
                            else:
                                o.append(os.path.join(r, z))
                print(len(music))
                j = 0
                lines = []
                for i in music:
                    if os.path.exists(i):
                        try:
                            n = os.listdir(i)
                        except Exception as err:
                            # unreadable directory: log and treat as empty
                            self.logger.error(err)
                            n = []
                        p[:] = []
                        for k in n:
                            file_ext = k.rsplit('.', 1)[-1]
                            if file_ext.lower() in self.ui.video_type_arr:
                                p.append(os.path.join(i, k))
                                path = os.path.join(i, k)
                                if os.path.isfile(path):
                                    m_files.append(path)
                        if p:
                            r = i
                            lines.append(r)
                        j = j + 1
    return list(set(m_files))
def import_music(self, music_file, music_file_bak):
    """Collect music file entries under the roots listed in <home>/local.txt.

    Same scan as :meth:`import_video` but matches extensions against
    ``self.ui.music_type_arr`` and records each file as
    ``"<path> <mtime-seconds>"`` (integer part of the mtime).
    ``music_file``/``music_file_bak`` are accepted for interface
    compatibility but not used here.  Returns a de-duplicated list.
    """
    m = []
    o = []  # hidden sub-directories, recorded but never scanned
    music = []  # directories to scan, rebuilt for every local.txt line
    p = []
    m_files = []
    if os.path.isfile(os.path.join(self.home, 'local.txt')):
        lines_dir = open_files(os.path.join(self.home, 'local.txt'), True)
        for lines_d in lines_dir:
            if not lines_d.startswith('#'):
                music[:] = []
                lines_d = os.path.normpath(lines_d.strip())
                dirn = lines_d
                music.append(dirn)
                for r, d, f in os.walk(dirn):
                    for z in d:
                        if not z.startswith('.'):
                            music.append(os.path.join(r, z))
                        else:
                            o.append(os.path.join(r, z))
                self.logger.debug(len(music))
                j = 0
                lines = []
                for i in music:
                    if os.path.exists(i):
                        try:
                            n = os.listdir(i)
                        except Exception as err:
                            # unreadable directory: log and treat as empty
                            self.logger.error(err)
                            n = []
                        p[:] = []
                        for k in n:
                            file_ext = k.rsplit('.', 1)[-1]
                            if file_ext.lower() in self.ui.music_type_arr:
                                p.append(os.path.join(i, k))
                                path = os.path.join(i, k)
                                if os.path.isfile(path):
                                    # "<path> <integer mtime>"
                                    s = (path + ' ' + (str(os.path.getmtime(path))
                                         ).split('.')[0])
                                    m_files.append(s)
                        if p:
                            r = i
                            lines.append(r)
                        j = j + 1
    return list(set(m_files))
def import_video(self, video_file, video_file_bak):
    """Collect video file paths under the roots listed in <home>/local.txt.

    Each non-comment line of local.txt names a root directory; the root
    and all its non-hidden sub-directories are scanned and files whose
    extension is in ``self.video_ext`` are collected.
    ``video_file``/``video_file_bak`` are part of the call interface but
    are not used in this body.

    Returns a de-duplicated list of absolute file paths.

    Fixes vs. previous revision: the debug print showed ``len(m)`` where
    ``m`` was always empty (the sibling importer prints the scanned
    directory count), and ``os.listdir`` was unguarded although the
    other importers tolerate unreadable directories.
    """
    m_files = []
    local_txt = os.path.join(self.home, 'local.txt')
    if os.path.isfile(local_txt):
        for lines_d in open_files(local_txt, True):
            if lines_d.startswith('#'):
                continue  # commented-out entry
            dirn = os.path.normpath(lines_d.strip())
            # Root plus every non-hidden sub-directory.
            dirs_to_scan = [dirn]
            for r, d, f in os.walk(dirn):
                for z in d:
                    if not z.startswith('.'):
                        dirs_to_scan.append(os.path.join(r, z))
            # debug: was print(len(m)) with m always [] (printed 0)
            print(len(dirs_to_scan))
            for i in dirs_to_scan:
                if not os.path.exists(i):
                    continue
                try:
                    entries = os.listdir(i)
                except Exception as err:
                    # unreadable directory: skip instead of crashing,
                    # consistent with the other import_* scanners
                    print(err)
                    entries = []
                for k in entries:
                    file_ext = k.rsplit('.', 1)[-1]
                    if file_ext.lower() in self.video_ext:
                        m_files.append(os.path.join(i, k))
    return list(set(m_files))
def import_video_dir(self):
    """Return the directories (under the roots named in <home>/local.txt)
    that directly contain at least one file whose extension is listed in
    ``self.ui.video_type_arr``.
    """
    m = []
    skipped_hidden = []
    walk_list = []       # directories gathered for the current local.txt line
    matches = []         # video files found in the directory being inspected
    found_dirs = []
    src_file = os.path.join(self.home, 'local.txt')
    if os.path.isfile(src_file):
        for raw_line in open_files(src_file, True):
            walk_list[:] = []
            root_dir = os.path.normpath(raw_line.strip())
            walk_list.append(root_dir)
            # Collect the root and every non-hidden sub-directory.
            for base, dirs, files in os.walk(root_dir):
                for entry in dirs:
                    full = os.path.join(base, entry)
                    if entry.startswith('.'):
                        skipped_hidden.append(full)
                    else:
                        walk_list.append(full)
            print(len(m))
            counted = 0
            kept_lines = []
            for candidate in walk_list:
                if not os.path.exists(candidate):
                    continue
                matches[:] = []
                for fname in os.listdir(candidate):
                    ext = fname.rsplit('.', 1)[-1]
                    if ext.lower() in self.ui.video_type_arr:
                        matches.append(os.path.join(candidate, fname))
                if matches:
                    found_dirs.append(str(candidate))
                counted = counted + 1
    return found_dirs
def get_epn_arr_list(self, site, name, video_dir):
    """Build the episode/entry list for *name*.

    For the ``video`` site the entries come from the Video.db
    "Directory" table; for any other site they are read from the
    per-title ``History/<site>[/<siteName>]/<title>/Ep.txt`` file.
    Returns a list of strings (possibly empty).
    """
    epn_arr = []
    if site.lower() == 'video' and video_dir:
        video_db = os.path.join(ui.home_folder, 'VideoDB', 'Video.db')
        if os.path.exists(video_db):
            epn_arr_tmp = ui.media_data.get_video_db(video_db, "Directory",
                                                     video_dir)
            for i in epn_arr_tmp:
                epn_name = i[0]+' '+i[1]
                logger.debug(epn_name)
                epn_arr.append(epn_name)
    elif video_dir:
        # video_dir carries "title[ extra-info]" for non-video sites
        new_name_with_info = video_dir.strip()
        extra_info = ''
        if ' ' in new_name_with_info:
            name_title = new_name_with_info.split(' ')[0]
            extra_info = new_name_with_info.split(' ')[1]
        else:
            name_title = new_name_with_info
        if site.lower() == 'subbedanime' or site.lower() == 'dubbedanime':
            # these two sites keep history under an extra siteName level
            siteName = ui.get_parameters_value(s='siteName')['siteName']
            hist_site = os.path.join(ui.home_folder, 'History', site,
                                     siteName, name_title)
        else:
            hist_site = os.path.join(ui.home_folder, 'History', site,
                                     name_title)
        hist_epn = os.path.join(hist_site, 'Ep.txt')
        logger.info(hist_epn)
        if os.path.exists(hist_epn):
            lines = open_files(hist_epn, True)
            for i in lines:
                i = i.strip()
                j = i.split(' ')
                if len(j) == 1:
                    # single-field entry: duplicate it so the output always
                    # has the shape "<epn> <epn-or-url> <name>"
                    epn_arr.append(i+' '+i+' '+name)
                elif len(j) >= 2:
                    epn_arr.append(i+' '+name)
    return epn_arr
def options_from_bookmark(self, site, site_option, search_term,
                          search_exact=None):
    """Resolve the listing for a site or a saved bookmark category.

    When *search_term* is given, returns a tuple
    ``(epnArrList, site, opt, local_stream, siteName)``; otherwise
    returns the raw ``original_path_name`` title list.  Returns ``0``
    when a required plugin file is missing or plugin listing fails.

    Bookmark lines are colon-separated:
    ``site:opt:pre_opt:base_url:embed:name[:finalUrlFound:refererNeeded
    [:local_stream[:dir_path[:nt-drive-suffix]]]]``.

    NOTE(review): name/info fields are split/joined here on a single
    space — if the on-disk format uses a different separator (e.g. tab),
    confirm against the code that writes these files.
    """
    original_path_name = []
    bookmark = False
    music_opt = ''
    video_opt = ''
    opt = ''
    status = site_option
    siteName = site_option
    new_dir_path = None
    new_name = 'Not Available'
    send_list_direct = False
    new_epnArrList = []
    if site.lower() == 'bookmark':
        bookmark = True
        status = site_option
        if status == "all":
            status = "bookmark"
        else:
            # match the requested category against existing bookmark files
            book_path = os.path.join(home, 'Bookmark')
            m = os.listdir(book_path)
            for i in m:
                i = i.replace('.txt', '')
                if i.lower() == site_option.lower():
                    status = i
                    break
            m = []
    new_video_local_stream = False
    print(bookmark, status, '--15627--')
    opt = 'history'
    bookmark_path = os.path.join(home, 'Bookmark', status+'.txt')
    if bookmark and os.path.isfile(bookmark_path):
        line_a = open_files(bookmark_path, True)
        logger.info(line_a)
        r = 0
        book_arr = []
        if search_term:
            # Find the first bookmark entry whose name contains the term;
            # from then on the entry's own site/option take over.
            for k, i in enumerate(line_a):
                j = i.strip()
                if j:
                    j = i.split(':')
                    if j:
                        print(j)
                        if search_term.lower() in j[5].lower():
                            site = j[0]
                            r = k
                            print(site, r)
                            site_option = j[1]
                            bookmark = False
                            break
        else:
            # No term: just collect every bookmarked name.
            for i in line_a:
                j = i.strip()
                if j:
                    j = i.split(':')
                    if j:
                        book_arr.append(j[5].strip())
        if search_term:
            # Unpack the selected bookmark line (see docstring for fields).
            tmp = line_a[r]
            tmp = tmp.strip()
            tmp1 = tmp.split(':')
            site = tmp1[0]
            if site.lower() == "music" or site.lower() == "video":
                opt = "Not Defined"
                if site.lower() == "music":
                    music_opt = tmp1[1]
                else:
                    video_opt = tmp1[1]
            else:
                opt = tmp1[1]
            pre_opt = tmp1[2]
            siteName = tmp1[2]
            base_url = int(tmp1[3])
            embed = int(tmp1[4])
            name = tmp1[5]
            new_name = name
            if site.lower() == "local":
                name_path = name
            video_local_stream = False
            logger.info(name)
            if len(tmp1) > 6:
                if tmp1[6] == "True":
                    finalUrlFound = True
                else:
                    finalUrlFound = False
                if tmp1[7] == "True":
                    refererNeeded = True
                else:
                    refererNeeded = False
                if len(tmp1) >= 9:
                    if tmp1[8] == "True":
                        video_local_stream = True
                    else:
                        video_local_stream = False
                if len(tmp1) >= 10:
                    new_dir_path = tmp1[9]
                    if os.name == 'nt':
                        # A Windows path like "C:\x" was split on ':'
                        # too — re-join the drive letter.
                        if len(tmp1) == 11:
                            new_dir_path = new_dir_path + ':' + tmp1[10]
                print(finalUrlFound)
                print(refererNeeded)
            else:
                refererNeeded = False
                finalUrlFound = False
            logger.info(site + ":"+opt)
        else:
            site = 'None'
            original_path_name = [i for i in book_arr]
    site_var = None
    criteria = []
    print(bookmark, status, site, opt, '--15713--')
    if (not site.lower().startswith("playlist") and site.lower() != "music"
            and site.lower() != "video" and site.lower() != "local"
            and site.lower() != "none"):
        # Plugin-backed site: load its module from src/Plugins.
        for i in ui.addons_option_arr:
            if site.lower() == i.lower():
                site = i
                break
        plugin_path = os.path.join(home, 'src', 'Plugins', site+'.py')
        if os.path.exists(plugin_path):
            if site_var:
                del site_var
                site_var = ''
            module = imp.load_source(site, plugin_path)
            site_var = getattr(module, site)(TMPDIR)
            if site_var:
                criteria = site_var.getOptions()
                print(criteria)
                # Trailing marker entries flag plugin capabilities.
                tmp = criteria[-1]
                if tmp.lower() == 'newversion':
                    criteria.pop()
                    ui.options_mode = 'new'
                    tmp = criteria[-1]
                if tmp == 'LocalStreaming':
                    criteria.pop()
                    video_local_stream = True
                    new_video_local_stream = True
        else:
            return 0
    genre_num = 0
    if (site.lower() != "local" and site.lower() != "music"
            and site.lower() != "subbedanime"
            and site.lower() != "dubbedanime"
            and not site.lower().startswith("playlist")
            and site.lower() != "video" and site.lower() != 'none'):
        # Generic plugin site: serve from History/<site>/history.txt when
        # present, otherwise ask the plugin for the complete list.
        t_opt = 'history'
        opt = t_opt
        if t_opt == "history":
            genre_num = 0
            opt = t_opt
            file_path = os.path.join(home, 'History', site, 'history.txt')
            if os.path.isfile(file_path):
                lines = open_files(file_path, True)
                lins = open_files(file_path, True)
                list1_items = []
                original_path_name[:] = []
                for i in lins:
                    i = i.strip()
                    j = i
                    if ' ' in i:
                        i = i.split(' ')[0]
                    original_path_name.append(j)
                if new_video_local_stream and ui.stream_session:
                    handle = ui.get_torrent_handle(search_term)
                    if handle is not None:
                        ui.torrent_handle = handle
            else:
                opt = t_opt
                try:
                    if video_local_stream:
                        new_video_local_stream = True
                        history_folder = os.path.join(home, 'History', site)
                        if os.path.exists(history_folder):
                            m = site_var.getCompleteList(
                                t_opt, ui.list6, ui.progress,
                                ui.tmp_download_folder, history_folder)
                            if ui.stream_session:
                                handle = ui.get_torrent_handle(search_term)
                                if handle is not None:
                                    ui.torrent_handle = handle
                    else:
                        m = site_var.getCompleteList(t_opt, 0)
                except Exception as e:
                    print(e)
                    return 0
                original_path_name[:] = []
                for i in m:
                    i = i.strip()
                    if ' ' in i:
                        j = i.split(' ')[0]
                    else:
                        j = i
                    original_path_name.append(i)
    elif site.lower() == "subbedanime" or site.lower() == "dubbedanime":
        # These sites nest history one level deeper, under siteName.
        code = 2
        siteName = site_option
        if site_var:
            criteria = site_var.getOptions()
            for i in criteria:
                if siteName.lower() == i.lower():
                    siteName = i
                    break
        opt = "history"
        original_path_name[:] = []
        if opt == "history":
            file_path = os.path.join(home, 'History', site, siteName,
                                     'history.txt')
            if os.path.isfile(file_path):
                lines = open_files(file_path, True)
                original_path_name[:] = []
                for i in lines:
                    i = i.strip()
                    if ' ' in i:
                        j = i.split(' ')[0]
                    else:
                        j = i
                    original_path_name.append(i)
    elif site.lower() == "music":
        music_dir = os.path.join(home, 'Music')
        music_db = os.path.join(home, 'Music', 'Music.db')
        music_file = os.path.join(home, 'Music', 'Music.txt')
        music_file_bak = os.path.join(home, 'Music', 'Music_bak.txt')
        if not os.path.exists(music_db):
            ui.media_data.create_update_music_db(
                music_db, music_file, music_file_bak,
                update_progress_show=False)
        music_opt = site_option
        print(music_opt)
        if music_opt:
            # Normalize capitalization, e.g. "fav-artist" -> "Fav-Artist".
            music_opt = music_opt[0].upper()+music_opt[1:]
            if '-' in music_opt:
                tmp = music_opt.split('-', 1)
                sub_tmp = tmp[1]
                music_opt = tmp[0]+'-'+sub_tmp[0].upper()+sub_tmp[1:]
        artist = []
        epnArrList = []
        if music_opt.lower().startswith("playlist"):
            pls = os.path.join(home, 'Playlists')
            if os.path.exists(pls):
                m = os.listdir(pls)
                for i in m:
                    artist.append(i)
        else:
            # Disabled exact-search shortcut kept for reference:
            """
            if search_exact and music_opt.lower() != 'directory':
                m = ui.media_data.get_music_db(music_db, music_opt, search_term)
                for i in m:
                    artist.append(i[1]+' '+i[2]+' '+i[0])
                send_list_direct = True
            else:
            """
            m = ui.media_data.get_music_db(music_db, music_opt, "")
            for i in m:
                artist.append(i[0])
        if send_list_direct:
            print('exact search on')
            new_epnArrList = [i for i in artist]
        else:
            original_path_name[:] = []
            if (music_opt.lower() == "artist" or music_opt.lower() == "album"
                    or music_opt.lower() == "title"
                    or music_opt.lower() == "fav-artist"
                    or music_opt.lower() == "fav-album"):
                for i in artist:
                    original_path_name.append(i)
            elif (music_opt.lower() == "directory"
                    or music_opt.lower() == "fav-directory"):
                for i in artist:
                    original_path_name.append(i)
                    i = os.path.basename(i)
            elif music_opt.lower().startswith("playlist"):
                for i in artist:
                    original_path_name.append(
                        i.replace('.txt', '')+' '
                        + os.path.join(home, 'Playlists', i))
            #print(original_path_name)
    elif site.lower() == "video":
        video_dir = os.path.join(home, 'VideoDB')
        video_db = os.path.join(video_dir, 'Video.db')
        video_file = os.path.join(video_dir, 'Video.txt')
        video_file_bak = os.path.join(video_dir, 'Video_bak.txt')
        if not os.path.exists(video_db):
            ui.media_data.create_update_video_db(
                video_db, video_file, video_file_bak,
                update_progress_show=False)
        if not bookmark:
            video_opt = site_option
        print('----video-----opt', video_opt)
        if video_opt.lower() == 'update' or video_opt.lower() == 'updateall':
            video_opt = 'Available'
        print('----video-----opt', video_opt)
        opt = video_opt
        artist = []
        m = []
        if not bookmark:
            if video_opt.lower() == "available":
                m = ui.media_data.get_video_db(video_db, "Directory", "")
            elif video_opt.lower() == "history":
                m = ui.media_data.get_video_db(video_db, "History", "")
            else:
                m = ui.media_data.get_video_db(video_db, video_opt, "")
        else:
            # Bookmark category of videos: expand each bookmarked entry.
            book_file = os.path.join(home, 'Bookmark', status+'.txt')
            if os.path.exists(book_file):
                line_a = open_files(book_file, True)
                m = []
                for i in line_a:
                    i = i.strip()
                    try:
                        new_name = i.split(':')[5]
                        try:
                            new_dir = i.split(':')[9]
                        except:
                            new_dir = new_name
                        logger.info('{0}-{1}-{2}'.format(
                            search_term, new_name, new_dir))
                        if search_term and search_term in new_name.lower():
                            original_path_name.append(new_name+' '+new_dir)
                            m1 = ui.media_data.get_video_db(
                                video_db, "Directory", new_dir)
                            for i in m1:
                                m.append(i[0]+' '+i[1]+' '+new_name)
                            logger.info(m)
                            logger.info('---14226---')
                            video_opt = 'directory'
                        elif not search_term:
                            original_path_name.append(new_name+' '+new_dir)
                    except Exception as e:
                        print(e)
                send_list_direct = True
                #site = 'bookmark'
        if not send_list_direct:
            for i in m:
                artist.append(i[0]+' '+i[1])
        else:
            new_epnArrList = [i for i in m]
            print('direct match:')
        #original_path_name[:] = []
        logger.info(artist)
        if ((video_opt.lower() != "update"
                and video_opt.lower() != "updateall")
                and not send_list_direct):
            for i in artist:
                ti = i.split(' ')[0]
                di = i.split(' ')[1]
                if os.path.exists(di):
                    if (ti.lower().startswith('season')
                            or ti.lower().startswith('special')):
                        # Prefix generic "Season"/"Special" titles with
                        # their parent directory name for readability.
                        new_di, new_ti = os.path.split(di)
                        logger.info('new_di={0}-{1}'.format(new_di, new_ti))
                        new_di = os.path.basename(new_di)
                        ti = new_di+'-'+ti
                        original_path_name.append(ti+' '+di)
                    else:
                        original_path_name.append(i)
            original_path_name = sorted(
                original_path_name,
                key=lambda x: x.split(' ')[0].lower())
    elif site.lower().startswith("playlist"):
        pls = os.path.join(home, 'Playlists')
        if os.path.exists(pls):
            m = os.listdir(pls)
            for i in m:
                j = i.replace('.txt', '')
                original_path_name.append(j+' '+os.path.join(pls, i))
    logger.info(original_path_name)
    print('--------14243-----------')
    if search_term:
        if not send_list_direct:
            epnArrList = self.listfound_from_bookmark(
                site, site_option, search_term, original_path_name,
                search_exact=search_exact)
        else:
            epnArrList = new_epnArrList
        if site.lower() == 'video':
            ret_tuple = (epnArrList, site, video_opt, False, siteName)
        elif site.lower() == 'music':
            ret_tuple = (epnArrList, site, music_opt, False, siteName)
        elif site.lower().startswith('playlist'):
            ret_tuple = (epnArrList, site, 'none', False, siteName)
        else:
            ret_tuple = (epnArrList, site, opt, new_video_local_stream,
                         siteName)
        return ret_tuple
    else:
        return original_path_name
def record_torrent(self, item, hist_folder):
    """Register a torrent (URL, local .torrent file, or magnet link).

    Places ``<name>.torrent`` inside *hist_folder*, appends the torrent
    name to ``history.txt`` once, and writes the torrent's contained
    file names to ``<hist_folder>/<name>/Ep.txt``.  Returns the torrent
    name, or '' when nothing could be resolved.
    """
    tmp_dir = TMPDIR
    name = ''
    if not os.path.exists(hist_folder):
        os.makedirs(hist_folder)
    if item.startswith('http') or os.path.isfile(item):
        # local alias; shadows any module-level `home` inside this function
        home = hist_folder
        name1 = os.path.basename(item).replace('.torrent', '')
        torrent_dest1 = os.path.join(tmp_dir, name1+'.torrent')
        if not os.path.exists(torrent_dest1):
            # fetch (or copy) the .torrent into the temp dir first
            if item.startswith('http'):
                ccurl(item+'#'+'-o'+'#'+torrent_dest1)
            else:
                shutil.copy(item, torrent_dest1)
        if os.path.exists(torrent_dest1):
            info = lt.torrent_info(torrent_dest1)
            # use the torrent's own metadata name, not the file name
            name = info.name()
            torrent_dest = os.path.join(home, name+'.torrent')
            shutil.copy(torrent_dest1, torrent_dest)
            logger.info(name)
    elif item.startswith('magnet:'):
        # Magnet: fetch metadata via a temporary session, then write a
        # .torrent file from it.
        torrent_handle, stream_session, info = get_torrent_info_magnet(
            item, tmp_dir, self, ui.progress, tmp_dir)
        torrent_file = lt.create_torrent(info)
        home = hist_folder
        name = info.name()
        torrent_dest = os.path.join(home, name+'.torrent')
        with open(torrent_dest, "wb") as f:
            f.write(lt.bencode(torrent_file.generate()))
        torrent_handle.pause()
        stream_session.pause()
        ui.stop_torrent_forcefully()
    if name:
        torrent_dest = os.path.join(home, name+'.torrent')
        info = lt.torrent_info(torrent_dest)
        file_arr = []
        for f in info.files():
            file_path = f.path
            file_path = os.path.basename(file_path)
            file_arr.append(file_path)
        if file_arr:
            hist_path = os.path.join(home, 'history.txt')
            if not os.path.isfile(hist_path):
                hist_dir, last_field = os.path.split(hist_path)
                if not os.path.exists(hist_dir):
                    os.makedirs(hist_dir)
                # create an empty history file (f ends up None)
                f = open(hist_path, 'w').close()
            if os.path.isfile(hist_path):
                if (os.stat(hist_path).st_size == 0):
                    write_files(hist_path, name, line_by_line=True)
                else:
                    # append only if this torrent is not recorded yet
                    lines = open_files(hist_path, True)
                    line_list = []
                    for i in lines:
                        i = i.strip()
                        line_list.append(i)
                    if name not in line_list:
                        write_files(hist_path, name, line_by_line=True)
            hist_site = os.path.join(hist_folder, name)
            if not os.path.exists(hist_site):
                try:
                    os.makedirs(hist_site)
                    hist_epn = os.path.join(hist_site, 'Ep.txt')
                    write_files(hist_epn, file_arr, line_by_line=True)
                except Exception as e:
                    print(e)
    return name
def listfound_from_bookmark(
        self, site, site_option, search_term, original_path_name,
        search_exact=None):
    """Search *original_path_name* for *search_term* and return the
    matching episode/entry list for the resolved site.

    ``search_exact`` requires an exact match on the entry's first
    field; a ``"<sha256>.hash"`` search term instead selects the entry
    whose directory path hashes to that value.  Returns ``0`` when a
    required plugin file is missing.
    """
    site_var = None
    bookmark = False
    status = site_option
    logger.info('\n{0}:{1}:{2}\n --473--serverlib'.format(
        site, site_option, search_term))
    if site.lower() == 'bookmark':
        bookmark = True
        status = site_option
        if status.lower() == 'all':
            status = 'bookmark'
        else:
            # match the requested category against existing bookmark files
            m = os.listdir(os.path.join(home, 'Bookmark'))
            for i in m:
                i = i.lower().replace('.txt', '')
                if i == site_option.lower():
                    status = i
                    break
            m = []
    search_term = search_term.lower()
    epnArrList = []
    new_dir_path = None
    new_name = 'Not Available'
    bookmark_path = os.path.join(home, 'Bookmark', status+'.txt')
    if bookmark and os.path.isfile(bookmark_path):
        # Locate the bookmark entry and unpack its colon-separated
        # fields (same layout as in options_from_bookmark).
        line_a = open_files(bookmark_path, True)
        r = 0
        for k, i in enumerate(line_a):
            j = i.strip()
            if j:
                j = i.split(':')
                if j:
                    if search_term in j[5].lower():
                        site = j[0]
                        r = k
                        break
        tmp = line_a[r]
        tmp = tmp.strip()
        tmp1 = tmp.split(':')
        site = tmp1[0]
        if site.lower() == "music" or site.lower() == "video":
            opt = "Not Defined"
            if site.lower() == "music":
                music_opt = tmp1[1]
            else:
                video_opt = tmp1[1]
        else:
            opt = tmp1[1]
        pre_opt = tmp1[2]
        siteName = tmp1[2]
        base_url = int(tmp1[3])
        embed = int(tmp1[4])
        name = tmp1[5]
        new_name = name
        if site.lower() == "local":
            name_path = name
        video_local_stream = False
        logger.info(name)
        if len(tmp1) > 6:
            if tmp1[6] == "True":
                finalUrlFound = True
            else:
                finalUrlFound = False
            if tmp1[7] == "True":
                refererNeeded = True
            else:
                refererNeeded = False
            if len(tmp1) >= 9:
                if tmp1[8] == "True":
                    video_local_stream = True
                else:
                    video_local_stream = False
            if len(tmp1) >= 10:
                new_dir_path = tmp1[9]
            print(finalUrlFound)
            print(refererNeeded)
            print(video_local_stream)
        else:
            refererNeeded = False
            finalUrlFound = False
        logger.info(site + ":"+opt)
    site_var = None
    logger.info('--16069----')
    if (not site.lower().startswith("playlist") and site.lower() != "music"
            and site.lower() != "video" and site.lower() != "local"
            and site.lower() != "none"):
        # Plugin-backed site: match titles, then read each matching
        # title's History/.../Ep.txt.
        logger.info('search_term={0}'.format(search_term))
        if search_term:
            epnArrList = []
            for i in ui.addons_option_arr:
                if site.lower() == i.lower():
                    site = i
                    break
            plugin_path = os.path.join(home, 'src', 'Plugins', site+'.py')
            if os.path.exists(plugin_path):
                logger.info('plugin_path={0}'.format(plugin_path))
                if site_var:
                    del site_var
                    site_var = ''
                module = imp.load_source(site, plugin_path)
                site_var = getattr(module, site)(TMPDIR)
                siteName = site_option
                if site_var:
                    if (site.lower() == 'subbedanime'
                            or site.lower() == 'dubbedanime'):
                        criteria = site_var.getOptions()
                        for i in criteria:
                            if siteName.lower() == i.lower():
                                siteName = i
                                break
            else:
                return 0
            for i, value in enumerate(original_path_name):
                search_field = value.lower()
                if search_exact:
                    # exact mode compares only the first field
                    if ' ' in search_field:
                        search_field = search_field.split(' ')[0]
                logger.info('search_field={0}'.format(search_field))
                if ((search_term in search_field and not search_exact)
                        or (search_term == search_field and search_exact)):
                    cur_row = i
                    new_name_with_info = original_path_name[cur_row].strip()
                    extra_info = ''
                    logger.info('cur_row={0}, new_name={1}'.format(
                        i, new_name_with_info))
                    if ' ' in new_name_with_info:
                        name = new_name_with_info.split(' ')[0]
                        extra_info = new_name_with_info.split(' ')[1]
                    else:
                        name = new_name_with_info
                    if (site.lower() == 'subbedanime'
                            or site.lower() == 'dubbedanime'):
                        hist_site = os.path.join(home, 'History', site,
                                                 siteName, name)
                    else:
                        hist_site = os.path.join(home, 'History', site, name)
                    hist_epn = os.path.join(hist_site, 'Ep.txt')
                    logger.info(hist_epn)
                    if os.path.exists(hist_epn):
                        lines = open_files(hist_epn, True)
                        m = []
                        for i in lines:
                            i = i.strip()
                            j = i.split(' ')
                            if len(j) == 1:
                                epnArrList.append(i+' '+i+' '+name)
                            elif len(j) >= 2:
                                epnArrList.append(i+' '+name)
                        picn = os.path.join(hist_site, 'poster.jpg')
                        fanart = os.path.join(hist_site, 'fanart.jpg')
                        thumbnail = os.path.join(hist_site, 'thumbnail.jpg')
                        sum_file = os.path.join(hist_site, 'summary.txt')
                        summary = ui.get_summary_history(sum_file)
    elif site.lower() == "music":
        art_n = search_term
        music_dir = os.path.join(home, 'Music')
        music_db = os.path.join(home, 'Music', 'Music.db')
        music_file = os.path.join(home, 'Music', 'Music.txt')
        music_file_bak = os.path.join(home, 'Music', 'Music_bak.txt')
        music_opt = site_option
        if music_opt:
            # Normalize capitalization, e.g. "fav-artist" -> "Fav-Artist".
            music_opt = music_opt[0].upper()+music_opt[1:]
            if '-' in music_opt:
                tmp = music_opt.split('-', 1)
                sub_tmp = tmp[1]
                music_opt = tmp[0]+'-'+sub_tmp[0].upper()+sub_tmp[1:]
        artist = []
        logger.info(original_path_name)
        hash_srch = None
        hash_dir = None
        if search_term.endswith('.hash'):
            # hashed lookup: "<sha256-of-path>.hash"
            hash_srch = search_term.rsplit('.', 1)[0]
            logger.debug(hash_srch)
        for index, value in enumerate(original_path_name):
            if (music_opt.lower() == 'directory'
                    or music_opt.lower() == 'fav-directory'):
                search_field = os.path.basename(value).lower()
                if hash_srch:
                    hash_dir = bytes(value, 'utf-8')
            else:
                search_field = value.lower()
                if hash_srch:
                    if music_opt.lower().startswith('playlist'):
                        hash_dir = bytes(value.split('\t')[1], 'utf-8')
                    else:
                        hash_dir = bytes(value.split('\t')[0], 'utf-8')
            #logger.debug(value)
            if hash_srch and hash_dir:
                h = hashlib.sha256(hash_dir)
                hash_val = h.hexdigest()
                if hash_val == hash_srch:
                    # hashed entry found: continue with a normal match
                    search_term = search_field
                else:
                    continue
            if ((search_term in search_field and not search_exact)
                    or (search_term == search_field and search_exact)):
                if ' ' in value.lower():
                    art_n = value.split(' ')[0]
                else:
                    art_n = value.strip()
                if music_opt.lower() == "directory":
                    art_n = value
                if music_opt.lower() == "fav-directory":
                    art_n = value
                if (music_opt.lower() == "playlist"
                        or music_opt.lower() == "playlists"):
                    # playlist entries are read straight from the file
                    pls = value.split(' ')[0]
                    m = open_files(os.path.join(home, 'Playlists', pls), True)
                    for i in m:
                        i = i.replace('\n', '')
                        if i:
                            j = i.split(' ')
                            i1 = j[0]
                            i2 = j[1]
                            try:
                                i3 = j[2]
                            except:
                                i3 = "None"
                            artist.append(i1+' '+i2+' '+i3)
                else:
                    music_opt = music_opt[0].upper()+music_opt[1:]
                    if '-' in music_opt:
                        tmp = music_opt.split('-', 1)
                        sub_tmp = tmp[1]
                        music_opt = tmp[0]+'-'+sub_tmp[0].upper()+sub_tmp[1:]
                    m = ui.media_data.get_music_db(music_db, music_opt, art_n)
                    for i in m:
                        artist.append(i[1]+' '+i[2]+' '+i[0])
                if (search_term == search_field and search_exact):
                    print('exact match:')
                    break
        epnArrList[:] = []
        for i in artist:
            epnArrList.append((i))
    elif site.lower().startswith("playlist"):
        epnArrList = []
        for index, value in enumerate(original_path_name):
            search_field = value.lower().split(' ')[0]
            if ((search_term in search_field and not search_exact)
                    or (search_term == search_field and search_exact)):
                pls = value.split(' ')[0]
                file_path = os.path.join(home, 'Playlists', str(pls))
                if os.path.exists(file_path):
                    lines = open_files(file_path, True)
                    k = 0
                    for i in lines:
                        i = i.strip()
                        if i:
                            if not search_exact:
                                # tag each entry with its playlist name
                                i = i+'##'+pls
                            epnArrList.append(i)
    elif site.lower() == "video":
        epnArrList = []
        hash_srch = None
        if search_term.endswith('.hash'):
            hash_srch = search_term.rsplit('.', 1)[0]
            logger.debug(hash_srch)
        for index, value in enumerate(original_path_name):
            if ' ' in value.lower():
                art_n = value.split(' ')[0]
            else:
                art_n = value.strip()
            search_field = art_n.lower()
            if hash_srch:
                hash_dir = bytes(value.split('\t')[1], 'utf-8')
                h = hashlib.sha256(hash_dir)
                hash_val = h.hexdigest()
                if hash_val == hash_srch:
                    search_term = search_field
                else:
                    continue
            if ((search_term in search_field and not search_exact)
                    or (search_term == search_field and search_exact)):
                name = art_n
                video_dir = os.path.join(home, 'VideoDB')
                logger.info('{0}--search-client--'.format(art_n))
                video_db = os.path.join(video_dir, 'Video.db')
                video_file = os.path.join(video_dir, 'Video.txt')
                video_file_bak = os.path.join(video_dir, 'Video_bak.txt')
                artist = []
                if not bookmark:
                    video_opt = site_option[0].upper()+site_option[1:]
                    print(video_opt, '---15112----')
                    if (video_opt.lower() == "update"
                            or video_opt.lower() == "updateall"):
                        video_opt = "Available"
                    n_art_n = original_path_name[index].split(' ')[-1]
                    m = ui.media_data.get_video_db(video_db, "Directory",
                                                   n_art_n)
                    logger.info('{0}--{1}--search-client--14534--'.format(
                        art_n, n_art_n))
                else:
                    try:
                        new_dir_path = search_field.split(' ')[-1]
                    except Exception as e:
                        print(e)
                    logger.info(new_dir_path)
                    if new_dir_path is not None:
                        if new_dir_path.lower() != 'none':
                            m = ui.media_data.get_video_db(
                                video_db, "Directory", new_dir_path)
                        else:
                            m = ui.media_data.get_video_db(
                                video_db, "Bookmark", art_n)
                    else:
                        m = ui.media_data.get_video_db(
                            video_db, "Bookmark", art_n)
                for i in m:
                    artist.append(i[0]+' '+i[1]+' '+art_n)
                for i in artist:
                    epnArrList.append((i))
                dir_path = os.path.join(home, 'Local', art_n)
                if os.path.exists(dir_path):
                    picn = os.path.join(home, 'Local', art_n, 'poster.jpg')
                    thumbnail = os.path.join(home, 'Local', art_n,
                                             'thumbnail.jpg')
                    fanart = os.path.join(home, 'Local', art_n, 'fanart.jpg')
                    summary1 = os.path.join(home, 'Local', art_n,
                                            'summary.txt')
                    if os.path.exists(summary1):
                        summary = open_files(summary1, False)
                    else:
                        summary = "Not Available"
                if (search_term == search_field and search_exact):
                    print('Exact Match:')
                    break
    return epnArrList
def run(self):
    """Fetch poster/fanart images and a summary for ``self.name``.

    Thread entry point.  Behavior depends on the module-level global
    ``site``: for ``'Music'`` it scrapes last.fm; otherwise it searches
    thetvdb.com (or themoviedb.org via ``self.ddg_search``) and downloads
    artwork into ``TMPDIR``.  Summaries are delivered asynchronously via
    ``self.summary_signal.emit(name, text, 'summary')``.

    NOTE(review): this block was recovered from a whitespace-mangled
    source dump; indentation was reconstructed and string literals that
    display as a single space may originally have been ``'\\t'`` —
    verify against the upstream project before relying on separators.

    Uses module globals: ``site``, ``TMPDIR``, ``logger``, ``ccurl``,
    ``BeautifulSoup``, ``open_files`` (assumed project helpers — confirm).
    """
    name = self.name
    url = self.url
    direct_url = self.direct_url
    #print(name, url, direct_url, '--poster--thread--')
    # Destination paths in the temp directory, keyed by media name.
    fanart = os.path.join(TMPDIR, name + '-fanart.jpg')
    thumb = os.path.join(TMPDIR, name + '.jpg')
    fan_text = os.path.join(TMPDIR, name + '-fanart.txt')
    post_text = os.path.join(TMPDIR, name + '-poster.txt')
    logger.info(fanart)
    logger.info(thumb)
    final_link = ""
    m = []
    if site == 'Music':
        final = ''
        if (self.copy_fanart and self.copy_poster and self.copy_summary):
            # Full fetch: locate the artist page on last.fm (searching by
            # name when no URL was supplied), then pull wiki text and images.
            if not direct_url and not url:
                nam = self.name_adjust(name)
                url = "http://www.last.fm/search?q=" + nam
                logger.info(url)
            wiki = ""
            content = ccurl(url)
            soup = BeautifulSoup(content, 'lxml')
            link = soup.findAll('div', {'class': 'row clearfix'})
            name3 = ""
            # Pick the first search-result link that is not itself a
            # search URL ('?q=' filtered out).
            for i in link:
                j = i.findAll('a')
                for k in j:
                    try:
                        url = k['href']
                        if '?q=' not in url:
                            logger.info(url)
                            break
                    except Exception as err:
                        print(err, '--108--')
            logger.info(url)
            if url.startswith('http'):
                url = url
            else:
                # Relative link from last.fm markup — make it absolute.
                url = "http://www.last.fm" + url
            logger.info(url)
            img_url = url + '/+images'
            wiki_url = url + '/+wiki'
            logger.info(wiki_url)
            content = ccurl(wiki_url)
            soup = BeautifulSoup(content, 'lxml')
            link = soup.find('div', {'class': 'wiki-content'})
            if link:
                wiki = link.text
                self.summary_signal.emit(name, wiki, 'summary')
            content = ccurl(img_url)
            soup = BeautifulSoup(content, 'lxml')
            link = soup.findAll('ul', {'class': 'image-list'})
            img = []
            for i in link:
                j = i.findAll('img')
                for k in j:
                    l = k['src']
                    # Rewrite the thumbnail URL to the 770x0 rendition:
                    # keep the path prefix, swap in '/770x0/', keep basename.
                    u1 = l.rsplit('/', 2)[0]
                    u2 = l.split('/')[-1]
                    u = u1 + '/770x0/' + u2
                    img.append(u)
            img = list(set(img))  # de-duplicate (order becomes arbitrary)
            logger.info(len(img))
            thumb = os.path.join(TMPDIR, name + '.jpg')
            if img:
                url = img[0]
                try:
                    ccurl(url, curl_opt='-o', out_file=thumb)
                except Exception as err:
                    print(err, '--151--')
        elif (self.copy_poster or self.copy_fanart) and url and direct_url:
            # Targeted fetch: caller supplied a direct image/page URL.
            if 'last.fm' in url:
                logger.info('--artist-link---{0}'.format(url))
                content = ccurl(url)
                soup = BeautifulSoup(content, 'lxml')
                link = soup.findAll('img')
                url1Code = url.split('/')[-1]
                found = None
                # Find the <img> whose src basename matches the page URL's
                # last path component, then request its 770x0 rendition.
                for i in link:
                    if 'src' in str(i):
                        j = i['src']
                        k = j.split('/')[-1]
                        if url1Code == k:
                            found = j
                            break
                logger.info(str(found))
                if found:
                    u1 = found.rsplit('/', 2)[0]
                    u2 = found.split('/')[-1]
                    final = u1 + '/770x0/' + u2
                    logger.info(final)
            elif (".jpg" in url or ".png" in url) and url.startswith('http'):
                final = url
            else:
                final = ''
            try:
                if final.startswith('http'):
                    ccurl(final, curl_opt='-o', out_file=thumb)
            except Exception as e:
                print(e)
    else:
        # Non-music: try thetvdb (default) or tmdb, possibly via web search.
        nam = self.name_adjust(name)
        src_site = 'tvdb'
        if self.use_search:
            if isinstance(self.use_search, bool):
                # Generic search: tvdb first, tmdb as fallback.
                final_link, m = self.ddg_search(nam, 'tvdb')
                if not m:
                    final_link, m = self.ddg_search(nam, 'tmdb')
                    if m:
                        src_site = 'tmdb'
            else:
                # use_search names a specific engine/site string.
                final_link, m = self.ddg_search(nam, self.use_search, direct_search=True)
                src_site = self.use_search
        else:
            if direct_url and url:
                if (".jpg" in url or ".png" in url or url.endswith('.webp')) and "http" in url:
                    # Direct image URL: download straight to the requested slot.
                    # NOTE(review): the '#'-joined argument appears to be an
                    # alternate ccurl calling convention ('url#-o#outfile') —
                    # confirm against the ccurl helper.
                    if self.copy_poster:
                        ccurl(url + '#' + '-o' + '#' + thumb)
                    elif self.copy_fanart:
                        ccurl(url + '#' + '-o' + '#' + fanart)
                elif 'tvdb' in url or 'themoviedb' in url:
                    final_link = url
                    logger.info(final_link)
                    m.append(final_link)
                    if 'themoviedb' in url:
                        src_site = 'tmdb'
            else:
                # Three-step thetvdb search: advanced search with Animation
                # genre (two field locations), then the plain series search.
                link = "http://thetvdb.com/index.php?seriesname=" + nam + "&fieldlocation=1&language=7&genre=Animation&year=&network=&zap2it_id=&tvcom_id=&imdb_id=&order=translation&addedBy=&searching=Search&tab=advancedsearch"
                logger.info(link)
                content = ccurl(link)
                m = re.findall('/index.php[^"]tab=[^"]*', content)
                if not m:
                    link = "http://thetvdb.com/index.php?seriesname=" + nam + "&fieldlocation=2&language=7&genre=Animation&year=&network=&zap2it_id=&tvcom_id=&imdb_id=&order=translation&addedBy=&searching=Search&tab=advancedsearch"
                    content = ccurl(link)
                    m = re.findall('/index.php[^"]tab=[^"]*', content)
                    if not m:
                        link = "http://thetvdb.com/?string=" + nam + "&searchseriesid=&tab=listseries&function=Search"
                        content = ccurl(link)
                        m = re.findall('/[^"]tab=series[^"]*lid=7', content)
        if m and (src_site == 'tvdb' or src_site == 'tvdb+g' or src_site == 'tvdb+ddg'):
            if not final_link:
                # Build series and season-list URLs from the first hit.
                n = re.sub('amp;', '', m[0])
                elist = re.sub('tab=series', 'tab=seasonall', n)
                url = "http://thetvdb.com" + n
                logger.info(url)
                elist_url = "http://thetvdb.com" + elist
            else:
                url = final_link
            content = ccurl(url)
            soup = BeautifulSoup(content, 'lxml')
            sumry = soup.find('div', {'id': 'content'})
            linkLabels = soup.findAll('div', {'id': 'content'})
            logger.info(sumry)
            # Re-wrap the markup so title (<h1>) and summary (<p>) are
            # easy to pick out of the re-parsed fragment.
            t_sum = re.sub('</h1>', '</h1><p>', str(sumry))
            t_sum = re.sub('</div>', '</p></div>', str(t_sum))
            soup = BeautifulSoup(t_sum, 'lxml')
            try:
                title = (soup.find('h1')).text
            except Exception as err_val:
                print(err_val)
                title = 'Title Not Available'
            # NOTE(review): as written this substitution is a no-op
            # ('&' -> '&'); the original was most likely '&amp;' -> '&'
            # and lost its entity during text extraction — confirm upstream.
            title = re.sub('&', '&', title)
            try:
                sumr = (soup.find('p')).text
            except Exception as e:
                print(e, '--233--')
                sumr = "Not Available"
            try:
                # Collect "label: value" metadata rows; Genre cells get
                # their nested tags flattened to plain text.
                link1 = linkLabels[1].findAll('td', {'id': 'labels'})
                logger.info(link1)
                labelId = ""
                for i in link1:
                    j = i.text
                    if "Genre" in j:
                        k = str(i.findNext('td'))
                        l = re.findall('>[^<]*', k)
                        q = ""
                        for p in l:
                            q = q + " " + p.replace('>', '')
                        k = q
                    else:
                        k = i.findNext('td').text
                    k = re.sub('\n|\t', '', k)
                    labelId = labelId + j + " " + k + '\n'
            except:
                # NOTE(review): bare except silently discards any scrape
                # failure and leaves the metadata block empty.
                labelId = ""
            summary = title + '\n\n' + labelId + sumr
            summary = re.sub('\t', '', summary)
            # NOTE(review): both names point at the same list here, but each
            # is unconditionally rebound by the findall calls below, so the
            # aliasing is harmless.
            fan_all = post_all = []
            if self.copy_summary:
                self.summary_signal.emit(name, summary, 'summary')
            fan_all = re.findall('/[^"]tab=seriesfanart[^"]*', content)
            logger.info(fan_all)
            content1 = ""
            content2 = ""
            post_all = re.findall('/[^"]tab=seriesposters[^"]*', content)
            logger.info(post_all)
            direct_jpg = False
            if not fan_all and not post_all:
                # No dedicated art tabs: fall back to season banner JPGs
                # referenced directly in the page.
                fan_all = re.findall('banners/seasons/[^"]*.jpg', content)
                post_all = fan_all
                direct_jpg = True
            if fan_all:
                url_fan_all = "http://thetvdb.com" + fan_all[0]
                logger.info(url_fan_all)
                if not direct_jpg:
                    content1 = ccurl(url_fan_all)
                    m = re.findall('banners/fanart/[^"]*jpg', content1)
                else:
                    m = fan_all
                m = list(set(m))
                #m.sort()
                m = random.sample(m, len(m))  # shuffled copy of the candidates
                length = len(m) - 1
                logger.info(m)
                fanart_text = os.path.join(TMPDIR, name + '-fanart.txt')
                # Persist the candidate list once; skip 'vignette' variants.
                if not os.path.isfile(fanart_text):
                    f = open(fanart_text, 'w')
                    f.write(m[0])
                    i = 1
                    while (i <= length):
                        if not "vignette" in m[i]:
                            f.write('\n' + m[i])
                        i = i + 1
                    f.close()
            else:
                m = re.findall('banners/fanart/[^"]*.jpg', content)
                m = list(set(m))
                #m.sort()
                m = random.sample(m, len(m))
                length = len(m) - 1
                logger.info(m)
                fanart_text = os.path.join(TMPDIR, name + '-fanart.txt')
                if not os.path.isfile(fanart_text) and m:
                    f = open(fanart_text, 'w')
                    f.write(m[0])
                    i = 1
                    while (i <= length):
                        if not "vignette" in m[i]:
                            f.write('\n' + m[i])
                        i = i + 1
                    f.close()
            if post_all:
                url_post_all = "http://thetvdb.com" + post_all[0]
                logger.info(url_post_all)
                if not direct_jpg:
                    content2 = ccurl(url_post_all)
                    r = re.findall('banners/posters/[^"]*jpg', content2)
                else:
                    r = post_all
                r = list(set(r))
                #r.sort()
                r = random.sample(r, len(r))
                logger.info(r)
                length = len(r) - 1
                poster_text = os.path.join(TMPDIR, name + '-poster.txt')
                if not os.path.isfile(poster_text):
                    f = open(poster_text, 'w')
                    f.write(r[0])
                    i = 1
                    while (i <= length):
                        f.write('\n' + r[i])
                        i = i + 1
                    f.close()
            else:
                r = re.findall('banners/posters/[^"]*.jpg', content)
                r = list(set(r))
                #r.sort()
                r = random.sample(r, len(r))
                logger.info(r)
                length = len(r) - 1
                poster_text = os.path.join(TMPDIR, name + '-poster.txt')
                if (r) and (not os.path.isfile(poster_text)):
                    f = open(poster_text, 'w')
                    f.write(r[0])
                    i = 1
                    while (i <= length):
                        f.write('\n' + r[i])
                        i = i + 1
                    f.close()
            # Download the first recorded poster/fanart URL, then remove
            # the scratch candidate files.
            poster_text = os.path.join(TMPDIR, name + '-poster.txt')
            fanart_text = os.path.join(TMPDIR, name + '-fanart.txt')
            if os.path.isfile(poster_text) and os.stat(
                    poster_text).st_size:
                lines = open_files(poster_text, True)
                logger.info(lines)
                url1 = re.sub('\n|#', '', lines[0])
                url = "http://thetvdb.com/" + url1
                ccurl(url + '#' + '-o' + '#' + thumb)
            if os.path.isfile(fanart_text) and os.stat(
                    fanart_text).st_size:
                lines = open_files(fanart_text, True)
                logger.info(lines)
                url1 = re.sub('\n|#', '', lines[0])
                url = "http://thetvdb.com/" + url1
                ccurl(url + '#' + '-o' + '#' + fanart)
            if os.path.exists(fanart_text):
                os.remove(fanart_text)
            if os.path.exists(poster_text):
                os.remove(poster_text)
        elif m and (src_site == 'tmdb' or src_site == 'tmdb+g' or src_site == 'tmdb+ddg'):
            # themoviedb path: normalize the URL to the base title page,
            # scrape title/certification/genres/overview, then grab up to
            # two original-resolution posters (second one used as fanart).
            url = final_link
            url_ext = [
                'discuss', 'reviews', 'posters', 'changes', 'videos', '#'
            ]
            url_end = url.rsplit('/', 1)[1]
            if url_end in url_ext:
                url = url.rsplit('/', 1)[0]
            if '?' in url:
                url = url.split('?')[0]
            content = ccurl(url)
            soup = BeautifulSoup(content, 'lxml')
            #logger.info(soup.prettify())
            title_div = soup.find('div', {'class': 'title'})
            if title_div:
                title = title_div.text
            else:
                title = name
            summ = soup.find('div', {'class': 'overview'})
            if summ:
                summary = summ.text.strip()
            else:
                summary = 'Not Available'
            cer_t = soup.find('div', {'class': 'certification'})
            if cer_t:
                cert = cer_t.text
            else:
                cert = 'None'
            genre = soup.find("section", {"class": "genres right_column"})
            if genre:
                genres = genre.text.strip()
                genres = genres.replace('\n', ' ')
                genres = genres.replace('Genres', 'Genres:')
            else:
                genres = 'No Genres'
            new_summary = title.strip() + '\n\n' + cert.strip(
                ) + '\n' + genres.strip() + '\n\n' + summary.strip()
            if self.copy_summary:
                self.summary_signal.emit(name, new_summary, 'summary')
            url = url + '/images/posters'
            content = ccurl(url)
            posters_link = re.findall(
                'https://image.tmdb.org/[^"]*original[^"]*.jpg', content)
            if posters_link:
                posters_link = random.sample(posters_link, len(posters_link))
                if len(posters_link) == 1:
                    url = posters_link[0]
                    ccurl(url + '#' + '-o' + '#' + thumb)
                elif len(posters_link) >= 2:
                    ccurl(posters_link[0] + '#' + '-o' + '#' + thumb)
                    ccurl(posters_link[1] + '#' + '-o' + '#' + fanart)
def listfound_from_bookmark(
    self, site, site_option, search_term, original_path_name, search_exact=None
):
    """Resolve ``search_term`` against a bookmark/site category and return
    the matching episode/track list.

    Parameters (as used by the visible code):
      site              -- category name ('bookmark', 'music', 'video',
                           'playlist*', or an addon/site name); may be
                           rebound from the bookmark file when
                           site == 'bookmark'.
      site_option       -- sub-option (bookmark status, music/video mode,
                           or plugin option string).
      search_term       -- lower-cased needle; a trailing '.hash' selects
                           sha256-digest matching against directory paths.
      original_path_name-- candidate entries to search through.
      search_exact      -- when truthy, require exact field equality
                           instead of substring containment.

    Returns ``epnArrList`` (list of matching entry strings), or ``0`` when
    a named plugin file does not exist.  NOTE(review): the 0/list return
    inconsistency is preserved as-is; callers apparently tolerate it.

    NOTE(review): recovered from a whitespace-mangled dump — indentation
    was reconstructed, and string literals shown as a single space (the
    field separators in splits/appends) may originally have been '\\t';
    ``value.split("\\t")`` survives elsewhere in this function, which
    suggests tab-separated records — verify against upstream.

    Uses module globals: ``home``, ``ui``, ``logger``, ``imp``,
    ``open_files``, ``TMPDIR`` (assumed project helpers — confirm).
    """
    site_var = None
    bookmark = False
    status = site_option
    logger.info(
        "\n{0}:{1}:{2}\n --473--serverlib".format(site, site_option, search_term)
    )
    if site.lower() == "bookmark":
        bookmark = True
        status = site_option
        if status.lower() == "all":
            status = "bookmark"
        else:
            # Match site_option (case-insensitively) against the bookmark
            # category files under ~/Bookmark.
            m = os.listdir(os.path.join(home, "Bookmark"))
            for i in m:
                i = i.lower().replace(".txt", "")
                if i == site_option.lower():
                    status = i
                    break
    m = []
    search_term = search_term.lower()
    epnArrList = []
    new_dir_path = None
    new_name = "Not Available"
    bookmark_path = os.path.join(home, "Bookmark", status + ".txt")
    if bookmark and os.path.isfile(bookmark_path):
        # Find the first bookmark record whose name field (index 5)
        # contains the search term; fall back to record 0 otherwise.
        line_a = open_files(bookmark_path, True)
        r = 0
        for k, i in enumerate(line_a):
            j = i.strip()
            if j:
                j = i.split(":")
                if j:
                    if search_term in j[5].lower():
                        site = j[0]
                        r = k
                        break
        # Unpack the colon-separated bookmark record:
        # site:opt:siteName:base_url:embed:name[:finalUrlFound:refererNeeded
        # [:local_stream[:dir_path]]]
        tmp = line_a[r]
        tmp = tmp.strip()
        tmp1 = tmp.split(":")
        site = tmp1[0]
        if site.lower() == "music" or site.lower() == "video":
            opt = "Not Defined"
            if site.lower() == "music":
                music_opt = tmp1[1]
            else:
                video_opt = tmp1[1]
        else:
            opt = tmp1[1]
        pre_opt = tmp1[2]
        siteName = tmp1[2]
        base_url = int(tmp1[3])
        embed = int(tmp1[4])
        name = tmp1[5]
        new_name = name
        if site.lower() == "local":
            name_path = name
        video_local_stream = False
        logger.info(name)
        if len(tmp1) > 6:
            # Optional boolean fields are stored as the strings
            # "True"/"False".
            if tmp1[6] == "True":
                finalUrlFound = True
            else:
                finalUrlFound = False
            if tmp1[7] == "True":
                refererNeeded = True
            else:
                refererNeeded = False
            if len(tmp1) >= 9:
                if tmp1[8] == "True":
                    video_local_stream = True
                else:
                    video_local_stream = False
            if len(tmp1) >= 10:
                new_dir_path = tmp1[9]
            print(finalUrlFound)
            print(refererNeeded)
            print(video_local_stream)
        else:
            refererNeeded = False
            finalUrlFound = False
        logger.info(site + ":" + opt)
    site_var = None
    logger.info("--16069----")
    if (
        not site.lower().startswith("playlist")
        and site.lower() != "music"
        and site.lower() != "video"
        and site.lower() != "local"
        and site.lower() != "none"
    ):
        # Addon/plugin-backed site: load the plugin module, then search
        # the per-title history folders for episode lists.
        logger.info("search_term={0}".format(search_term))
        if search_term:
            epnArrList = []
            # Canonicalize site to its registered addon spelling.
            for i in ui.addons_option_arr:
                if site.lower() == i.lower():
                    site = i
                    break
            plugin_path = os.path.join(home, "src", "Plugins", site + ".py")
            if os.path.exists(plugin_path):
                logger.info("plugin_path={0}".format(plugin_path))
                if site_var:
                    del site_var
                    site_var = ""
                # NOTE(review): 'imp' here takes (name, path) — this is a
                # project-local loader, not the stdlib 'imp' module; confirm.
                module = imp.import_module(site, plugin_path)
                site_var = getattr(module, site)(TMPDIR)
                siteName = site_option
                if site_var:
                    if (
                        site.lower() == "subbedanime"
                        or site.lower() == "dubbedanime"
                    ):
                        # These sites nest history under a sub-site name;
                        # canonicalize it via the plugin's option list.
                        criteria = site_var.getOptions()
                        for i in criteria:
                            if siteName.lower() == i.lower():
                                siteName = i
                                break
            else:
                return 0
            for i, value in enumerate(original_path_name):
                search_field = value.lower()
                if search_exact:
                    if " " in search_field:
                        search_field = search_field.split(" ")[0]
                logger.info("search_field={0}".format(search_field))
                if (search_term in search_field and not search_exact) or (
                    search_term == search_field and search_exact
                ):
                    cur_row = i
                    new_name_with_info = original_path_name[cur_row].strip()
                    extra_info = ""
                    logger.info(
                        "cur_row={0}, new_name={1}".format(i, new_name_with_info)
                    )
                    if " " in new_name_with_info:
                        name = new_name_with_info.split(" ")[0]
                        extra_info = new_name_with_info.split(" ")[1]
                    else:
                        name = new_name_with_info
                    if (
                        site.lower() == "subbedanime"
                        or site.lower() == "dubbedanime"
                    ):
                        hist_site = os.path.join(
                            home, "History", site, siteName, name
                        )
                    else:
                        hist_site = os.path.join(home, "History", site, name)
                    hist_epn = os.path.join(hist_site, "Ep.txt")
                    logger.info(hist_epn)
                    if os.path.exists(hist_epn):
                        # Each Ep.txt line is one episode record; pad
                        # single-field lines so every entry carries a
                        # display field, a target field and the title name.
                        lines = open_files(hist_epn, True)
                        m = []
                        for i in lines:
                            i = i.strip()
                            j = i.split(" ")
                            if len(j) == 1:
                                epnArrList.append(i + " " + i + " " + name)
                            elif len(j) >= 2:
                                epnArrList.append(i + " " + name)
                        picn = os.path.join(hist_site, "poster.jpg")
                        fanart = os.path.join(hist_site, "fanart.jpg")
                        thumbnail = os.path.join(hist_site, "thumbnail.jpg")
                        sum_file = os.path.join(hist_site, "summary.txt")
                        summary = ui.get_summary_history(sum_file)
    elif site.lower() == "music":
        # Music: match artists/directories/playlists, optionally by
        # sha256 hash when search_term ends in '.hash'.
        art_n = search_term
        music_dir = os.path.join(home, "Music")
        music_db = os.path.join(home, "Music", "Music.db")
        music_file = os.path.join(home, "Music", "Music.txt")
        music_file_bak = os.path.join(home, "Music", "Music_bak.txt")
        music_opt = site_option
        if music_opt:
            # Title-case the option (and the part after a '-', e.g.
            # 'fav-directory' -> 'Fav-Directory') for DB lookups.
            music_opt = music_opt[0].upper() + music_opt[1:]
            if "-" in music_opt:
                tmp = music_opt.split("-", 1)
                sub_tmp = tmp[1]
                music_opt = tmp[0] + "-" + sub_tmp[0].upper() + sub_tmp[1:]
        artist = []
        logger.info(original_path_name)
        hash_srch = None
        hash_dir = None
        if search_term.endswith(".hash"):
            hash_srch = search_term.rsplit(".", 1)[0]
            logger.debug(hash_srch)
        for index, value in enumerate(original_path_name):
            if (
                music_opt.lower() == "directory"
                or music_opt.lower() == "fav-directory"
            ):
                search_field = os.path.basename(value).lower()
                if hash_srch:
                    hash_dir = bytes(value, "utf-8")
            else:
                search_field = value.lower()
                if hash_srch:
                    if music_opt.lower().startswith("playlist"):
                        hash_dir = bytes(value.split("\t")[1], "utf-8")
                    else:
                        hash_dir = bytes(value.split("\t")[0], "utf-8")
            # logger.debug(value)
            if hash_srch and hash_dir:
                # Hash mode: only the entry whose sha256 digest equals the
                # requested hash proceeds; search_term is rebound so the
                # normal match test below succeeds for it.
                h = hashlib.sha256(hash_dir)
                hash_val = h.hexdigest()
                if hash_val == hash_srch:
                    search_term = search_field
                else:
                    continue
            if (search_term in search_field and not search_exact) or (
                search_term == search_field and search_exact
            ):
                if " " in value.lower():
                    art_n = value.split(" ")[0]
                else:
                    art_n = value.strip()
                if music_opt.lower() == "directory":
                    art_n = value
                if music_opt.lower() == "fav-directory":
                    art_n = value
                if (
                    music_opt.lower() == "playlist"
                    or music_opt.lower() == "playlists"
                ):
                    # Playlist entries: read title/path/(optional referer)
                    # triples straight from the playlist file.
                    pls = value.split(" ")[0]
                    m = open_files(os.path.join(home, "Playlists", pls), True)
                    for i in m:
                        i = i.replace("\n", "")
                        if i:
                            j = i.split(" ")
                            i1 = j[0]
                            i2 = j[1]
                            try:
                                i3 = j[2]
                            except:
                                i3 = "None"
                            artist.append(i1 + " " + i2 + " " + i3)
                else:
                    # Everything else: query the music DB by option/artist.
                    music_opt = music_opt[0].upper() + music_opt[1:]
                    if "-" in music_opt:
                        tmp = music_opt.split("-", 1)
                        sub_tmp = tmp[1]
                        music_opt = tmp[0] + "-" + sub_tmp[0].upper() + sub_tmp[1:]
                    m = ui.media_data.get_music_db(music_db, music_opt, art_n)
                    for i in m:
                        artist.append(i[1] + " " + i[2] + " " + i[0])
                if search_term == search_field and search_exact:
                    print("exact match:")
                    break
        epnArrList[:] = []
        for i in artist:
            epnArrList.append((i))
    elif site.lower().startswith("playlist"):
        epnArrList = []
        for index, value in enumerate(original_path_name):
            search_field = value.lower().split(" ")[0]
            if (search_term in search_field and not search_exact) or (
                search_term == search_field and search_exact
            ):
                pls = value.split(" ")[0]
                file_path = os.path.join(home, "Playlists", str(pls))
                if os.path.exists(file_path):
                    lines = open_files(file_path, True)
                    k = 0
                    for i in lines:
                        i = i.strip()
                        if i:
                            # Tag each entry with its playlist name so the
                            # caller can attribute it ('entry##playlist').
                            if not search_exact:
                                i = i + "##" + pls
                            epnArrList.append(i)
    elif site.lower() == "video":
        epnArrList = []
        hash_srch = None
        if search_term.endswith(".hash"):
            hash_srch = search_term.rsplit(".", 1)[0]
            logger.debug(hash_srch)
        for index, value in enumerate(original_path_name):
            if " " in value.lower():
                art_n = value.split(" ")[0]
            else:
                art_n = value.strip()
            search_field = art_n.lower()
            if hash_srch:
                # Same hash-selection scheme as the music branch, keyed on
                # the entry's second tab-separated field (directory path).
                hash_dir = bytes(value.split("\t")[1], "utf-8")
                h = hashlib.sha256(hash_dir)
                hash_val = h.hexdigest()
                if hash_val == hash_srch:
                    search_term = search_field
                else:
                    continue
            if (search_term in search_field and not search_exact) or (
                search_term == search_field and search_exact
            ):
                name = art_n
                video_dir = os.path.join(home, "VideoDB")
                logger.info("{0}--search-client--".format(art_n))
                video_db = os.path.join(video_dir, "Video.db")
                video_file = os.path.join(video_dir, "Video.txt")
                video_file_bak = os.path.join(video_dir, "Video_bak.txt")
                artist = []
                if not bookmark:
                    # Plain video search: query by directory name; the
                    # 'update'/'updateall' modes are treated as 'Available'.
                    video_opt = site_option[0].upper() + site_option[1:]
                    print(video_opt, "---15112----")
                    if (
                        video_opt.lower() == "update"
                        or video_opt.lower() == "updateall"
                    ):
                        video_opt = "Available"
                    n_art_n = original_path_name[index].split(" ")[-1]
                    m = ui.media_data.get_video_db(video_db, "Directory", n_art_n)
                    logger.info(
                        "{0}--{1}--search-client--14534--".format(art_n, n_art_n)
                    )
                else:
                    # Bookmark search: prefer the stored directory path,
                    # fall back to a bookmark-name lookup.
                    try:
                        new_dir_path = search_field.split(" ")[-1]
                    except Exception as e:
                        print(e)
                    logger.info(new_dir_path)
                    if new_dir_path is not None:
                        if new_dir_path.lower() != "none":
                            m = ui.media_data.get_video_db(
                                video_db, "Directory", new_dir_path
                            )
                        else:
                            m = ui.media_data.get_video_db(
                                video_db, "Bookmark", art_n
                            )
                    else:
                        m = ui.media_data.get_video_db(video_db, "Bookmark", art_n)
                for i in m:
                    artist.append(i[0] + " " + i[1] + " " + art_n)
                for i in artist:
                    epnArrList.append((i))
                # Side artwork/summary for locally stored titles.
                dir_path = os.path.join(home, "Local", art_n)
                if os.path.exists(dir_path):
                    picn = os.path.join(home, "Local", art_n, "poster.jpg")
                    thumbnail = os.path.join(home, "Local", art_n, "thumbnail.jpg")
                    fanart = os.path.join(home, "Local", art_n, "fanart.jpg")
                    summary1 = os.path.join(home, "Local", art_n, "summary.txt")
                    if os.path.exists(summary1):
                        summary = open_files(summary1, False)
                    else:
                        summary = "Not Available"
                if search_term == search_field and search_exact:
                    print("Exact Match:")
                    break
    return epnArrList