def triggerPlaylist(self, value, url, title):
    """Save one (title, url) pair into the playlist file named *value* and refresh the UI."""
    print('Menu Clicked')
    print(value)
    playlist_file = os.path.join(self.home, 'Playlists', str(value))
    # sanitize characters that would break the playlist entry / filename
    title = title.replace('/', '-').replace('#', '')
    if title.startswith('.'):
        title = title[1:]
    if 'list=' in url:
        title = title + '-Playlist'
    img_u = ''
    if self.media_url and 'ytimg.com' in self.media_url:
        img_u = self.media_url
    if 'playlist?list=' in url and img_u:
        # rewrite the playlist page url into a direct watch url using the
        # video id embedded in the thumbnail link
        yt_id = img_u.split('/')[-2]
        o_url = r'https://m.youtube.com/playlist?list='
        n_url = 'https://m.youtube.com/watch?v=' + yt_id + '&index=1&list='
        url = url.replace(o_url, n_url)
        print(url, o_url, n_url)
    entry = title + ' ' + url + ' ' + 'NONE'
    write_files(playlist_file, entry, line_by_line=True)
    self.ui.update_playlist(playlist_file)
def _set_password(self):
    """Persist a hashed HTTP-Basic auth token to other_options.txt.

    Combines name:password, base64-encodes it in HTTP Basic form, stores
    only a SHA-256 hash of that value (prefixed by '******'), then resets
    the list of already-authenticated clients to local addresses only.
    """
    text_val = self.text_name.text()
    pass_val = self.text_pass.text()
    if not text_val:
        text_val = ''
    if not pass_val:
        pass_val = ''
    new_combine = bytes(text_val + ':' + pass_val, 'utf-8')
    new_txt = base64.b64encode(new_combine)
    new_txt_str = 'Basic ' + str(new_txt, 'utf-8')
    new_txt_bytes = bytes(str(new_txt_str), 'utf-8')
    h = hashlib.sha256(new_txt_bytes)
    h_digest = h.hexdigest()
    new_pass = '******' + h_digest
    config_file = os.path.join(self.ui.home_folder, 'other_options.txt')
    content = open_files(config_file, lines_read=False)
    # BUG FIX: keep the 'AUTH=' key in the replacement; the original
    # substitution dropped the key, leaving a value-only line that a
    # later startswith('AUTH=') lookup could never find.
    content = re.sub('AUTH=[^\n]*', 'AUTH=' + new_pass, content)
    write_files(config_file, content, line_by_line=False)
    self.hide()
    self.ui.media_server_key = h_digest
    # reset the client whitelist: only local addresses remain authorized
    self.ui.client_auth_arr[:] = []
    self.ui.client_auth_arr = ['127.0.0.1', '0.0.0.0']
    if self.ui.local_ip not in self.ui.client_auth_arr:
        self.ui.client_auth_arr.append(self.ui.local_ip)
    if self.ui.local_ip_stream not in self.ui.client_auth_arr:
        self.ui.client_auth_arr.append(self.ui.local_ip_stream)
def add_playlist(self, value):
    """Write every (yt_id -> title) pair from self.playlist_dict into the
    playlist file named *value* under <home>/Playlists.

    Removed: unused locals ``new_pl`` and ``j`` and commented-out direct
    file writing that write_files() replaced.
    """
    value = value.replace('/', '-').replace('#', '')
    if value.startswith('.'):
        value = value[1:]
    file_path = os.path.join(self.home, 'Playlists', str(value))
    new_arr = []
    for yt_id, title in self.playlist_dict.items():
        # sanitize the stored title the same way as the playlist name
        title = title.replace('/', '-').replace('#', '')
        if title.startswith('.'):
            title = title[1:]
        n_url = 'https://m.youtube.com/watch?v=' + yt_id
        new_arr.append(title + ' ' + n_url + ' ' + 'NONE')
    write_files(file_path, new_arr, line_by_line=True)
    self.get_playlist = False
def triggerPlaylist(self, value, url, title):
    """Append one entry to playlist *value*, resolving ytimg thumbnail
    links back to their watch page and sanitizing the title."""
    self.ui.logger.debug(value)
    file_path = os.path.join(self.home, "Playlists", str(value))
    if "ytimg.com" in url:
        # a thumbnail url was passed: recover the video id and real title
        try:
            print(self.playlist_dict)
            yt_id = url.split("/")[-2]
            url = "https://m.youtube.com/watch?v=" + yt_id
            title = self.playlist_dict[yt_id]
        except Exception as err:
            self.ui.logger.error(err)
    title = title.replace("/", "-").replace("#", "")
    if title.startswith("."):
        title = title[1:]
    if "list=" in url:
        title = title + "-Playlist"
    title = title.replace("\\", " - ")
    img_u = self.img_url.toString() if self.img_url else ""
    if "playlist?list=" in url and img_u:
        try:
            # turn the playlist page url into a direct watch url
            yt_id = img_u.split("/")[-2]
            o_url = r"https://m.youtube.com/playlist?list="
            n_url = "https://m.youtube.com/watch?v=" + yt_id + "&index=1&list="
            url = url.replace(o_url, n_url)
            print(url, o_url, n_url)
        except Exception as err:
            self.ui.logger.error(err)
    self.ui.logger.debug("{}-{}-{}".format(title, url, file_path))
    record = title + " " + url + " " + "NONE"
    write_files(file_path, record, line_by_line=True)
def triggerPlaylist(self, value, url, title):
    """Store one title/url pair in the playlist file *value*, then refresh the UI."""
    print('Menu Clicked')
    print(value)
    target = os.path.join(self.home, 'Playlists', str(value))
    # scrub characters unsafe for the playlist format / filesystem
    for bad, good in (('/', '-'), ('#', '')):
        if bad in title:
            title = title.replace(bad, good)
    if title.startswith('.'):
        title = title[1:]
    if 'list=' in url:
        title = title + '-Playlist'
    img_u = ''
    if self.media_url and 'ytimg.com' in self.media_url:
        img_u = self.media_url
    if 'playlist?list=' in url and img_u:
        # derive the video id from the thumbnail link and convert the
        # playlist page url into a direct watch url
        yt_id = img_u.split('/')[-2]
        old_prefix = r'https://m.youtube.com/playlist?list='
        new_prefix = 'https://m.youtube.com/watch?v=' + yt_id + '&index=1&list='
        url = url.replace(old_prefix, new_prefix)
        print(url, old_prefix, new_prefix)
    write_files(target, title + ' ' + url + ' ' + 'NONE', line_by_line=True)
    self.ui.update_playlist(target)
def add_playlist(self, value):
    """Dump self.playlist_dict (yt_id -> title) into the playlist file
    named *value* under <home>/Playlists.

    Removed: unused locals ``new_pl`` and ``j`` and the commented-out
    direct file writes superseded by write_files().
    """
    value = value.replace('/', '-').replace('#', '')
    if value.startswith('.'):
        value = value[1:]
    file_path = os.path.join(self.home, 'Playlists', str(value))
    new_arr = []
    for yt_id, title in self.playlist_dict.items():
        title = title.replace('/', '-').replace('#', '')
        if title.startswith('.'):
            title = title[1:]
        n_url = 'https://m.youtube.com/watch?v=' + yt_id
        new_arr.append(title + ' ' + n_url + ' ' + 'NONE')
    write_files(file_path, new_arr, line_by_line=True)
    self.get_playlist = False
def add_playlist(self, value):
    """Write self.playlist_dict entries into playlist *value*; on
    non-posix systems also strip characters invalid in filenames.

    Removed: unused locals ``new_pl`` and ``j``.
    """
    value = value.replace('/', '-').replace('#', '')
    if value.startswith('.'):
        value = value[1:]
    file_path = os.path.join(self.home, 'Playlists', str(value))
    new_arr = []
    for yt_id, title in self.playlist_dict.items():
        title = title.replace('/', '-').replace('#', '')
        if title.startswith('.'):
            title = title[1:]
        if os.name != 'posix':
            # ':' and '|' are not allowed in Windows filenames
            title = title.replace(':', '-').replace('|', '-')
        n_url = 'https://m.youtube.com/watch?v=' + yt_id
        new_arr.append(title + ' ' + n_url + ' ' + 'NONE')
    write_files(file_path, new_arr, line_by_line=True)
    self.get_playlist = False
def _set_params(self):
    """Read the settings-dialog widgets, persist them to other_options.txt,
    and apply them to the running UI.

    Handles: the streaming ip:port, the default download location, the
    keep-background-constant flag, the image-fit option, and (when a valid
    ip:port was entered) the torrent streaming IP.
    """
    new_ip_val = None
    new_ip_port = None
    torrent_ip = None
    try:
        if ':' in self.set_ip.text():
            new_ip_val, new_ip_port1 = self.set_ip.text().split(':')
            new_ip_port = int(new_ip_port1)
        # ip_address() raises when new_ip_val is None or malformed, which
        # sends us to the fallback branch below
        if ipaddress.ip_address(new_ip_val):
            ip = 'LOCAL_STREAM_IP=' + new_ip_val + ':' + str(new_ip_port)
            torrent_ip = 'TORRENT_STREAM_IP=' + new_ip_val + ':' + str(
                self.ui.local_port)
    except Exception as err_val:
        print(err_val, '--ip--find--error--')
        # fall back to the currently configured stream ip and default port
        ip = 'LOCAL_STREAM_IP=' + self.ui.local_ip_stream
        new_ip_val = self.ui.local_ip_stream
        new_ip_port = 9001
    if os.path.exists(self.set_default_download.text()):
        location = 'DEFAULT_DOWNLOAD_LOCATION=' + self.set_default_download.text()
        location_val = self.set_default_download.text()
    else:
        # entered path does not exist: keep the previous download location
        location = 'DEFAULT_DOWNLOAD_LOCATION=' + self.ui.default_download_location
        location_val = self.ui.default_download_location
    backg = self.backg.currentText()
    img_val = self.img_opt.currentIndex()
    if img_val == 0:
        img_val = 1
    img_opt_str = 'IMAGE_FIT_OPTION=' + str(img_val)
    config_file = os.path.join(self.ui.home_folder, 'other_options.txt')
    lines = open_files(config_file, lines_read=True)
    new_lines = []
    # rewrite only the options owned by this dialog; keep everything else
    for i in lines:
        i = i.strip()
        if i.startswith('LOCAL_STREAM_IP='):
            i = ip
        elif i.startswith('DEFAULT_DOWNLOAD_LOCATION='):
            i = location
        elif i.startswith('KEEP_BACKGROUND_CONSTANT='):
            i = backg
        elif i.startswith('IMAGE_FIT_OPTION='):
            i = img_opt_str
        new_lines.append(i)
    write_files(config_file, new_lines, line_by_line=True)
    # apply the freshly saved values to the live UI object
    self.ui.local_ip_stream = new_ip_val
    self.ui.local_port_stream = new_ip_port
    self.ui.default_download_location = location_val
    self.ui.image_fit_option_val = img_val
    # backg has the form 'KEEP_BACKGROUND_CONSTANT=yes|no'
    back_g = backg.split('=')[1]
    if back_g == 'no':
        self.ui.keep_background_constant = False
    else:
        self.ui.keep_background_constant = True
    if torrent_ip:
        # a valid ip:port was parsed: also update the torrent config
        config_file_torrent = os.path.join(self.ui.home_folder,
                                           'torrent_config.txt')
        change_opt_file(config_file_torrent, 'TORRENT_STREAM_IP=', torrent_ip)
        self.ui.local_ip = new_ip_val
    self.hide()
def triggerPlaylist(self, value, url, title):
    """Add one sanitized entry to the playlist file named *value*."""
    self.ui.logger.debug(value)
    file_path = os.path.join(self.home, 'Playlists', str(value))
    if 'ytimg.com' in url:
        # thumbnail url: recover the video id and look up its real title
        try:
            print(self.playlist_dict)
            yt_id = url.split('/')[-2]
            url = 'https://m.youtube.com/watch?v=' + yt_id
            title = self.playlist_dict[yt_id]
        except Exception as err:
            self.ui.logger.error(err)
    for ch, repl in (('/', '-'), ('#', '')):
        if ch in title:
            title = title.replace(ch, repl)
    if title.startswith('.'):
        title = title[1:]
    if 'list=' in url:
        title = title + '-Playlist'
    title = title.replace('\\', ' - ')
    img_u = self.img_url.toString() if self.img_url else ''
    if 'playlist?list=' in url and img_u:
        try:
            # convert a playlist page url into a direct watch url
            yt_id = img_u.split('/')[-2]
            o_url = r'https://m.youtube.com/playlist?list='
            n_url = 'https://m.youtube.com/watch?v=' + yt_id + '&index=1&list='
            url = url.replace(o_url, n_url)
            print(url, o_url, n_url)
        except Exception as err:
            self.ui.logger.error(err)
    self.ui.logger.debug('{}-{}-{}'.format(title, url, file_path))
    write_files(file_path, title + ' ' + url + ' ' + 'NONE', line_by_line=True)
def add_playlist(self, value):
    """Write self.playlist_dict entries into the playlist file *value*,
    stripping filename-hostile characters (extra ones on non-posix).

    Removed: unused locals ``new_pl`` and ``j``.
    """
    value = value.replace("/", "-").replace("#", "")
    if value.startswith("."):
        value = value[1:]
    file_path = os.path.join(self.home, "Playlists", str(value))
    new_arr = []
    for yt_id, title in self.playlist_dict.items():
        title = title.replace("/", "-").replace("#", "")
        if title.startswith("."):
            title = title[1:]
        if os.name != "posix":
            # ':' and '|' are invalid in Windows filenames
            title = title.replace(":", "-").replace("|", "-")
        n_url = "https://m.youtube.com/watch?v=" + yt_id
        new_arr.append(title + " " + n_url + " " + "NONE")
    write_files(file_path, new_arr, line_by_line=True)
    self.get_playlist = False
def got_curl_html(self, title, url, value):
    """Append a fetched title/url pair to playlist *value* and refresh the UI."""
    playlist_path = os.path.join(self.home, 'Playlists', str(value))
    sanitized = title.replace('/', '-')
    entry = sanitized + ' ' + url + ' ' + 'NONE'
    write_files(playlist_path, entry, line_by_line=True)
    self.ui.update_playlist(playlist_path)
def triggerPlaylist(self, value, url, title):
    """Write one entry into playlist *value* after normalizing the title."""
    self.ui.logger.debug(value)
    file_path = os.path.join(self.home, "Playlists", str(value))
    title = title.replace("/", "-").replace("#", "")
    if title.startswith("."):
        title = title[1:]
    title = title.replace("\\", " - ")
    if os.name != "posix":
        # strip characters that Windows forbids in filenames
        title = title.replace(":", "-").replace("|", "-")
    if "list=" in url:
        title = title + "-Playlist"
    img_u = ""
    if self.media_url and "ytimg.com" in self.media_url:
        img_u = self.media_url
    if "playlist?list=" in url and img_u:
        # convert the playlist page url to a direct watch url using the
        # video id found in the thumbnail link
        yt_id = img_u.split("/")[-2]
        o_url = r"https://m.youtube.com/playlist?list="
        n_url = "https://m.youtube.com/watch?v=" + yt_id + "&index=1&list="
        url = url.replace(o_url, n_url)
        print(url, o_url, n_url)
    write_files(file_path, title + " " + url + " " + "NONE", line_by_line=True)
def got_curl_html(self, title, url, value):
    """Store the scraped (title, url) pair in the playlist named *value*."""
    file_path = os.path.join(self.home, 'Playlists', str(value))
    if '/' in title:
        title = title.replace('/', '-')
    line = '{0} {1} NONE'.format(title, url)
    write_files(file_path, line, line_by_line=True)
    self.ui.update_playlist(file_path)
def got_curl_html(self, title, url, value):
    """Record a downloaded title/url entry in playlist *value* and refresh the view."""
    target = os.path.join(self.home, "Playlists", str(value))
    clean_title = title.replace("/", "-")
    write_files(target, clean_title + " " + url + " " + "NONE",
                line_by_line=True)
    self.ui.update_playlist(target)
def keyPressEvent(self, event):
    """Keyboard navigation for this list widget.

    Down/Up move the selection, handing focus to the episode filter box
    at either end; Return runs the search; 'Q' queues the currently
    selected episode (for local/playlist-style sites) into the Queue
    playlist file and the queue list widget.
    """
    if event.key() == QtCore.Qt.Key_Down:
        nextr = self.currentRow() + 1
        if nextr == self.count():
            # walked past the last row: move focus to the filter box
            ui.goto_epn_filter_txt.setFocus()
        else:
            self.setCurrentRow(nextr)
    elif event.key() == QtCore.Qt.Key_Up:
        prev_r = self.currentRow() - 1
        if self.currentRow() == 0:
            # walked above the first row: move focus to the filter box
            ui.goto_epn_filter_txt.setFocus()
        else:
            self.setCurrentRow(prev_r)
    elif event.key() == QtCore.Qt.Key_Return:
        ui.search_list5_options()
    elif event.key() == QtCore.Qt.Key_Q:
        site = ui.get_parameters_value(s='site')['site']
        if (site == "Music" or site == "Video" or site == "Local"
                or site == "PlayLists" or site == "None"):
            # make sure the Queue playlist file exists before appending
            file_path = os.path.join(home, 'Playlists', 'Queue')
            if not os.path.exists(file_path):
                f = open(file_path, 'w')
                f.close()
            if not ui.queue_url_list:
                ui.list6.clear()
            indx = self.currentRow()
            item = self.item(indx)
            if item:
                # list entries look like 'row:title'; recover the row index
                tmp = str(self.currentItem().text())
                tmp1 = tmp.split(':')[0]
                r = int(tmp1)
                ui.queue_url_list.append(ui.epn_arr_list[r])
                ui.list6.addItem(ui.epn_arr_list[r].split(' ')[0])
                logger.info(ui.queue_url_list)
                write_files(file_path, ui.epn_arr_list[r], line_by_line=True)
def download(self, url, option):
    """Context-menu dispatcher for a web-view link.

    *url* is a QUrl; *option* is the selected menu text.  Depending on
    the option this plays the link, adds the scraped playlist, downloads
    the video, fetches subtitles, queues the item, pulls TVDB episode
    info, or fetches poster/fanart artwork.
    """
    if option.lower() == 'play with kawaii-player':
        final_url = ''
        self.ui.epn_name_in_list = self.title_page
        print(self.ui.epn_name_in_list)
        # stop any already-running player instance first
        if self.ui.mpvplayer_val.processId() > 0:
            self.ui.mpvplayer_val.kill()
            self.ui.mpvplayer_started = False
        if 'youtube.com' in url.toString() or 'ytimg.com' in url.toString():
            final_url = url.toString()
        else:
            # non-youtube links are resolved through youtube-dl
            final_url = 'ytdl:' + url.toString()
        self.ui.get_final_link(final_url, self.ui.quality_val,
                               self.ui.ytdl_path, self.ui.logger,
                               self.ui.epn_name_in_list, self.hdr)
    elif option.lower() == 'add as local playlist':
        self.get_playlist = True
        if self.playlist_dict:
            print(self.get_playlist, '=get_playlist')
            self.add_playlist(self.playlist_name)
    elif option.lower() == 'download':
        if self.ui.quality_val == 'sd480p':
            # 480p cannot be saved: fall back to HD
            txt = "Video can't be saved in 480p, Saving in either HD or SD"
            send_notification(txt)
            quality = 'hd'
        else:
            quality = self.ui.quality_val
        finalUrl = get_yt_url(url.toString(), quality, self.ui.ytdl_path,
                              self.ui.logger, mode='offline')
        finalUrl = finalUrl.replace('\n', '')
        title = self.title_page + '.mp4'
        title = title.replace('"', '')
        title = title.replace('/', '-')
        if os.path.exists(self.ui.default_download_location):
            title = os.path.join(self.ui.default_download_location, title)
        else:
            title = os.path.join(self.ui.tmp_download_folder, title)
        command = wget_string(finalUrl, title, self.ui.get_fetch_library)
        print(command)
        self.ui.infoWget(command, 0)
    elif option.lower() == 'get subtitle (if available)':
        self.ui.epn_name_in_list = self.title_page
        print(self.ui.epn_name_in_list)
        get_yt_sub(url.toString(), self.ui.epn_name_in_list,
                   self.yt_sub_folder, self.ui.tmp_download_folder,
                   self.ui.ytdl_path, self.ui.logger)
    elif option.lower() == 'queue item':
        # append the page title/url to the persistent Queue playlist
        file_path = os.path.join(self.home, 'Playlists', 'Queue')
        if not os.path.exists(file_path):
            f = open(file_path, 'w')
            f.close()
        if not self.ui.queue_url_list:
            self.ui.list6.clear()
        title = self.title_page.replace('/', '-')
        if title.startswith('.'):
            title = title[1:]
        r = title + ' ' + url.toString() + ' ' + 'NONE'
        self.ui.queue_url_list.append(r)
        self.ui.list6.addItem(title)
        print(self.ui.queue_url_list)
        write_files(file_path, r, line_by_line=True)
    elif option.lower() == 'season episode link':
        if self.site != "Music" and self.site != "PlayLists":
            my_copy = self.ui.epn_arr_list.copy()
            r = self.ui.list1.currentRow()
            nm = self.ui.get_title_name(r)
            self.ui.metaengine.getTvdbEpnInfo(url.toString(),
                                              site=self.site,
                                              epn_arr=my_copy, name=nm,
                                              row=r)
    elif (option.lower() == 'artist link'
            or option.lower() == 'series link'
            or option.lower() == 'series/movie link'):
        url = url.toString()
        r = self.ui.list1.currentRow()
        nm = self.ui.get_title_name(r)
        self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                direct_url=True, copy_summary=True,
                                copy_poster=True, copy_fanart=True)
    else:
        # artwork options: resolve the final image url first
        url = url.toString()
        if url:
            # HEAD request to check content type / redirects
            t_content = ccurl(url, curl_opt='-I')
            if 'image/jpeg' in t_content and not 'Location:' in t_content:
                pass
            elif 'image/jpeg' in t_content and 'Location:' in t_content:
                # follow the redirect target announced in the header
                m = re.findall('Location: [^\n]*', t_content)
                found = re.sub('Location: |\r', '', m[0])
                url = found
            elif not self.img_url.isEmpty():
                url = self.img_url.toString()
            else:
                return 0
            if '#' in url:
                url = url.split('#')[0]
            if option.lower() == "download as fanart":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                print(option, '----')
                self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                        direct_url=True, copy_summary=False,
                                        copy_poster=False, copy_fanart=True)
            elif option.lower() == "download as cover":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                        direct_url=True, copy_summary=False,
                                        copy_poster=True, copy_fanart=False)
def getTvdbEpnInfo(self, url, epn_arr=None, name=None, site=None,
                   row=None, thread=None, video_dir=None):
    """Scrape an episode table from a thetvdb.com listing page and remap
    the local episode list *epn_arr* to 'SxE Title' names.

    Side effects: updates the Video.db EP_NAME column (site == 'Video'),
    rewrites the per-show History Ep.txt for other sites, refreshes the
    episode list widget, and prepares a thumbnail-download image dict
    (handed to *thread* when given).
    NOTE(review): reconstructed from whitespace-mangled source; the exact
    nesting of a few statements is inferred — verify against upstream.
    """
    epn_arr_list = epn_arr.copy()
    content = ccurl(url)
    soup = BeautifulSoup(content, 'lxml')
    m = []
    link1 = soup.find('div', {'class': 'section'})
    if not link1:
        return 0
    link = link1.findAll('td')
    n = []
    """ep_dict = {epn_key:[sr, name, img_url, date, ep_url]}"""
    ep_dict = {}
    special_count = 0
    ep_count = 0
    image_dict = {}
    index_count = 0
    length_link = len(link)
    # table cells come in groups of four: number, title, date, image
    for i in range(4, len(link), 4):
        j = k = l = p = epurl = ''
        jj = link[i].find('a')
        if jj:
            j = jj.text
            if 'href' in str(jj):
                epurl = jj['href']
                if not epurl.startswith('http'):
                    if epurl.startswith('/'):
                        epurl = 'https://thetvdb.com' + epurl
                    else:
                        epurl = 'https://thetvdb.com/' + epurl
        if i + 1 < length_link:
            kk = link[i + 1].find('a')
            if kk:
                k = kk.text
            else:
                k = ''
        if i + 2 < length_link:
            l = link[i + 2].text
        if i + 3 < length_link:
            p = link[i + 3].find('img')
        if p:
            # build the banner url from the series/poster ids embedded in
            # the episode link's query string
            img_lnk = link[i].find('a')['href']
            lnk = img_lnk.split('&')
            series_id = lnk[1].split('=')[-1]
            poster_id = lnk[3].split('=')[-1]
            q = "http://thetvdb.com/banners/episodes/" + series_id + '/' + poster_id + '.jpg'
        else:
            q = "NONE"
        j = j.replace(' ', '')
        if j == '1x0':
            continue
        k = k.replace('/', '-')
        k = k.replace(':', '-')
        t = j + ' ' + k + ':' + q
        if j:
            j = j.lower().strip()
            key = None
            s = e = ''
            if 'x' in j:
                # already 'SxE' form
                s, e = j.split('x', 1)
                ep_count += 1
            elif j == 'special':
                special_count += 1
                s = '0'
                e = str(special_count)
            else:
                # bare episode number: try to infer the season from the
                # matching local episode name
                e = j
                ni = ''
                if index_count < len(epn_arr_list):
                    if '\t' in epn_arr_list[index_count]:
                        ni = epn_arr_list[index_count].split('\t')[0]
                    else:
                        ni = epn_arr_list[index_count]
                    if ni.startswith('#'):
                        ni = ni.replace('#', '', 1)
                if ni:
                    epn_value, sn = self.find_episode_key_val(
                        ni, index=index_count, season=True)
                    if sn >= 0:
                        s = str(sn)
                        j = s + 'x' + j
                        ep_count += 1
            if s and e:
                s = s.strip()
                e = e.strip()
                key = 's' + s + 'e' + e
            if key:
                ep_dict.update({key: [j, k, q, l, epurl]})
                if j == 'special':
                    n.append(t)
                else:
                    m.append(t)
            # also index by plain episode counter for fallback lookups
            key = 'e' + str(ep_count)
            ep_dict.update({key: [j, k, q, l, epurl]})
        index_count += 1
    for i in ep_dict:
        logger.debug('\nkey:{0} value:{1}\n'.format(i, ep_dict[i]))
    new_arr = []
    # remap every local entry whose episode key matches a scraped one
    for i, val in enumerate(epn_arr_list):
        if '\t' in val:
            name_val, extra = val.split('\t', 1)
        else:
            name_val = val
            extra = ''
        watched = False
        if name_val.startswith('#'):
            # leading '#' marks an already-watched entry
            name_val = name_val.replace('#', '', 1)
            watched = True
        lower_case = name_val.lower()
        key_found = False
        ep_val = None
        ep_patn = self.find_episode_key_val(lower_case, index=i)
        if ep_patn:
            ep_val = ep_dict.get(ep_patn)
            if ep_val:
                key_found = True
        if key_found:
            new_name = ep_val[0] + ' ' + ep_val[1]
            """image_dict={sr:[img_url, date, ep_url, local_path, site]}"""
            image_dict.update(
                {new_name: [ep_val[2], ep_val[3], ep_val[4], extra, site]})
            if extra:
                new_val = new_name + '\t' + extra
            else:
                new_val = new_name + '\t' + name_val
            if watched:
                new_val = '#' + new_val
        else:
            new_val = val
        new_arr.append(new_val)
    if new_arr:
        epn_arr_list = self.epn_list = new_arr.copy()
    if site == "Video":
        # propagate the new display names into the local video database
        video_db = os.path.join(home, 'VideoDB', 'Video.db')
        conn = sqlite3.connect(video_db)
        cur = conn.cursor()
        for r, val in enumerate(epn_arr_list):
            txt = val.split(' ')[1]
            ep_name = val.split(' ')[0]
            qr = 'Update Video Set EP_NAME=? Where Path=?'
            cur.execute(qr, (ep_name, txt))
        conn.commit()
        conn.close()
        try:
            # invalidate the cached directory listing so it gets reloaded
            txt = None
            if row is None:
                if video_dir:
                    txt = video_dir
            else:
                txt = ui.original_path_name[row].split(' ')[1]
            if txt in ui.video_dict:
                del ui.video_dict[txt]
        except Exception as err:
            print(err, '--4240---')
    elif site == 'Music' or site == 'PlayLists' or site == 'NONE':
        pass
    else:
        if site.lower() == 'subbedanime' or site.lower() == 'dubbedanime':
            siteName = ui.get_parameters_value(s='siteName')['siteName']
            file_path = os.path.join(home, 'History', site, siteName, name,
                                     'Ep.txt')
        else:
            file_path = os.path.join(home, 'History', site, name, 'Ep.txt')
        if os.path.exists(file_path):
            write_files(file_path, epn_arr_list, line_by_line=True)
            logger.debug('<<<<<<<{0}>>>>>>>>'.format(file_path))
    if thread is None:
        ui.update_list2(epn_arr=epn_arr_list)
    if thread:
        # hand thumbnail work to the background thread
        thread.image_dict_list = image_dict.copy()
        if site.lower() == 'video':
            thread.dest_dir = os.path.join(home, 'thumbnails',
                                           'thumbnail_server')
        else:
            thread.dest_dir = os.path.join(home, "thumbnails", name)
        thread.site = site
    else:
        if site.lower() == 'video':
            dest_dir = os.path.join(home, 'thumbnails', 'thumbnail_server')
        else:
            dest_dir = os.path.join(home, "thumbnails", name)
        if thread is None:
            update_image_list_method(image_dict, dest_dir, site)
def map_episodes(self, tvdb_dict=None, epn_arr=None, name=None,
                 site=None, row=None, video_dir=None):
    """Map local episode names in *epn_arr* onto TVDB metadata in
    *tvdb_dict*, writing per-episode summary files and scheduling
    thumbnail downloads, then persist the renamed list.

    NOTE(review): reconstructed from whitespace-mangled source; nesting
    of a few statements is inferred — verify against upstream.
    """
    epn_arr_list = epn_arr.copy()
    ep_dict = tvdb_dict.copy()
    sr_dict = {}
    ep_list = []
    sp = 0
    # '0x…' keys are specials; count them so absolute episode numbers can
    # be shifted when building the serial-number fallback table
    for key, value in ep_dict.items():
        if value and not key.startswith('0x'):
            ep_list.append(value)
        elif key.startswith('0x'):
            sp += 1
    for i, j in enumerate(ep_list):
        key = j[0] - sp + 1
        sr_dict.update({key: j})
    if site.lower() == 'video':
        dest_dir = os.path.join(home, 'thumbnails', 'thumbnail_server')
    else:
        dest_dir = os.path.join(home, "thumbnails", name)
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    new_arr = []
    for i, val in enumerate(epn_arr_list):
        if '\t' in val:
            name_val, extra = val.split('\t', 1)
        else:
            name_val = val
            extra = ''
        watched = False
        if name_val.startswith('#'):
            # leading '#' marks an already-watched entry
            name_val = name_val.replace('#', '', 1)
            watched = True
        lower_case = name_val.lower()
        key_found = False
        ep_val = None
        ep_patn = self.find_episode_key_val(lower_case, index=i)
        print(ep_patn, lower_case)
        if ep_patn:
            e = -1
            if ep_patn.startswith('s') or ep_patn.startswith('e'):
                # normalize 'sXeY' / 'eY' patterns into the 'XxY' keys
                # used by ep_dict (missing season defaults to 1)
                if ep_patn.startswith('s'):
                    ep_patn = ep_patn[1:]
                s, e = ep_patn.split('e')
                if s:
                    s = int(s)
                else:
                    s = 1
                e = int(e)
                ep_patn = str(s) + 'x' + str(e)
                print(ep_patn, 'final...')
            ep_val = ep_dict.get(ep_patn)
            if not ep_val and e != -1:
                # fall back to absolute episode number lookup
                ep_val = sr_dict.get(e)
            if ep_val:
                key_found = True
        if key_found:
            if ep_val[3] is None:
                ep_val[3] = "None"
            new_name = ep_val[1] + ' ' + ep_val[3].replace('/', ' - ')
            summary = 'Air Date: {}\n\n{}: {}\n\n{}'.format(
                ep_val[-3], ep_val[1], ep_val[3], ep_val[-1])
            img_url = ep_val[-2]
            if extra:
                new_val = new_name + '\t' + extra
            else:
                new_val = new_name + '\t' + name_val
            if watched:
                new_val = '#' + new_val
            if site.lower() == 'video':
                # thumbnails for local files are keyed by a hash of the path
                if '\t' in extra:
                    path = extra.split('\t')[0]
                else:
                    path = extra
                path = path.replace('"', '')
                thumb_name_bytes = bytes(path, 'utf-8')
                h = hashlib.sha256(thumb_name_bytes)
                thumb_name = h.hexdigest()
                dest_txt = os.path.join(dest_dir, thumb_name + '.txt')
                dest_picn = os.path.join(dest_dir, thumb_name + '.jpg')
            else:
                dest_txt = os.path.join(dest_dir, new_name + '.txt')
                dest_picn = os.path.join(dest_dir, new_name + '.jpg')
            write_files(dest_txt, summary, line_by_line=False)
            self.remove_extra_thumbnails(dest_picn)
            if img_url and img_url.startswith('http'):
                # fetch the thumbnail asynchronously
                ui.vnt.get(
                    img_url, wait=0.1, out=dest_picn,
                    onfinished=partial(self.finished_thumbnails, i,
                                       new_name, summary, dest_picn)
                )
        else:
            new_val = val
        new_arr.append(new_val)
    if new_arr:
        epn_arr_list = self.epn_list = new_arr.copy()
    if site == "Video":
        # propagate renamed entries into the local video database
        video_db = os.path.join(ui.home_folder, 'VideoDB', 'Video.db')
        conn = sqlite3.connect(video_db)
        cur = conn.cursor()
        for r, val in enumerate(epn_arr_list):
            txt = val.split(' ')[1]
            ep_name = val.split(' ')[0]
            qr = 'Update Video Set EP_NAME=? Where Path=?'
            cur.execute(qr, (ep_name, txt))
        conn.commit()
        conn.close()
        try:
            # invalidate the cached directory listing
            txt = None
            if row is None:
                if video_dir:
                    txt = video_dir
            else:
                txt = ui.original_path_name[row].split(' ')[1]
            if txt in ui.video_dict:
                del ui.video_dict[txt]
        except Exception as err:
            print(err, '--4240---')
    elif site == 'Music' or site == 'PlayLists' or site == 'NONE':
        pass
    else:
        if site.lower() == 'subbedanime' or site.lower() == 'dubbedanime':
            siteName = ui.get_parameters_value(s='siteName')['siteName']
            file_path = os.path.join(home, 'History', site, siteName, name,
                                     'Ep.txt')
        else:
            file_path = os.path.join(home, 'History', site, name, 'Ep.txt')
        if os.path.exists(file_path):
            write_files(file_path, epn_arr_list, line_by_line=True)
            logger.debug('<<<<<<<{0}>>>>>>>>'.format(file_path))
def got_curl_html(self, title, url, file_path):
    """Append 'title url NONE' to *file_path* and refresh the playlist view."""
    record = ' '.join((title, url, 'NONE'))
    write_files(file_path, record, line_by_line=True)
    self.ui.update_playlist(file_path)
def download(self, url, option):
    """Context-menu dispatcher for a web-view link (animewatch variant).

    *url* is a QUrl; *option* is the selected menu text.  Plays the
    link, adds the scraped playlist, downloads the video, fetches
    subtitles, queues the item, pulls TVDB info, or fetches artwork.
    """
    if option.lower() == 'play with animewatch':
        final_url = ''
        self.ui.epn_name_in_list = self.title_page
        print(self.ui.epn_name_in_list)
        # stop any running player instance first
        if self.ui.mpvplayer_val.processId() > 0:
            self.ui.mpvplayer_val.kill()
        final_url = get_yt_url(url.toString(), self.ui.quality_val,
                               self.ui.ytdl_path, self.ui.logger)
        if final_url:
            self.ui.watchDirectly(final_url, self.ui.epn_name_in_list, 'no')
            self.ui.tab_5.show()
            self.ui.frame1.show()
            self.ui.tab_2.setMaximumWidth(self.ui.width_allowed + 50)
    elif option.lower() == 'add as local playlist':
        self.get_playlist = True
        if self.playlist_dict:
            print(self.get_playlist, '=get_playlist')
            self.add_playlist(self.playlist_name)
    elif option.lower() == 'download':
        if self.ui.quality_val == 'sd480p':
            # 480p cannot be saved: fall back to HD
            txt = "Video can't be saved in 480p, Saving in either HD or SD"
            send_notification(txt)
            quality = 'hd'
        else:
            quality = self.ui.quality_val
        finalUrl = get_yt_url(url.toString(), quality, self.ui.ytdl_path,
                              self.ui.logger)
        finalUrl = finalUrl.replace('\n', '')
        title = self.title_page + '.mp4'
        title = title.replace('"', '')
        title = title.replace('/', '-')
        if os.path.exists(self.ui.default_download_location):
            title = os.path.join(self.ui.default_download_location, title)
        else:
            title = os.path.join(self.ui.tmp_download_folder, title)
        command = wget_string(finalUrl, title, self.ui.get_fetch_library)
        print(command)
        self.ui.infoWget(command, 0)
    elif option.lower() == 'get subtitle (if available)':
        self.ui.epn_name_in_list = self.title_page
        print(self.ui.epn_name_in_list)
        get_yt_sub(url.toString(), self.ui.epn_name_in_list,
                   self.yt_sub_folder, self.ui.tmp_download_folder,
                   self.ui.ytdl_path, self.ui.logger)
    elif option.lower() == 'queue item':
        # append the page title/url to the persistent Queue playlist
        file_path = os.path.join(self.home, 'Playlists', 'Queue')
        if not os.path.exists(file_path):
            f = open(file_path, 'w')
            f.close()
        if not self.ui.queue_url_list:
            self.ui.list6.clear()
        title = self.title_page.replace('/', '-')
        if title.startswith('.'):
            title = title[1:]
        r = title + ' ' + url.toString() + ' ' + 'NONE'
        self.ui.queue_url_list.append(r)
        self.ui.list6.addItem(title)
        print(self.ui.queue_url_list)
        write_files(file_path, r, line_by_line=True)
    elif option.lower() == 'season episode link':
        if self.site != "Music" and self.site != "PlayLists":
            self.ui.getTvdbEpnInfo(url.toString())
    elif option.lower() == 'artist link' or option.lower() == 'series link':
        url = url.toString()
        r = self.ui.list1.currentRow()
        nm = self.ui.get_title_name(r)
        self.ui.posterfound_new(
            name=nm, site=self.site, url=url, direct_url=True,
            copy_summary=True, copy_poster=True, copy_fanart=True)
    else:
        # artwork options: resolve the final image url first
        url = url.toString()
        if url:
            # '#'-separated option string asks ccurl for a HEAD request
            t_content = ccurl(url + '#' + '-I')
            if 'image/jpeg' in t_content and not 'Location:' in t_content:
                pass
            elif 'image/jpeg' in t_content and 'Location:' in t_content:
                # follow the redirect target announced in the header
                m = re.findall('Location: [^\n]*', t_content)
                found = re.sub('Location: |\r', '', m[0])
                url = found
            elif not self.img_url.isEmpty():
                url = self.img_url.toString()
            else:
                return 0
            if option.lower() == "download as fanart":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                print(option, '----')
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=False, copy_fanart=True)
            elif option.lower() == "download as cover":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=True, copy_fanart=False)
def download(self, url, option, copy_summary=None):
    """Context-menu dispatcher for a link (kawaii-player variant).

    *url* is a plain string here; *option* is the selected menu text;
    *copy_summary* carries auxiliary data for the cast/copy options.
    Plays, casts, queues, downloads, or fetches metadata/artwork for
    the link depending on *option*.
    """
    if option.lower() == "play with kawaii-player":
        final_url = ""
        self.ui.epn_name_in_list = self.title_page
        self.ui.logger.info(self.ui.epn_name_in_list)
        # stop any running player instance first
        if self.ui.mpvplayer_val.processId() > 0:
            self.ui.mpvplayer_val.kill()
            self.ui.mpvplayer_started = False
        if "youtube.com" in url or "ytimg.com" in url:
            pass
        else:
            # non-youtube links are resolved through youtube-dl
            url = "ytdl:" + url
        self.ui.get_final_link(
            url, self.ui.quality_val, self.ui.ytdl_path, self.ui.logger,
            self.ui.epn_name_in_list, self.hdr,
        )
    elif option.lower() in ["cast this item", "cast queue"]:
        if option.lower() == "cast queue":
            self.ui.list2.process_browser_based_url(
                copy_summary, url, "queue")
        else:
            self.ui.list2.process_browser_based_url(
                copy_summary, url, "single")
    elif option.lower() == "add as local playlist":
        self.get_playlist = True
        if self.playlist_dict:
            self.ui.logger.info(self.get_playlist, "=get_playlist")
            self.add_playlist(self.playlist_name)
    elif option.lower() == "download":
        if self.ui.quality_val == "sd480p":
            # 480p cannot be saved: fall back to HD
            txt = "Video can't be saved in 480p, Saving in either HD or SD"
            send_notification(txt)
            quality = "hd"
        else:
            quality = self.ui.quality_val
        finalUrl = self.ui.yt.get_yt_url(url, quality, self.ui.ytdl_path,
                                         self.ui.logger, mode="offline")
        finalUrl = finalUrl.replace("\n", "")
        title = self.title_page + ".mp4"
        title = title.replace('"', "")
        title = title.replace("/", "-")
        if os.path.exists(self.ui.default_download_location):
            title = os.path.join(self.ui.default_download_location, title)
        else:
            title = os.path.join(self.ui.tmp_download_folder, title)
        command = wget_string(finalUrl, title, self.ui.get_fetch_library)
        self.ui.logger.debug(command)
        self.ui.infoWget(command, 0)
    elif option.lower() == "queue item":
        # append the page title/url to the persistent Queue playlist
        file_path = os.path.join(self.home, "Playlists", "Queue")
        if not os.path.exists(file_path):
            f = open(file_path, "w")
            f.close()
        if not self.ui.queue_url_list:
            self.ui.list6.clear()
        title = self.title_page.replace("/", "-")
        if title.startswith("."):
            title = title[1:]
        r = title + " " + url + " " + "NONE"
        self.ui.queue_url_list.append(r)
        self.ui.list6.addItem(title)
        write_files(file_path, r, line_by_line=True)
    elif option.lower() == "season episode link":
        if self.site != "Music" and self.site != "PlayLists":
            my_copy = self.ui.epn_arr_list.copy()
            r = self.ui.list1.currentRow()
            nm = self.ui.get_title_name(r)
            # pick the on-disk directory matching the selected title
            video_dir = None
            if self.site.lower() == "video":
                video_dir = self.ui.original_path_name[r].split("\t")[-1]
            elif self.site.lower() in ["playlists", "none", "music"]:
                pass
            else:
                video_dir = self.ui.original_path_name[r]
            self.ui.posterfound_new(
                name=nm, site=self.site, url=url, direct_url=True,
                copy_summary=False, copy_poster=False, copy_fanart=False,
                get_sum=True, video_dir=video_dir,
            )
    elif (option.lower() == "artist link"
            or option.lower() == "series link"
            or option.lower() == "series/movie link"):
        r = self.ui.list1.currentRow()
        nm = self.ui.get_title_name(r)
        self.ui.posterfound_new(
            name=nm, site=self.site, url=url, direct_url=True,
            copy_summary=True, copy_poster=True, copy_fanart=True,
        )
    elif option.lower() == "copy summary":
        self.ui.copySummary(copy_sum=copy_summary)
    else:
        # artwork options: resolve the final image url first
        if not url:
            url = self.media_url
        self.ui.logger.debug("{}--{}--media-url--".format(
            url, self.media_url))
        if url:
            # HEAD request to check content type / redirects
            t_content = ccurl(url, curl_opt="-I")
            if "image/jpeg" in t_content and not "Location:" in t_content:
                pass
            elif "image/jpeg" in t_content and "Location:" in t_content:
                # follow the redirect target announced in the header
                m = re.findall("Location: [^\n]*", t_content)
                found = re.sub("Location: |\r", "", m[0])
                url = found
            elif self.media_url:
                url = self.media_url
            else:
                return 0
            if option.lower() == "download as fanart":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                print(option, "----")
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=False, copy_fanart=True,
                )
            elif option.lower() == "download as cover":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=True, copy_fanart=False,
                )
def record_torrent(self, item, hist_folder):
    """Register a torrent (URL, .torrent file, or magnet link) in history.

    Copies/creates a .torrent file under hist_folder, appends the torrent
    name to hist_folder/history.txt (if not already present), and writes
    the torrent's file list to hist_folder/<name>/Ep.txt.

    Returns the torrent's name ('' if it could not be resolved).
    """
    tmp_dir = TMPDIR
    name = ''
    if not os.path.exists(hist_folder):
        os.makedirs(hist_folder)
    if item.startswith('http') or os.path.isfile(item):
        # Remote .torrent URL or a local .torrent file on disk.
        home = hist_folder
        name1 = os.path.basename(item).replace('.torrent', '')
        torrent_dest1 = os.path.join(tmp_dir, name1+'.torrent')
        if not os.path.exists(torrent_dest1):
            if item.startswith('http'):
                # ccurl's '#'-separated argument convention: fetch item to torrent_dest1.
                ccurl(item+'#'+'-o'+'#'+torrent_dest1)
            else:
                shutil.copy(item, torrent_dest1)
        if os.path.exists(torrent_dest1):
            # Use the metadata's canonical name, not the URL basename,
            # when placing the final copy in the history folder.
            info = lt.torrent_info(torrent_dest1)
            name = info.name()
            torrent_dest = os.path.join(home, name+'.torrent')
            shutil.copy(torrent_dest1, torrent_dest)
            logger.info(name)
    elif item.startswith('magnet:'):
        # Magnet link: metadata must be fetched via a live session first.
        torrent_handle, stream_session, info = get_torrent_info_magnet(
            item, tmp_dir, self, ui.progress, tmp_dir)
        torrent_file = lt.create_torrent(info)
        home = hist_folder
        name = info.name()
        torrent_dest = os.path.join(home, name+'.torrent')
        with open(torrent_dest, "wb") as f:
            f.write(lt.bencode(torrent_file.generate()))
        # Stop the temporary session used only for metadata retrieval.
        torrent_handle.pause()
        stream_session.pause()
        ui.stop_torrent_forcefully()
    if name:
        # NOTE(review): 'home' is only bound in the two branches above; both
        # set it to hist_folder, and 'name' is non-empty only via those
        # branches, so this re-read is safe as written.
        torrent_dest = os.path.join(home, name+'.torrent')
        info = lt.torrent_info(torrent_dest)
        file_arr = []
        for f in info.files():
            file_path = f.path
            file_path = os.path.basename(file_path)
            file_arr.append(file_path)
        if file_arr:
            hist_path = os.path.join(home, 'history.txt')
            if not os.path.isfile(hist_path):
                hist_dir, last_field = os.path.split(hist_path)
                if not os.path.exists(hist_dir):
                    os.makedirs(hist_dir)
                # Creates an empty file; the assignment binds None (close()'s
                # return value) and is never used afterwards.
                f = open(hist_path, 'w').close()
            if os.path.isfile(hist_path):
                if (os.stat(hist_path).st_size == 0):
                    write_files(hist_path, name, line_by_line=True)
                else:
                    # Append only if this torrent is not already recorded.
                    lines = open_files(hist_path, True)
                    line_list = []
                    for i in lines:
                        i = i.strip()
                        line_list.append(i)
                    if name not in line_list:
                        write_files(hist_path, name, line_by_line=True)
            hist_site = os.path.join(hist_folder, name)
            if not os.path.exists(hist_site):
                # Best-effort: a failure to create the per-torrent folder or
                # its Ep.txt is logged but does not abort the registration.
                try:
                    os.makedirs(hist_site)
                    hist_epn = os.path.join(hist_site, 'Ep.txt')
                    write_files(hist_epn, file_arr, line_by_line=True)
                except Exception as e:
                    print(e)
    return name
def run(self): name = self.name1 name2 = name.replace(' ', '+') if name2 != 'NONE': url = "https://www.last.fm/search?q=" + name2 logger.info(url) wiki = "" content = ccurl(url) soup = BeautifulSoup(content, 'lxml') link = soup.findAll('div', {'class': 'row clearfix'}) logger.info('{0}-{1}'.format(link, 253)) name3 = "" for i in link: j = i.findAll('a') for k in j: try: url = k['href'] if '?q=' not in url: logger.info(url) break except: pass logger.info(url) if url.startswith('http'): url = url else: url = "https://www.last.fm" + url logger.info(url) img_url = url + '/+images' wiki_url = url + '/+wiki' logger.info(wiki_url) content = ccurl(wiki_url) soup = BeautifulSoup(content, 'lxml') link = soup.find('div', {'class': 'wiki-content'}) if link: wiki = link.text content = ccurl(img_url) soup = BeautifulSoup(content, 'lxml') link = soup.findAll('ul', {'class': 'image-list'}) img = [] for i in link: j = i.findAll('img') for k in j: l = k['src'] u1 = l.rsplit('/', 2)[0] u2 = l.split('/')[-1] u = u1 + '/770x0/' + u2 img.append(u) img = list(set(img)) logger.info(len(img)) tmp_bio = os.path.join(TMPDIR, name + '-bio.txt') write_files(tmp_bio, wiki, line_by_line=False) thumb = os.path.join(TMPDIR, name + '.jpg') if img: url = img[0] try: ccurl(url + '#' + '-o' + '#' + thumb) except Exception as err: print(err, '--623--') tmp_n = os.path.join(TMPDIR, name + '.txt') write_files(tmp_n, img, line_by_line=True)
def test_write_append(self): content = 'append this line' self.assertIsNone( write_files(test_file, content, line_by_line=True, mode='test'))
def download(self, url, option, copy_summary=None):
    """Dispatch a browser context-menu action for *url*.

    option (str, matched case-insensitively) selects the action: playing
    via the internal player, casting, playlist/queue bookkeeping, offline
    download, subtitle-free poster/fanart fetching, or summary copying.
    The trailing else-branch handles the image-download options, where
    url may be substituted by self.media_url.  Returns 0 only on the
    early-exit error path inside that else-branch.
    """
    if option.lower() == 'play with kawaii-player':
        final_url = ''
        self.ui.epn_name_in_list = self.title_page
        self.ui.logger.info(self.ui.epn_name_in_list)
        if self.ui.mpvplayer_val.processId() > 0:
            # Stop any currently running player instance first.
            self.ui.mpvplayer_val.kill()
            self.ui.mpvplayer_started = False
        if 'youtube.com' in url or 'ytimg.com' in url:
            pass
        else:
            # Non-YouTube links are resolved through the ytdl: scheme.
            url = 'ytdl:' + url
        self.ui.get_final_link(url, self.ui.quality_val, self.ui.ytdl_path,
                               self.ui.logger, self.ui.epn_name_in_list,
                               self.hdr)
    elif option.lower() in ["cast this item", "cast queue"]:
        if option.lower() == "cast queue":
            self.ui.list2.process_browser_based_url(copy_summary, url, "queue")
        else:
            self.ui.list2.process_browser_based_url(copy_summary, url, "single")
    elif option.lower() == 'add as local playlist':
        self.get_playlist = True
        if self.playlist_dict:
            # NOTE(review): logger.info is given two positional args here;
            # stdlib logging treats the second as a %-format arg and will
            # log a formatting error — looks like a leftover print() — confirm.
            self.ui.logger.info(self.get_playlist, '=get_playlist')
            self.add_playlist(self.playlist_name)
    elif option.lower() == 'download':
        if self.ui.quality_val == 'sd480p':
            # 480p saving unsupported; fall back to HD and tell the user.
            txt = "Video can't be saved in 480p, Saving in either HD or SD"
            send_notification(txt)
            quality = 'hd'
        else:
            quality = self.ui.quality_val
        finalUrl = self.ui.yt.get_yt_url(url, quality, self.ui.ytdl_path,
                                         self.ui.logger, mode='offline')
        finalUrl = finalUrl.replace('\n', '')
        # Sanitize the page title into a usable filename.
        title = self.title_page + '.mp4'
        title = title.replace('"', '')
        title = title.replace('/', '-')
        if os.path.exists(self.ui.default_download_location):
            title = os.path.join(self.ui.default_download_location, title)
        else:
            title = os.path.join(self.ui.tmp_download_folder, title)
        command = wget_string(finalUrl, title, self.ui.get_fetch_library)
        self.ui.logger.debug(command)
        self.ui.infoWget(command, 0)
    elif option.lower() == 'queue item':
        file_path = os.path.join(self.home, 'Playlists', 'Queue')
        if not os.path.exists(file_path):
            f = open(file_path, 'w')
            f.close()
        if not self.ui.queue_url_list:
            self.ui.list6.clear()
        title = self.title_page.replace('/', '-')
        if title.startswith('.'):
            title = title[1:]
        # Queue entries use the "<title> <url> NONE" row format.
        r = title + ' ' + url + ' ' + 'NONE'
        self.ui.queue_url_list.append(r)
        self.ui.list6.addItem(title)
        write_files(file_path, r, line_by_line=True)
    elif option.lower() == 'season episode link':
        if self.site != "Music" and self.site != "PlayLists":
            my_copy = self.ui.epn_arr_list.copy()
            r = self.ui.list1.currentRow()
            nm = self.ui.get_title_name(r)
            video_dir = None
            if self.site.lower() == 'video':
                # Local video entries store the directory after a tab separator.
                video_dir = self.ui.original_path_name[r].split('\t')[-1]
            elif self.site.lower() in ['playlists', 'none', 'music']:
                pass
            else:
                video_dir = self.ui.original_path_name[r]
            self.ui.posterfound_new(
                name=nm, site=self.site, url=url, direct_url=True,
                copy_summary=False, copy_poster=False, copy_fanart=False,
                get_sum=True, video_dir=video_dir)
    elif (option.lower() == 'artist link'
            or option.lower() == 'series link'
            or option.lower() == 'series/movie link'):
        r = self.ui.list1.currentRow()
        nm = self.ui.get_title_name(r)
        self.ui.posterfound_new(
            name=nm, site=self.site, url=url, direct_url=True,
            copy_summary=True, copy_poster=True, copy_fanart=True)
    elif option.lower() == 'copy summary':
        self.ui.copySummary(copy_sum=copy_summary)
    else:
        # Image-download options ("download as fanart"/"download as cover").
        if not url:
            url = self.media_url
        self.ui.logger.debug('{}--{}--media-url--'.format(
            url, self.media_url))
        if url:
            # HEAD request only; follow a redirect manually if the
            # Location header points at the actual jpeg.
            t_content = ccurl(url, curl_opt='-I')
            if 'image/jpeg' in t_content and not 'Location:' in t_content:
                pass
            elif 'image/jpeg' in t_content and 'Location:' in t_content:
                m = re.findall('Location: [^\n]*', t_content)
                found = re.sub('Location: |\r', '', m[0])
                url = found
            elif self.media_url:
                url = self.media_url
            else:
                return 0
            if option.lower() == "download as fanart":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                print(option, '----')
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=False, copy_fanart=True)
            elif option.lower() == "download as cover":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                self.ui.posterfound_new(
                    name=nm, site=self.site, url=url, direct_url=True,
                    copy_summary=False, copy_poster=True, copy_fanart=False)
def test_write_block(self): content = 'write this line' self.assertIsNone( write_files(test_file, content, line_by_line=False, mode='test'))
def test_write_line_by_line_list(self): content = ['This', 'is', 'Test', 'Case'] self.assertIsNone( write_files(test_file, content, line_by_line=True, mode='test'))
def download(self, url, option, copy_summary=None):
    """Dispatch a context-menu action for *url* (older get_yt_url variant).

    option (str, matched case-insensitively) selects: direct playback,
    playlist/queue bookkeeping, offline download, subtitle fetching,
    episode-info lookup, poster/fanart fetching, or summary copying.
    The trailing else-branch handles the image-download options, where
    url may be substituted by self.media_url.  Returns 0 only on the
    early-exit error path inside that else-branch.
    """
    if option.lower() == 'play with kawaii-player':
        final_url = ''
        self.ui.epn_name_in_list = self.title_page
        self.ui.logger.info(self.ui.epn_name_in_list)
        if self.ui.mpvplayer_val.processId() > 0:
            # Stop any currently running player instance first.
            self.ui.mpvplayer_val.kill()
        final_url = get_yt_url(url, self.ui.quality_val, self.ui.ytdl_path,
                               self.ui.logger)
        if final_url:
            self.ui.watchDirectly(final_url, self.ui.epn_name_in_list, 'no')
            self.ui.tab_5.show()
            self.ui.frame1.show()
            self.ui.tab_2.setMaximumWidth(400)
    elif option.lower() == 'add as local playlist':
        self.get_playlist = True
        if self.playlist_dict:
            # NOTE(review): logger.info with two positional args — stdlib
            # logging %-formats the second; likely a leftover print() — confirm.
            self.ui.logger.info(self.get_playlist, '=get_playlist')
            self.add_playlist(self.playlist_name)
    elif option.lower() == 'download':
        if self.ui.quality_val == 'sd480p':
            # 480p saving unsupported; fall back to HD and tell the user.
            txt = "Video can't be saved in 480p, Saving in either HD or SD"
            send_notification(txt)
            quality = 'hd'
        else:
            quality = self.ui.quality_val
        finalUrl = get_yt_url(url, quality, self.ui.ytdl_path, self.ui.logger)
        finalUrl = finalUrl.replace('\n', '')
        # Sanitize the page title into a usable filename.
        title = self.title_page + '.mp4'
        title = title.replace('"', '')
        title = title.replace('/', '-')
        if os.path.exists(self.ui.default_download_location):
            title = os.path.join(self.ui.default_download_location, title)
        else:
            title = os.path.join(self.ui.tmp_download_folder, title)
        command = wget_string(finalUrl, title, self.ui.get_fetch_library)
        print(command)
        self.ui.infoWget(command, 0)
    elif option.lower() == 'get subtitle (if available)':
        self.ui.epn_name_in_list = self.title_page
        self.ui.logger.info(self.ui.epn_name_in_list)
        get_yt_sub(url, self.ui.epn_name_in_list, self.yt_sub_folder,
                   self.ui.tmp_download_folder, self.ui.ytdl_path,
                   self.ui.logger)
    elif option.lower() == 'queue item':
        file_path = os.path.join(self.home, 'Playlists', 'Queue')
        if not os.path.exists(file_path):
            f = open(file_path, 'w')
            f.close()
        if not self.ui.queue_url_list:
            self.ui.list6.clear()
        title = self.title_page.replace('/', '-')
        if title.startswith('.'):
            title = title[1:]
        # Queue entries use the "<title> <url> NONE" row format.
        r = title + ' ' + url + ' ' + 'NONE'
        self.ui.queue_url_list.append(r)
        self.ui.list6.addItem(title)
        print(self.ui.queue_url_list)
        write_files(file_path, r, line_by_line=True)
    elif option.lower() == 'season episode link':
        if self.site != "Music" and self.site != "PlayLists":
            self.ui.getTvdbEpnInfo(url)
    elif option.lower() == 'artist link' or option.lower(
            ) == 'series link':
        r = self.ui.list1.currentRow()
        nm = self.ui.get_title_name(r)
        self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                direct_url=True, copy_summary=True,
                                copy_poster=True, copy_fanart=True)
    elif option.lower() == 'copy summary':
        self.ui.copySummary(copy_sum=copy_summary)
    else:
        # Image-download options ("download as fanart"/"download as cover").
        if not url:
            url = self.media_url
        print(url, self.media_url, '--media--url--')
        if url:
            # HEAD request via ccurl's '#'-separated option convention;
            # follow a redirect manually when Location points at the jpeg.
            t_content = ccurl(url + '#' + '-I')
            if 'image/jpeg' in t_content and not 'Location:' in t_content:
                pass
            elif 'image/jpeg' in t_content and 'Location:' in t_content:
                m = re.findall('Location: [^\n]*', t_content)
                found = re.sub('Location: |\r', '', m[0])
                url = found
            elif self.media_url:
                url = self.media_url
            else:
                return 0
            if option.lower() == "download as fanart":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                print(option, '----')
                self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                        direct_url=True, copy_summary=False,
                                        copy_poster=False, copy_fanart=True)
            elif option.lower() == "download as cover":
                r = self.ui.list1.currentRow()
                nm = self.ui.get_title_name(r)
                self.ui.posterfound_new(name=nm, site=self.site, url=url,
                                        direct_url=True, copy_summary=False,
                                        copy_poster=True, copy_fanart=False)
def got_curl_html(self,title,url,file_path): t = title + ' '+url+' '+'NONE' write_files(file_path,t,line_by_line=True) self.ui.update_playlist(file_path)