def cmdline(series, url, ignore_downloaded, execute): best = 0 file = url.split("/")[-1] d = DIRS[series] fullpath = os.path.join(d, file + ".flv") if ignore_downloaded and os.path.exists(fullpath.encode("utf-8")): return None tmppath = fullpath + ".tmp" if os.path.exists(tmppath.encode("utf-8")): if resume: print "Resuming download:", fullpath.encode("utf-8") else: print "Restarting download:", fullpath.encode("utf-8") else: print "New download:", fullpath.encode("utf-8") best_alt = None all_cmds = pirateplay.generate_getcmd("http://www.svtplay.se" + url, False, output_file=tmppath) try: non_dups = pirateplay.remove_duplicates(all_cmds) except ValueError: print "Cannot find cmd for", fullpath return None bitrates = [] for alt in non_dups: lines = alt.splitlines() m = re.search("quality: ([0-9]*)", lines[0]) if not m: print "SKIPPING", alt continue q = int(m.group(1)) bitrates.append(q) if q > best: best = q best_alt = alt if not best_alt: print "No bitrate match found for", url return None lines = best_alt.splitlines() exe = lines[1] if resume: exe = exe.replace(" ", " --resume ", 1) print "Selecting bitrate", best, "among", ', '.join(map(str, sorted(bitrates))) if execute: return Downloader(exe, d, tmppath, fullpath) return exe
def main():
    """Interactive curses front-end: browse the Pirateplayer catalogue, then
    play a stream through mplayer or download it with rtmpdump.

    Relies on module globals: stdscr, Menu, dict2list, generate_getcmd,
    remove_duplicates, restore_screen.
    """

    def print_help(s):
        # Replace the contents of the help window at the bottom of the screen.
        help_win.clear()
        help_win.addstr(s)
        help_win.noutrefresh()
        curses.doupdate()

    def download(filename, cmd):
        # Run the rtmpdump command in a child process and poll its stderr in
        # 7-byte chunks, echoing progress into the help window.
        child = subprocess.Popen(shlex.split(cmd + ' -o ' + filename),
                                 shell=False,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        complete = False
        buffered = ''
        start = False
        while True:
            out = child.stderr.read(7)
            # EOF on stderr plus a non-None poll() means the child exited.
            if out == '' and child.poll() != None:
                print_help('Nedladdning klar!')
                break
            elif len(out) > 1:
                # Progress chunks look like "(NN.N%)"; "(c" is the copyright
                # banner and is skipped.
                if out[0] == '(' and out[1] != 'c':
                    print_help('Laddar ner - ' + out[1:out.find(')')])

    selected_item = 0
    # Basic curses setup: no echo, cbreak input, hidden cursor, keypad keys
    # decoded into single key codes.
    curses.start_color()
    curses.noecho()
    curses.cbreak()
    curses.curs_set(0)
    stdscr.keypad(1)
    #stdscr.nodelay(1)

    # Draw the red block "logo" framing the program name.
    logo_name = 'Pirateplayer'
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_RED)
    logo_attr = curses.color_pair(1)
    stdscr.addstr(1, 2, ' ', logo_attr)
    stdscr.addstr(1, 2+len(logo_name)+6, ' ', logo_attr)
    stdscr.addstr(2, 2, ' ', logo_attr)
    stdscr.addstr(2, 2+len(logo_name)+5, ' ', logo_attr)
    stdscr.addstr(2, 6, logo_name)
    stdscr.addstr(3, 2, ' ', logo_attr)
    stdscr.addstr(3, 2+len(logo_name)+6, ' ', logo_attr)
    stdscr.noutrefresh()

    # Catalogue is fetched as a pickle over HTTP.
    # NOTE(review): pickle.load on remote data executes arbitrary code if the
    # host is compromised -- consider a safer serialization format.
    #pkl_file = open('data.pkl', 'rb')
    pkl_file = urllib2.urlopen('http://pirateplay.se/data.pkl')
    data = pickle.load(pkl_file)
    data_list = dict2list(data)

    # Main catalogue menu, stream submenu, and help window.
    menu = Menu(data_list, curses.newwin(10, stdscr.getmaxyx()[1]-2, 5, 2), 0, 0)
    play_menu = Menu([], curses.newwin(5, 20, 16, 2), 0, 0)
    help_win = curses.newwin(5, stdscr.getmaxyx()[1]-2, 25, 2)
    print_help('Navigera med piltangenterna\nQ för att avsluta')
    menu.set_focus(True)
    curses.doupdate()

    href = ''
    cmd = ''
    # Input loop: push_char lets the focused menu consume navigation keys;
    # a key it does NOT consume activates the currently selected item.
    while True:
        c = stdscr.getch()
        if c == ord('q'):
            break
        if not menu.push_char(c):
            data = menu.get_current('data')
            if data:
                # Build a play/download submenu entry for every distinct
                # stream alternative (line 0 = title, line 1 = rtmp command).
                streams = []
                for rtmp_cmd in remove_duplicates(generate_getcmd(data.encode('utf-8'))):
                    streams.append(
                        { 'title' : rtmp_cmd.splitlines()[0],
                          'data' : None,
                          'list' : [
                              {'title' : 'play', 'data' : rtmp_cmd.splitlines()[1]},
                              {'title' : 'download', 'data' : rtmp_cmd.splitlines()[1]}]})
                play_menu.set_data(streams)
                play_menu.set_focus(True)
                menu.set_focus(False)
        elif not play_menu.push_char(c):
            title = play_menu.get_current('title').encode('utf-8')
            data = play_menu.get_current('data')
            if title == "play" and data:
                # NOTE(review): the encode() result below is discarded -- this
                # looks like it was meant to be data = data.encode('utf-8');
                # confirm before relying on it.
                data.encode('utf-8')
                # Pipe rtmpdump's stdout straight into mplayer's stdin.
                rtmpdump_p = subprocess.Popen(shlex.split(data),
                                              shell=False,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
                mplayer_p = subprocess.Popen(['mplayer', '-'],
                                             shell=False,
                                             stdin=rtmpdump_p.stdout,
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
            elif title == "download" and data:
                # Prompt for a file name in the help window, then download in
                # a worker thread so the UI stays responsive.
                print_help('Ange filnamn - avsluta med Enter:\n>')
                curses.echo()
                file_name = help_win.getstr(1, 2)
                curses.noecho()
                #print_help('Downloading ' + file_name)
                thread.start_new_thread(download, (file_name, data))
            else:
                # Any other submenu choice backs out to the main menu.
                play_menu.set_data([])
                play_menu.set_focus(False)
                menu.set_focus(True)
        curses.doupdate()
    restore_screen()
"http://www.tv3play.se/rss/mostviewed", "http://www.tv4play.se/rss/dokumentarer", "http://www.kanal5play.se/rss?type=PROGRAM", "http://www.tv6play.se/rss/mostviewed", "http://www.tv8play.se/rss/recent", "http://www.kanal9play.se/rss?type=PROGRAM", "http://www.aftonbladet.se/webbtv/rss.xml", "http://vimeo.com/channels/mvod/videos/rss", ] for feed_url in feed_urls: f = urlopen(feed_url) tree = etree.parse(f) url = tree.xpath("/rss/channel/item[1]/link/text()")[0] network = tree.xpath("/rss/channel/title/text()")[0] title = tree.xpath("/rss/channel/item[1]/title/text()")[0] print(ansi["blue"] + network) print(title + ansi["reset"]) try: cmds = remove_duplicates(generate_getcmd(url, True, output_file="-")) if len(cmds) > 0: print(ansi["green"] + "Fine!" + ansi["reset"]) for line in cmds[0].splitlines(): print("\t" + line[:80]) system('ffplay -v quiet "%s"' % cmds[0].splitlines()[1]) else: print("%sNothing found for %s!%s" % (ansi["red"], title, ansi["reset"])) except: print("%s%s broken!%s" % (ansi["red"], title, ansi["reset"])) print("\t" + format_exc().replace("\n", "\n\t"))