def parser_info(self, video, info, stream, lvid, uid):
    """Parse one stream's CDN info and register it on *video*.

    Args:
        video: result object exposing ``streams`` (dict) and
            ``stream_types`` (list); both are mutated in place.
        info: decoded API response; expected to carry 'allot' (CDN
            host), 'id', 'prot', 'tvid' and 'data' (parallel clip
            lists) -- schema inferred from usage, confirm upstream.
        stream: stream type name, a key of ``self.types_2_id``.
        lvid: expected video id; mismatching responses are ignored.
        uid: client/session identifier forwarded to the CDN.
    """
    # Ignore responses without a CDN host or for a different video id.
    if 'allot' not in info or lvid != info['id']:
        return
    stream_id = self.types_2_id[stream]
    stream_profile = self.id_2_profile[stream_id]
    host = info['allot']
    prot = info['prot']
    tvid = info['tvid']
    data = info['data']
    # Total size is the sum of the per-clip byte counts (strings).
    size = sum(map(int, data['clipsBytes']))
    urls = []
    assert len(data['clipsURL']) == len(data['clipsBytes']) == len(data['su'])
    for new, clip, ck in zip(data['su'], data['clipsURL'], data['ck']):
        params = {
            'vid': self.vid,
            'tvid': tvid,
            'file': urlparse(clip).path,
            'new': new,
            'key': ck,
            'uid': uid,
            't': random(),  # cache-buster -- presumably required by the CDN; confirm
            'prod': 'h5',
            'prot': prot,
            'pt': 1,
            'rb': 1,
        }
        if urlparse(new).netloc == '':
            # Relative 'su' entry: ask the cdnList dispatcher for the real URL.
            cdnurl = 'https://' + host + '/cdnList?' + urlencode(params)
            url = json.loads(get_content(cdnurl))['url']
        else:
            # Absolute 'su' entry is already a playable URL.
            url = new
        urls.append(url)
    video.streams[stream_id] = {
        'container': 'mp4',
        'video_profile': stream_profile,
        'src': urls,
        'size': size
    }
    video.stream_types.append(stream_id)
def download(urls, name, ext, live=False):
    """Download *urls* to ``name.ext``, merging segments when needed.

    ffmpeg can't handle a local m3u8 playlist, so ffmpeg is only ever
    handed the remote m3u8 URL; otherwise the internal downloader
    fetches the segments and they are merged afterwards.

    Args:
        urls: list of media URLs (first entry is the playlist for m3u8).
        name: output file name without extension.
        ext: container extension; 'm3u8' triggers playlist handling.
        live: live streams always go through ffmpeg so the timeline is
            rebuilt.
    """
    global m3u8_internal
    # For live video, always use ffmpeg to rebuild the timeline.
    if live:
        m3u8_internal = False
    # Rebuild m3u8 urls when using the internal downloader and adopt
    # the segment's extension (default "ts"); with ffmpeg the playlist
    # is remuxed to "mp4" instead.
    if ext == 'm3u8':
        if m3u8_internal:
            urls = load_m3u8(urls[0])
            ext = urlparse(urls[0])[2].split('.')[-1]
            if ext not in ['ts', 'm4s', 'mp4']:
                ext = 'ts'
        else:
            ext = 'mp4'
    # OK, dispatch on m3u8_internal.
    if not m3u8_internal:
        launch_ffmpeg_download(urls[0], name + '.' + ext, live)
    else:
        if save_urls(urls, name, ext, jobs=args.jobs):
            count = len(urls)
            if count > 1 and not args.no_merge:
                launch_ffmpeg(name, ext, count)
                clean_slices(name, ext, count)
        else:
            # Fixed typo in the log message (was "donwload failed").
            logger.critical("{}> download failed".format(name))
def parser_info(self, video, info, stream, lvid):
    """Build flash-era playback URLs for one stream and register it.

    Appends one URL per clip to ``self.realurls[stream_id]``, records
    the stream on *video*, then hands off to ``self.extract_single``.

    Args:
        video: result object with ``streams`` / ``stream_types``.
        info: decoded API response (needs 'allot' and 'data').
        stream: stream type name, a key of ``self.types_2_id``.
        lvid: unused here; kept for signature compatibility.
    """
    if 'allot' not in info:
        return
    stream_id = self.types_2_id[stream]
    stream_profile = self.types_2_profile[stream]
    host = info['allot']
    data = info['data']
    # Total size is the sum of the per-clip byte counts (strings).
    size = sum(map(int, data['clipsBytes']))
    assert len(data['clipsURL']) == len(data['clipsBytes']) == len(
        data['su'])
    for new, clip, ck in zip(data['su'], data['clipsURL'], data['ck']):
        clipURL = urlparse(clip).path
        # uid is a millisecond timestamp and t a random cache-buster --
        # presumably required by the CDN; confirm against the API.
        self.realurls[stream_id].append(
            'http://' + host + '/?prot=9&prod=flash&pt=1&file=' + clipURL
            + '&new=' + new + '&key=' + ck + '&vid=' + str(self.vid)
            + '&uid=' + str(int(time.time() * 1000))
            + '&t=' + str(random()) + '&rb=1')
    video.streams[stream_id] = {
        'container': 'mp4',
        'video_profile': stream_profile,
        'size': size
    }
    video.stream_types.append(stream_id)
    self.extract_single(video, stream_id)
def parser_info(self, video, info, stream, lvid, uid):
    """Build h5 cdnList dispatcher URLs for one stream and register it.

    One dispatcher URL per clip is appended to
    ``self.realurls[stream_id]``; the stream is recorded on *video*
    and ``self.extract_single`` is invoked.

    Args:
        video: result object with ``streams`` / ``stream_types``.
        info: decoded API response ('allot', 'id', 'prot', 'tvid',
            'data') -- schema inferred from usage, confirm upstream.
        stream: stream type name, a key of ``self.types_2_id``.
        lvid: expected video id; mismatching responses are ignored.
        uid: client/session identifier forwarded to the CDN.
    """
    # Ignore responses without a CDN host or for a different video id.
    if 'allot' not in info or lvid != info['id']:
        return
    stream_id = self.types_2_id[stream]
    stream_profile = self.id_2_profile[stream_id]
    host = info['allot']
    prot = info['prot']
    tvid = info['tvid']
    data = info['data']
    # Total size is the sum of the per-clip byte counts (strings).
    size = sum(map(int, data['clipsBytes']))
    assert len(data['clipsURL']) == len(data['clipsBytes']) == len(
        data['su'])
    for new, clip, ck in zip(data['su'], data['clipsURL'], data['ck']):
        params = {
            'vid': self.vid,
            'tvid': tvid,
            'file': urlparse(clip).path,
            'new': new,
            'key': ck,
            'uid': uid,
            't': random(),  # cache-buster -- presumably required by the CDN; confirm
            'prod': 'h5',
            'prot': prot,
            'pt': 1,
            'rb': 1,
        }
        self.realurls[stream_id].append(
            'https://' + host + '/cdnList?' + urlencode(params))
    video.streams[stream_id] = {
        'container': 'mp4',
        'video_profile': stream_profile,
        'size': size
    }
    video.stream_types.append(stream_id)
    self.extract_single(video, stream_id)
def prepare(self):
    """Fetch ku6 video metadata and return a populated VideoInfo.

    Resolves ``self.vid`` from ``self.url`` when needed, queries the
    fetchVideo4Player API, and registers a single 'current' stream.

    Returns:
        VideoInfo with title, one stream and its total size.

    Raises:
        AssertionError: when the API status is not 1 or the container
            extension is not one of flv/mp4/f4v.
    """
    info = VideoInfo(self.name)
    if self.url and not self.vid:
        # Raw strings so '\d' / '\.' / '\?' are real regex escapes
        # (avoids invalid-escape warnings); pattern bytes unchanged.
        self.vid = match1(self.url,
                          r'http://v.ku6.com/special/show_\d+/(.*)\.html',
                          r'http://v.ku6.com/show/(.*)\.html',
                          r'http://my.ku6.com/watch\?.*v=(.*).*')
    video_data = json.loads(
        get_content('http://v.ku6.com/fetchVideo4Player/%s.html' % self.vid))
    data = video_data['data']
    assert video_data['status'] == 1, '%s : %s' % (self.name, data)
    info.title = data['t']
    # 'f' is a comma-separated list of media URLs.
    urls = data['f'].split(',')
    ext = urlparse(urls[0]).path.split('.')[-1]
    assert ext in ('flv', 'mp4', 'f4v'), ext
    # f4v is served as flv; keep the other containers as-is.
    ext = {'f4v': 'flv'}.get(ext, ext)
    # url_info returns a 3-tuple whose last item is the size in bytes.
    size = sum(url_info(url)[2] for url in urls)
    info.streams['current'] = {'container': ext, 'src': urls, 'size': size}
    info.stream_types.append('current')
    return info
def parser_info(self, video, info, stream, lvid, uid):
    """Resolve h5n segment URLs for one stream and register it on *video*.

    Relative 'su' entries are resolved through the host's '/ip'
    dispatcher (first server wins); absolute entries are used as-is.

    Args:
        video: result object with ``streams`` / ``stream_types``.
        info: decoded API response ('allot', 'id', 'data' with 'ch'
            and 'num') -- schema inferred from usage, confirm upstream.
        stream: stream type name, a key of ``self.types_2_id``.
        lvid: expected video id; mismatching responses are ignored.
        uid: client/session identifier forwarded to the CDN.
    """
    # Ignore responses without a CDN host or for a different video id.
    if 'allot' not in info or lvid != info['id']:
        return
    stream_id = self.types_2_id[stream]
    stream_profile = self.id_2_profile[stream_id]
    host = info['allot']
    data = info['data']
    # Total size is the sum of the per-clip byte counts (strings).
    size = sum(map(int, data['clipsBytes']))
    urls = []
    assert len(data['clipsURL']) == len(data['clipsBytes']) == len(
        data['su'])
    for new, ck in zip(data['su'], data['ck']):
        params = {
            'ch': data['ch'],
            'num': data['num'],
            'new': new,
            'key': ck,
            'uid': uid,
            'prod': 'h5n',
            'pt': 1,
            'pg': 2,
        }
        if urlparse(new).netloc == '':
            # Relative entry: ask the '/ip' dispatcher, take the first server.
            cdnurl = 'https://{}/ip?{}'.format(host, urlencode(params))
            url = json.loads(get_content(cdnurl))['servers'][0]['url']
        else:
            url = new
        urls.append(url)
    video.streams[stream_id] = {
        'container': 'mp4',
        'video_profile': stream_profile,
        'src': urls,
        'size': size
    }
    video.stream_types.append(stream_id)
def parser_info(self, info, stream, lvid):
    """Build flash playback URLs for *stream* and register it on self.

    Oldest variant: writes directly to ``self.streams`` /
    ``self.stream_types`` and uses the stream name itself as the id
    and profile.

    Args:
        info: decoded API response (needs 'allot' and 'data').
        stream: stream type name, used as key in ``self.realurls``.
        lvid: unused here; kept for signature compatibility.
    """
    if 'allot' not in info:
        return
    host = info['allot']
    data = info['data']
    # Total size is the sum of the per-clip byte counts (strings).
    size = sum(map(int, data['clipsBytes']))
    assert len(data['clipsURL']) == len(data['clipsBytes']) == len(data['su'])
    for new, clip, ck in zip(data['su'], data['clipsURL'], data['ck']):
        clipURL = urlparse(clip).path
        # uid is a millisecond timestamp and t a random cache-buster --
        # presumably required by the CDN; confirm against the API.
        self.realurls[stream].append(
            'http://' + host + '/?prot=9&prod=flash&pt=1&file=' + clipURL
            + '&new=' + new + '&key=' + ck + '&vid=' + str(self.vid)
            + '&uid=' + str(int(time.time() * 1000))
            + '&t=' + str(random()) + '&rb=1')
    self.streams[stream] = {
        'container': 'mp4',
        'video_profile': stream,
        'size': size
    }
    self.stream_types.append(stream)
def main():
    """CLI entry point: parse args, configure networking, process URLs.

    Exits with status 1 when any URL fails to parse or download
    setup raises; a Ctrl-C is caught and logged instead of tracing.
    """
    arg_parser()
    # Verbosity: DEBUG when --debug, otherwise warnings and above only.
    if not args.debug:
        logging.root.setLevel(logging.WARNING)
    else:
        logging.root.setLevel(logging.DEBUG)
    if args.timeout:
        socket.setdefaulttimeout(args.timeout)

    # Proxy setup: 'system' defers to the environment, a socks* scheme
    # uses PySocks, anything else is treated as an HTTP(S) proxy URL.
    if args.proxy == 'system':
        proxy_handler = ProxyHandler()
        args.proxy = os.environ.get('HTTP_PROXY', 'none')
    elif args.proxy.upper().startswith('SOCKS'):
        try:
            import socks
            from sockshandler import SocksiPyHandler
        except ImportError:
            logger.error('To use SOCKS proxy, please install PySocks first!')
            raise
        parsed_socks_proxy = urlparse(args.proxy)
        sockstype = socks.PROXY_TYPES[parsed_socks_proxy.scheme.upper()]
        rdns = True  # resolve hostnames on the proxy side
        proxy_handler = SocksiPyHandler(sockstype,
                                        parsed_socks_proxy.hostname,
                                        parsed_socks_proxy.port,
                                        rdns,
                                        parsed_socks_proxy.username,
                                        parsed_socks_proxy.password)
    else:
        proxy_handler = ProxyHandler({'http': args.proxy, 'https': args.proxy})
    if args.proxy != 'none':
        opener = build_opener(proxy_handler)
        install_opener(opener)
        default_proxy_handler[:] = [proxy_handler]

    # mkdir and cd to output dir; fall back to '.' when it is unusable.
    if args.output_dir != '.':
        try:
            if not os.path.exists(args.output_dir):
                os.makedirs(args.output_dir)
        except OSError:  # was a bare except; only FS errors are expected here
            logger.warning("No permission or Not found " + args.output_dir)
            logger.warning("use current folder")
            args.output_dir = '.'
    if os.path.exists(args.output_dir):
        os.chdir(args.output_dir)

    try:
        exit_code = 0  # renamed from 'exit' to stop shadowing the builtin
        for url in args.video_urls:
            try:
                m, u = url_to_module(url)
                parser = m.parser_list if args.playlist else m.parser
                info = parser(u)
                # A generator/list means a playlist of VideoInfo items;
                # honor --start by skipping earlier indices.
                if isinstance(info, (types.GeneratorType, list)):
                    ind = 0
                    for i in info:
                        if ind < args.start:
                            ind += 1
                            continue
                        handle_videoinfo(i, index=ind)
                        ind += 1
                else:
                    handle_videoinfo(info)
            except AssertionError as e:
                logger.critical(compact_str(e))
                exit_code = 1
            except (RuntimeError, NotImplementedError, SyntaxError) as e:
                logger.error(compact_str(e))
                exit_code = 1
        sys.exit(exit_code)
    except KeyboardInterrupt:
        logger.info('Interrupted by Ctrl-C')