def recode(url, duration=None, vfmt=2, outpath='./', npf=3, freq=10,
           tmin=5, tmax=20, proxy=None, log=None):
    assert duration is None or duration > 0
    name = '%s.%s.ts' % (_util.get_time_string(), hash(url))
    outfile = pjoin(outpath, name)
    log.info("|=> begin: %s", url)
    if duration:
        log.info("|=> duration: %d", duration)
    log.info("|=> output: %s", outfile)
    _util.assure_path(outpath)
    axel = WorkShop(tmin=tmin, tmax=tmax, log=log)
    m3u8 = M3u8Stream(axel=axel, proxy=proxy, log=log)
    fetcher = HttpFetcher()
    start_at = time.time()
    try:
        with open(outfile, 'wb') as fp:
            if url.find('m3u8') > 0 or __is_url_file(url):
                # m3u8 playlist (or a local playlist file): stream segments
                # through the workshop
                axel.serve()
                m3u8.recode(url=url, duration=duration, vfmt=vfmt,
                            fp=fp, npf=npf, freq=freq)
            else:
                # plain media URL: single HTTP download
                fetcher.fetch(url=url, fp=fp)
        log.info("|=> end: total=%.2fs, out=%s", time.time() - start_at, outfile)
    finally:
        if axel.isAlive():
            axel.setToStop()
            axel.join()
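# A minimal usage sketch for recode(), not part of the original module: the
# logging setup and the example URL below are assumptions; the parameter
# comments only restate where recode() forwards each value.
import logging

logging.basicConfig(level=logging.INFO)
_demo_log = logging.getLogger('recode-demo')

recode('http://example.com/live/index.m3u8',  # hypothetical HLS playlist URL
       duration=60,         # record for 60 seconds (must be > 0 when given)
       vfmt=2,              # forwarded to M3u8Stream.recode() as vfmt
       outpath='./out/',    # directory for the generated .ts file
       npf=3,               # forwarded to M3u8Stream.recode() as npf
       freq=10,             # forwarded to M3u8Stream.recode() as freq
       tmin=5, tmax=20,     # forwarded to the WorkShop constructor
       log=_demo_log)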
def main(cfg, log):
    if cfg.playlist:
        # expand each playlist URL into its clip URLs and write a url.txt
        # manifest into the per-playlist output directory
        for url in cfg.urls:
            outpath, cfg.urls = parsers.getPlayListParser(url).info(url)
            cfg.outpath = pjoin(cfg.outpath, outpath)
            util.assure_path(cfg.outpath)
            with open(pjoin(cfg.outpath, 'url.txt'), 'w') as fp:
                fp.writelines([url + "\n\n"])
                for i, clip in enumerate(cfg.urls):
                    fp.writelines(["[%03d] %s\n" % (i, clip)])
    bar = ProgressBar()
    ws = WorkShop(tmin=cfg.tmin, tmax=cfg.tmax, log=log)
    dlvs = []
    for url in cfg.urls:
        dlvideo = VUrlTask(url, vidfmt=cfg.vidfmt, npf=cfg.npf,
                           outpath=cfg.outpath, bar=bar, log=log)
        dlvs.append(dlvideo)
    try:
        ws.serve()
        ws.addTasks(dlvs)
        while len(dlvs) > 0:
            # drop finished or failed tasks; rebuilding the list avoids
            # skipping entries when removing during iteration
            dlvs = [dlv for dlv in dlvs
                    if not (dlv.isArchived() or dlv.isError())]
            _sleep(1)
    except Exception as e:
        log.exception(e)
    finally:
        ws.setToStop()
        ws.join()
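# A sketch of the cfg object main() reads, inferred from the attributes it
# accesses above; the concrete URL, paths, and default values are assumptions.
import logging
from argparse import Namespace

_cfg = Namespace(
    urls=['http://example.com/playlist/123'],  # page or playlist URLs
    playlist=True,        # expand each URL via parsers.getPlayListParser()
    outpath='./videos',   # base output directory
    vidfmt=0,             # forwarded to each VUrlTask as vidfmt
    npf=3,                # forwarded to each VUrlTask as npf
    tmin=5, tmax=20,      # forwarded to the WorkShop constructor
)
logging.basicConfig(level=logging.INFO)
main(_cfg, logging.getLogger('dlvideo'))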
def __makeSubWorks(self):
    urls, npf, headers = \
        self.parse_url(self.url, self.vidfmt, self.npf, self.outpath)
    if len(urls) == 0:
        self.log.info('[VUrlTask] not a video page, %s', self.url)
        return
    if pexists(self.outname):
        self.log.info('[VUrlTask] output file exists, %s', self.outname)
        return
    util.assure_path(self.tmpdir)
    # record the source URL of this task
    with open(self.__task_history, 'w') as fp:
        fp.write(self.url)
    self.log.debug('[VUrlTask] OUT FILE: %s', self.outname)
    self.log.debug('[VUrlTask] TMP DIR: %s', self.tmpdir)
    self.__makeSubTasks(urls, headers, npf)
    if len(self.__subtasks) == 0:
        self.cleanup()
        return
    # flatten every sub-task's subWorks into one list for addSubWorks()
    subworks = []
    for tsk in self.__subtasks:
        for wk in tsk.subWorks:
            subworks.append(wk)
    self.addSubWorks(subworks)
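# Illustrative shape of the triple that parse_url() is expected to return,
# inferred from how __makeSubWorks() unpacks and forwards it; every concrete
# value here (URLs, header fields, npf) is an assumption for illustration.
example_urls = [
    'http://cdn.example.com/clip-0.flv',   # one downloadable URL per clip
    'http://cdn.example.com/clip-1.flv',
]
example_npf = 3                            # forwarded to __makeSubTasks() as npf
example_headers = {
    'Referer': 'http://example.com/video/123',
    'User-Agent': 'Mozilla/5.0',
}
# With an empty URL list, __makeSubWorks() logs "not a video page" and returns;
# otherwise it records self.url in the task-history file, builds one sub-task
# per clip, and hands the collected subWorks to addSubWorks().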
def test_assure_path(self):
    print 'test_assure_path'
    tmp_path = r'./tmp/a/b/c/d'
    util.assure_path(tmp_path)
    self.assertTrue(os.path.exists(tmp_path))