def download(self, params, ep):
    params['logger'] = FakeLogger()
    ydl = YoutubeDL(params)
    downloader = HttpFD(ydl, params)
    filename = 'testfile.mp4'
    try_rm(encodeFilename(filename))
    self.assertTrue(downloader.real_download(filename, {
        'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
    }))
    self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
    try_rm(encodeFilename(filename))
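# try_rm is imported from youtube-dl's test helpers rather than defined in
# these snippets. A minimal sketch of its behaviour (remove a file, ignoring
# the case where it does not exist), for reference:
import errno
import os

def try_rm(filename):
    """ Remove a file if it exists """
    try:
        os.remove(filename)
    except OSError as ose:
        if ose.errno != errno.ENOENT:
            raise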
def _download_restricted(url, filename, age):
    """ Returns true iff the file has been downloaded """
    params = {
        'age_limit': age,
        'skip_download': True,
        'writeinfojson': True,
        "outtmpl": "%(id)s.%(ext)s",
    }
    ydl = YoutubeDL(params)
    ydl.add_default_info_extractors()
    json_filename = filename + '.info.json'
    try_rm(json_filename)
    ydl.download([url])
    res = os.path.exists(json_filename)
    try_rm(json_filename)
    return res
def _download_restricted(url, filename, age):
    """ Returns true if the file has been downloaded """
    params = {
        'age_limit': age,
        'skip_download': True,
        'writeinfojson': True,
        "outtmpl": "%(id)s.%(ext)s",
    }
    ydl = YoutubeDL(params)
    ydl.add_default_info_extractors()
    json_filename = os.path.splitext(filename)[0] + '.info.json'
    try_rm(json_filename)
    ydl.download([url])
    res = os.path.exists(json_filename)
    try_rm(json_filename)
    return res
def _download_restricted(url, filename, age):
    """ Returns true if the file has been downloaded """
    params = {
        'age_limit': age,
        'skip_download': True,
        'writeinfojson': True,
        'outtmpl': '%(id)s.%(ext)s',
    }
    ydl = YoutubeDL(params)
    ydl.add_default_info_extractors()
    json_filename = os.path.splitext(filename)[0] + '.info.json'
    try_rm(json_filename)
    ydl.download([url])
    res = os.path.exists(json_filename)
    try_rm(json_filename)
    return res
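# A hedged usage sketch for the _download_restricted variants above; the URL
# and age values are illustrative placeholders, not taken from the original
# tests. An age-gated video should produce an .info.json with a sufficiently
# high age_limit and be refused with a low one.
def test_age_gate_example(self):
    url = 'https://example.com/some-age-gated-video'  # hypothetical URL
    self.assertTrue(_download_restricted(url, 'video.mp4', 19))
    self.assertFalse(_download_restricted(url, 'video.mp4', 10))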
def try_rm_tcs_files(tcs=None):
    if tcs is None:
        tcs = test_cases
    for tc in tcs:
        tc_filename = get_tc_filename(tc)
        try_rm(tc_filename)
        try_rm(tc_filename + '.part')
        try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
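# get_tc_filename is defined elsewhere in the test module and not shown here.
# A plausible sketch, assuming a test case either names its output file
# explicitly ('file') or derives it from its expected 'info_dict' through the
# YoutubeDL output template; the explicit ydl parameter is added only to keep
# the sketch self-contained.
def get_tc_filename(tc, ydl):
    return tc.get('file') or ydl.prepare_filename(tc.get('info_dict', {}))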
def tearDown(self):
    try_rm(ANNOTATIONS_FILE)
def try_rm_tcs_files():
    for tc in test_cases:
        tc_filename = get_tc_filename(tc)
        try_rm(tc_filename)
        try_rm(tc_filename + '.part')
        try_rm(os.path.splitext(tc_filename)[0] + '.info.json')
def tearDown(self):
    try_rm(join(root_dir, 'test.webm'))
def test_template(self):
    ie = youtube_dl.extractor.get_info_extractor(test_case['name'])

    def print_skipping(reason):
        print('Skipping %s: %s' % (test_case['name'], reason))
    if not ie._WORKING:
        print_skipping('IE marked as not _WORKING')
        return
    if 'playlist' not in test_case and not test_case['file']:
        print_skipping('No output file specified')
        return
    if 'skip' in test_case:
        print_skipping(test_case['skip'])
        return

    params = get_params(test_case.get('params', {}))

    ydl = YoutubeDL(params)
    ydl.add_default_info_extractors()
    finished_hook_called = set()

    def _hook(status):
        if status['status'] == 'finished':
            finished_hook_called.add(status['filename'])
    ydl.fd.add_progress_hook(_hook)

    test_cases = test_case.get('playlist', [test_case])
    for tc in test_cases:
        try_rm(tc['file'])
        try_rm(tc['file'] + '.part')
        try_rm(tc['file'] + '.info.json')
    try:
        for retry in range(1, RETRIES + 1):
            try:
                ydl.download([test_case['url']])
            except (DownloadError, ExtractorError) as err:
                if retry == RETRIES:
                    raise

                # Check if the exception is not a network related one
                if not err.exc_info[0] in (compat_urllib_error.URLError, socket.timeout, UnavailableVideoError):
                    raise

                print('Retrying: {0} failed tries\n\n##########\n\n'.format(retry))
            else:
                break

        for tc in test_cases:
            if not test_case.get('params', {}).get('skip_download', False):
                self.assertTrue(os.path.exists(tc['file']), msg='Missing file ' + tc['file'])
                self.assertTrue(tc['file'] in finished_hook_called)
            self.assertTrue(os.path.exists(tc['file'] + '.info.json'))
            if 'md5' in tc:
                md5_for_file = _file_md5(tc['file'])
                self.assertEqual(md5_for_file, tc['md5'])
            with io.open(tc['file'] + '.info.json', encoding='utf-8') as infof:
                info_dict = json.load(infof)
            for (info_field, expected) in tc.get('info_dict', {}).items():
                if isinstance(expected, compat_str) and expected.startswith('md5:'):
                    got = 'md5:' + md5(info_dict.get(info_field))
                else:
                    got = info_dict.get(info_field)
                self.assertEqual(
                    expected, got,
                    u'invalid value for field %s, expected %r, got %r' % (info_field, expected, got))

            # If checkable fields are missing from the test case, print the info_dict
            test_info_dict = dict(
                (key, value if not isinstance(value, compat_str) or len(value) < 250 else 'md5:' + md5(value))
                for key, value in info_dict.items()
                if value and key in ('title', 'description', 'uploader', 'upload_date', 'uploader_id', 'location'))
            if not all(key in tc.get('info_dict', {}).keys() for key in test_info_dict.keys()):
                sys.stderr.write(
                    u'\n"info_dict": ' + json.dumps(test_info_dict, ensure_ascii=False, indent=2) + u'\n')

            # Check for the presence of mandatory fields
            for key in ('id', 'url', 'title', 'ext'):
                self.assertTrue(key in info_dict.keys() and info_dict[key])
    finally:
        for tc in test_cases:
            try_rm(tc['file'])
            try_rm(tc['file'] + '.part')
            try_rm(tc['file'] + '.info.json')
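# The md5 checks above rely on two small hashing helpers from the test suite
# that are not shown in this section. A sketch of their behaviour, not a
# verbatim copy: md5() hashes a unicode string, _file_md5() hashes a file's
# raw bytes.
import hashlib

def md5(s):
    return hashlib.md5(s.encode('utf-8')).hexdigest()

def _file_md5(fn):
    with open(fn, 'rb') as f:
        return hashlib.md5(f.read()).hexdigest()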
def tearDown(self):
    for f in self.files:
        try_rm(f)
def try_rm_tcs_files():
    for tc in test_cases:
        tc_filename = get_tc_filename(tc)
        try_rm(tc_filename)
        try_rm(tc_filename + '.part')
        try_rm(tc_filename + '.info.json')