def setUp(self):
    interface.call('patch', 'remove_source', erase=True)
    try:
        shutil.rmtree(settings.external_plugins)
    except:
        pass
    print "up"

def setUp(self): interface.call("patch", "remove_source", erase=True) try: shutil.rmtree(settings.external_plugins) except: pass print "up"
def finish_download(self, type, speed=0):
    print "!!!", '{} #finish'.format(type)
    interface.call('config', 'set', key='download.rate_limit', value=speed)
    self.file_enabled(True)
    event.wait_for_events(['file:download_complete'], 10)
    self.assert_download_complete()
    debugtools.assert_file_checksum('md5', self.file.get_complete_file(), httpserver.md5_10mb)
    self.del_link()

def _test_rar_multipart_start_stop():
    print '-'*100, 'test_rar_multipart_start_stop'
    files = list()
    add_files(files, 'download_complete', 'foo', 'foo')
    gevent.spawn_later(1.5, interface.call, 'core', 'stop')
    event.wait_for_events(['download:stopped'], 5)
    interface.call('core', 'printr')
    gevent.sleep(1)
    interface.call('core', 'start')
    event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5)
    interface.call('core', 'printr')
    with transaction:
        files[1].state = 'download_complete'
    event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5)
    interface.call('core', 'printr')
    with transaction:
        files[2].state = 'download_complete'
    event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5)
    interface.call('core', 'printr')
    gevent.sleep(0.1)
    for f in files:
        f.join()
        assert f.last_error is None
        assert f.working is False
        assert f.state == 'rarextract_complete', "expected complete, but is {}".format(f.state)
    p = os.path.join(files[0].get_extract_path(), "1mb.bin")
    assert os.path.exists(p)
    try:
        debugtools.assert_file_checksum('md5', p, '934a5866d0a738c32f040559eccbf567')
    finally:
        os.unlink(p)
    for f in files:
        f.delete()

def test_connection_limit(self):
    interface.call('config', 'set', key='download.max_chunks', value=6)
    self.testurl = httpserver.url+'/resume/connection_limit/10mb.bin'
    cache = self._default_flow_test('connection limit', 6, '>=', self._connection_limit_callback)
    self._connection_limit_callback()
    for i in range(len(self.file.chunks)):
        chunk = self.file.chunks[i]
        assert cache[i]['id'] == chunk.id
        if chunk.pos >= chunk.begin:
            assert cache[i]['pos'] <= chunk.pos
    self.finish_download('connection limit', 5*1024**2)
    interface.call('config', 'set', key='download.max_chunks', value=DEFAULT_MAX_CHUNKS)

def init():
    logger.log_console_level = logger.logging.DEBUG
    login.module_initialized = Event()
    settings.db_file = ':memory:'
    settings.next_uid_file = 0
    settings.config_file = None
    settings.log_file = None
    settings.init()
    db.init()
    config.init()
    interface.call('config', 'set', key='check.use_cache', value=False)
    login.set_login('*****@*****.**', 'helloworld')

def _test_source(self, url, config_url):
    assert patch.sources == {}
    interface.call('patch', 'add_source', url=url)
    source = patch.sources['hoster']
    data = source.serialize()
    debugtools.compare_dict(data, {
        'branches': [],
        'config_url': config_url,
        'url': 'http://github.com/downloadam/hoster.git',
        'last_error': None,
        'enabled': True,
        'version': '0000000',
        'id': 'hoster'})
    try:
        with Timeout(10):
            patch.patch_all(external_loaded=False)
    except Timeout:
        print "WARNING: patch timed out. ignoring error"
        return
    data = source.serialize()
    debugtools.compare_dict(data, {
        'config_url': config_url,
        'url': 'http://github.com/downloadam/hoster.git',
        'last_error': None,
        'enabled': True,
        'id': 'hoster'})
    assert 'master' in data['branches']
    assert data['version'] != '0'*7

def _default_flow_test(self, type, num_chunks, chunk_pos='>', callback=None):
    interface.call('config', 'set', key='download.rate_limit', value=100*1024)
    print "!!!", '{} #1'.format(type)
    self.add_link()
    gevent.sleep(SLEEP[0])
    self.file_enabled(False)
    self.assert_download_incomplete(num_chunks, chunk_pos)
    cache = [chunk.serialize() for chunk in self.file.chunks]
    if callback:
        callback()
    print "!!!", '{} #2'.format(type)
    self.file_enabled(True)
    gevent.sleep(SLEEP[1])
    self.file_enabled(False)
    self.assert_download_incomplete(num_chunks, chunk_pos)
    return cache

def test_start_stop(self):
    interface.call('config', 'set', key='download.max_chunks', value=3)
    interface.call('config', 'set', key='download.rate_limit', value=30*1024)
    self.testurl = httpserver.url+'/resume/10mb.bin'
    self.add_link()
    gevent.sleep(SLEEP[0])
    assert len(self.file.chunks) == 3
    assert self.file.chunks_working == 3
    interface.call('core', 'stop')
    gevent.sleep(SLEEP[0])
    self.assert_download_incomplete(3, '>')
    assert self.file.chunks_working == 0
    interface.call('core', 'start')
    self.finish_download('resume')
    interface.call('config', 'set', key='download.max_chunks', value=DEFAULT_MAX_CHUNKS)

def test_proxy():
    # disabled: needs a local SOCKS5 tunnel, e.g.: ssh -v -N -D 127.0.0.1:1080 test.domain.com
    return
    api.init()
    listener = scheme.PassiveListener('api')
    scheme.register(listener)
    type, host, port = 'socks5', '127.0.0.1', 1080

    def check_output(text):
        wc = plugintools.wildcard('<html><head><title>Current IP Check</title></head><body>Current IP Address: *</body></html>')
        return wc.match(text)

    resp = requests.get('http://checkip.dyndns.org/')
    proxyless_text = resp.text.strip()
    assert check_output(proxyless_text)

    interface.call('proxy', 'set', type=type, host=host, port=port)
    data = listener.pop().values()[0]
    debugtools.compare_dict(data, {
        'proxy.port': 1080, 'proxy.enabled': True, 'proxy.type': 'socks5',
        'proxy.host': '127.0.0.1', 'action': 'update', 'table': 'config'})
    resp = requests.get('http://checkip.dyndns.org/')
    proxy_text = resp.text.strip()
    assert check_output(proxy_text)
    assert proxy_text != proxyless_text

    interface.call('proxy', 'remove')
    data = listener.pop().values()[0]
    debugtools.compare_dict(data, {
        'proxy.port': None, 'proxy.enabled': None, 'proxy.type': None,
        'proxy.host': None, 'action': 'update', 'table': 'config'})
    resp = requests.get('http://checkip.dyndns.org/')
    proxyless_text2 = resp.text.strip()
    assert check_output(proxyless_text2)
    assert proxy_text != proxyless_text2
    assert proxyless_text == proxyless_text2

def test_rar_multipart(): print "-" * 100, 'test_rar_multipart' files = list() add_files(files, 'download_complete', 'download', 'download') event.wait_for_events( ['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') with transaction: if not "rarextract" in files[1].completed_plugins: print files[1].state files[1].state = 'download_complete' files[1].working = False event.wait_for_events( ['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') with transaction: if not "rarextract" in files[2].completed_plugins: print files[2].state files[2].state = 'download_complete' files[2].working = False event.wait_for_events( ['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') gevent.sleep(1) for f in files: f.join() assert f.last_error is None assert not f.working, "{} is working".format(f.name) assert f.state == 'rarextract_complete', "expected complete, but is {}".format( f.state) p = os.path.join(files[0].get_extract_path(), "1mb.bin") assert os.path.exists(p) try: debugtools.assert_file_checksum('md5', p, '934a5866d0a738c32f040559eccbf567') finally: os.unlink(p) for f in files: f.delete()
def test_rar_multipart(): print "-"*100, 'test_rar_multipart' files = list() add_files(files, 'download_complete', 'download', 'download') event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') with transaction: if not "rarextract" in files[1].completed_plugins: print files[1].state files[1].state = 'download_complete' files[1].working = False event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') with transaction: if not "rarextract" in files[2].completed_plugins: print files[2].state files[2].state = 'download_complete' files[2].working = False event.wait_for_events(['rarextract:part_complete', 'rarextract:waiting_for_part'], 5) interface.call('core', 'printr') gevent.sleep(1) for f in files: f.join() assert f.last_error is None assert not f.working, "{} is working".format(f.name) assert f.state == 'rarextract_complete', "expected complete, but is {}".format(f.state) p = os.path.join(files[0].get_extract_path(), "1mb.bin") assert os.path.exists(p) try: debugtools.assert_file_checksum('md5', p, '934a5866d0a738c32f040559eccbf567') finally: os.unlink(p) for f in files: f.delete()
def test_none(self): interface.call("patch", "add_source", url="localhost:4567") assert patch.sources == {}
def tearDown(self): print "down" interface.call("patch", "remove_source", erase=True)
retval = proc.wait()
if retval != 0:
    data = proc.communicate()[0]
    data = data.splitlines()
    prefix = "\n{} {}: ".format(plugin, test)
    msg = "{}{}".format(prefix, prefix.join(data)).strip()
    with lock:
        print >>sys.stderr, msg
        print >>sys.stderr, 'test_hosters', '...', 'FAILED', '...', plugin, test
    success = False
else:
    print >>sys.stderr, 'test_hosters', '...', 'success', '...', plugin, test


if len(sys.argv) == 1:
    pool = Pool(size=50)
    plugins = interface.call('hoster', 'list_plugins')
    # iterate over the plugins reported by the client and flag the ones without tests
    for plugin in plugins:
        if plugin not in debugtools.tests:
            print >>sys.stderr, 'test_hosters', '...', 'MISSING', '...', plugin
            continue
        with lock:
            print >>sys.stderr, 'test_hosters', '...', 'starting', '...', plugin
        pool.spawn(start_plugin, plugin)
    pool.join()
elif len(sys.argv) == 2:
    plugin = debugtools.tests[sys.argv[1]]
    for test in plugin:
        with lock:
            print >>sys.stderr, 'test_hosters', '...', 'starting', '...', sys.argv[1], test
        start_test(sys.argv[1], test)

def test_mirrors(self):
    interface.call('config', 'set', key='download.max_simultan_downloads', value=4)
    self.testurl = httpserver.url+'/10mb.bin'
    interface.call('core', 'add_links', links=[
        httpserver.url+'/anyname/mirror1/flow-1.bin',
        httpserver.url+'/anyname/mirror2/flow-1.bin',
        httpserver.url+'/anyname/mirror3/flow-1.bin',
        httpserver.url+'/anyname/mirror4/flow-1.bin'])
    self.wait_check()
    assert [f.name for f in core.files()] == ['flow-1.bin', 'flow-1.bin', 'flow-1.bin', 'flow-1.bin']
    interface.call('config', 'set', key='download.rate_limit', value=32768)
    interface.call('core', 'accept_collected')
    event.wait_for_events(['download:spawn_tasks'], 5)
    gevent.sleep(0.2)
    assert len(core._packages) == 1
    assert len(core._packages[0].files) == 4
    assert sum(1 for f in core.files() if f.working) == 1
    interface.call('config', 'set', key='download.rate_limit', value=0)
    event.wait_for_events(['package:download_complete'], 5)
    file = [f for f in core.files() if f.state == 'download_complete'][0]
    assert file.package.state == 'download_complete'
    for f in core.files():
        if f != file:
            assert f.state == 'download'
            assert f.enabled is False
            assert f.last_error.startswith('downloaded via')
    interface.call('core', 'printr')

def tearDown(self):
    httpserver.stop()
    interface.call('core', 'erase_package')

def test_downloads(self):
    interface.call('config', 'set', key='download.max_simultan_downloads', value=2)
    self.testurl = httpserver.url+'/10mb.bin'
    interface.call('core', 'add_links', links=[
        httpserver.url+'/anyname/flow-1.bin',
        httpserver.url+'/anyname/flow-2.bin',
        httpserver.url+'/anyname/flow-3.bin',
        httpserver.url+'/anyname/flow-4.bin'])
    self.wait_check()
    assert [f.name for f in core.files()] == ['flow-1.bin', 'flow-2.bin', 'flow-3.bin', 'flow-4.bin']
    interface.call('config', 'set', key='download.rate_limit', value=32768)
    interface.call('core', 'accept_collected')
    event.wait_for_events(['download:spawn_tasks'], 5)
    gevent.sleep(0.2)
    assert len(core._packages) == 1
    assert len(core._packages[0].files) == 4
    assert [f.working for f in core.files()] == [True, True, False, False]
    interface.call('config', 'set', key='download.rate_limit', value=0)
    event.wait_for_events(['file:download_complete'], 15)
    interface.call('config', 'set', key='download.rate_limit', value=32768)
    # these tests fail randomly so they are disabled (race conditions?!)
    #assert sum(1 for f in core.files() if f.working) == 1
    #assert sum(1 for f in core.files() if f.state == 'download_complete') == 1
    #assert sum(1 for f in core.files() if f.state == 'download') == 3
    interface.call('config', 'set', key='download.rate_limit', value=0)
    event.wait_for_events(['package:download_complete'], 15)
    assert sum(1 for f in core.files() if f.working) == 0
    assert sum(1 for f in core.files() if f.state == 'download_complete') == 4
    assert sum(1 for f in core.files() if f.last_error) == 0
    assert sum(1 for f in core.files() if not f.enabled) == 0
    interface.call('core', 'printr')

import sys

from client import scheme, loader, event, interface, core
import httpserver

loader.init()
sys.stdout = sys._old_stdout
sys.stderr = sys._old_stderr

listener = scheme.PassiveListener(['api', 'db'])
scheme.register(listener)

DEFAULT_MAX_CHUNKS = 2

interface.call('config', 'set', key='download.max_chunks', value=DEFAULT_MAX_CHUNKS)
interface.call('config', 'set', key='download.overwrite', value='overwrite')

#######################

import socket
from client import logger
logger.ignore_exceptions.append(socket.error)


class Test(object):
    def setUp(self):
        httpserver.start()

    def tearDown(self):
        httpserver.stop()
        interface.call('core', 'erase_package')

import sys

from client import debugtools, scheme, loader, event, interface
import httpserver

loader.init()
sys.stdout = sys._old_stdout
sys.stderr = sys._old_stderr

listener = scheme.PassiveListener(['api', 'db'])
scheme.register(listener)

DEFAULT_MAX_CHUNKS = 2
SLEEP = (2.5, 1.2)

interface.call('config', 'set', key='download.max_chunks', value=DEFAULT_MAX_CHUNKS)
interface.call('config', 'set', key='download.overwrite', value='overwrite')

#######################

import socket
from client import logger
logger.ignore_exceptions.append(socket.error)


class Test(object):
    testurl = httpserver.url+'/10mb.bin'

    def setUp(self):
        httpserver.start()

    def tearDown(self):
        httpserver.stop()
        interface.call('core', 'erase_package')

def fake_msg_reset(input):
    interface.call('input', 'reset_timeout', id=input.id, timeout=3)
    assert input.timeout is not None

def tearDown(self): print "down" interface.call('patch', 'remove_source', erase=True)
def test_none(self):
    interface.call('patch', 'add_source', url='localhost:4567')
    assert patch.sources == {}

def test_connection_limit_no_resume(self):
    self.testurl = httpserver.url+'/noresume/connection_limit/10mb.bin'
    interface.call('config', 'set', key='download.max_chunks', value=5)
    self._test_no_resume('connection limit + no resume')
    interface.call('config', 'set', key='download.max_chunks', value=DEFAULT_MAX_CHUNKS)
