def test(app, poll='/', context=lambda: mock.patch.object(mock, '_fake_', create=True), use_thread=False):
    # default `context` is effectively a no-op: it patches a fake attribute onto the mock module
    port = util.net.free_port()
    url = 'http://0.0.0.0:{}'.format(port)
    def run():
        with context():
            if isinstance(app, tornado.web.Application):
                app.listen(port)
            else:
                app().listen(port)
            if not use_thread:
                tornado.ioloop.IOLoop.current().start()
    # run the app in a subprocess by default, or in a thread when use_thread=True
    proc = (pool.thread.new if use_thread else pool.proc.new)(run)
    if poll:
        # block until the server answers at url + poll
        wait_for_http(url + poll)
    try:
        yield url
    finally:
        if not use_thread:
            proc.terminate()
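# Usage sketch for the fixture above, not part of the original tests. It assumes `test` is
# wrapped with contextlib.contextmanager where it is defined, and reuses the web.app /
# web.get_sync helpers seen in the tests below; `handler` here is a hypothetical example.
def example_test_usage():
    async def handler(req):
        return {'body': 'ok'}
    with test(lambda: web.app([('/', {'get': handler})])) as url:
        # the fixture has already polled '/' until the server responded
        assert web.get_sync(url)['body'] == 'ok'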
def test_normal_app():
    async def handler(req):
        return {'body': 'asdf'}
    port = util.net.free_port()
    web.app([('/', {'get': handler})]).listen(port)
    proc = pool.proc.new(tornado.ioloop.IOLoop.current().start)
    url = f'http://0.0.0.0:{port}'
    assert web.get_sync(url)['body'] == 'asdf'
    proc.terminate()
def test_normal_app():
    @tornado.gen.coroutine
    def handler(req):
        yield None
        return {'body': 'asdf'}
    port = util.net.free_port()
    web.app([('/', {'get': handler})]).listen(port)
    proc = pool.proc.new(tornado.ioloop.IOLoop.current().start)
    url = 'http://0.0.0.0:{port}'.format(**locals())
    assert web.get_sync(url)['body'] == 'asdf'
    proc.terminate()
def servers(timeout=30, extra_conf='', num_servers=3):
    util.log.setup(format='%(message)s')
    shell.set['stream'] = True
    with util.time.timeout(timeout):
        with shell.stream():
            with shell.tempdir():
                procs = start_all(extra_conf, num_servers)
                # the watcher thread runs until watch[0] is flipped to False on teardown
                watch = [True]
                pool.thread.new(watcher, watch, procs)
                try:
                    yield
                finally:
                    watch[0] = False
                    for proc in procs:
                        proc.terminate()
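# Usage sketch for the fixture above, not part of the original suite. It assumes `servers`
# is wrapped with contextlib.contextmanager where it is defined, and that S4_CONF_PATH is
# the conf file written by start_all() below, one "host:port" address per line.
def example_servers_usage():
    with servers(timeout=60, num_servers=3):
        with open(os.environ['S4_CONF_PATH']) as f:
            addresses = f.read().splitlines()
        assert len(addresses) == 3  # cluster is up; tests can hit these addresses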
def server():
    port = 8080
    proc = pool.proc.new(sh.run, 'python src/server.py --port', port)
    addr = f'http://localhost:{port}'
    try:
        for _ in range(100):
            try:
                requests.get(addr)
            except:
                time.sleep(.01)
            else:
                break
        else:
            assert False, 'server never came up'
        yield addr
    finally:
        proc.terminate()
def _put(queue_name, url, seconds, parallelism):
    def worker():
        # hammer the /put endpoint until the process is terminated
        while True:
            resp = requests.post('%s/put?queue=%s' % (url, queue_name),
                                 data=json.dumps({'item': 'foo', 'data': 'asdfasdf' * 10}))
            assert resp.status_code == 200
    # snapshot stats before the workers start so throughput can be computed as a delta
    stats = requests.get('%s/stats' % url).json()
    procs = [pool.proc.new(worker) for _ in range(parallelism)]
    time.sleep(seconds)
    for proc in procs:
        proc.terminate()
    new_stats = requests.get('%s/stats' % url).json()
    total_put = new_stats[queue_name]['num-queued'] - stats.get(queue_name, {}).get('num-queued', 0)
    res = 'put %s items\n' % total_put
    return res + '%s puts/second\n' % int(total_put / float(seconds))
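# Usage sketch for the benchmark helper above, not part of the original code. It assumes a
# queue server exposing the /put and /stats endpoints is already listening at `url`; the
# queue name, duration, and parallelism below are arbitrary illustration values.
def example_put_benchmark():
    url = 'http://0.0.0.0:8080'
    report = _put('bench-queue', url, seconds=5, parallelism=4)
    print(report)  # e.g. "put <N> items\n<M> puts/second\n"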
def start_all(extra='', num=3):
    ports = [util.net.free_port() for _ in range(num)]
    # write the cluster conf, one address per line, and point S4_CONF_PATH at it
    conf = os.environ['S4_CONF_PATH'] = os.path.abspath(run('mktemp -p .'))
    with open(conf, 'w') as f:
        f.write('\n'.join(f'0.0.0.0:{port}' for port in ports) + '\n')
    procs = [pool.proc.new(start, port, conf, extra) for port in ports]
    try:
        # poll every port until all servers respond, up to ~5 seconds
        for _ in range(50):
            try:
                for port in ports:
                    requests.get(f'http://0.0.0.0:{port}')
                break
            except:
                time.sleep(.1)
        else:
            assert False, 'failed to start servers'
    except:
        for proc in procs:
            proc.terminate()
        raise
    else:
        return procs