import atexit
import os
import sys

from requests_threads import AsyncSession


def daemonize(func, pid_file=None, log_file=None, debug=False):
    if not debug:
        pid = os.fork()
        if pid:
            sys.exit(0)
        os.chdir('/')
        os.umask(0)
        os.setsid()
        _pid = os.fork()  # Fork again; the grandchild process is the daemon now.
        if _pid:
            sys.exit(0)
        sys.stdout.flush()
        sys.stderr.flush()
        # Redirect the standard streams to /dev/null
        with open('/dev/null') as read_null, open('/dev/null', 'w') as write_null:
            os.dup2(read_null.fileno(), sys.stdin.fileno())
            os.dup2(write_null.fileno(), sys.stdout.fileno())
            os.dup2(write_null.fileno(), sys.stderr.fileno())
        if pid_file:
            with open(pid_file, 'w+') as f:
                f.write(str(os.getpid()))
            # Register an exit function to remove the pid file
            atexit.register(os.remove, pid_file)
    # Do the work
    session = AsyncSession(n=os.cpu_count() * 2)
    func.session = session
    func.log_file = log_file
    session.run(func)
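# A minimal usage sketch for daemonize above; the coroutine, target URL, and
# pid-file path are illustrative assumptions, not part of the original code.
# daemonize attaches the AsyncSession to the function object itself, so the
# worker reaches it as an attribute.
async def poll():
    resp = await poll.session.get('http://httpbin.org/get')  # hypothetical target
    print(resp.status_code)

daemonize(poll, pid_file='/tmp/poll.pid')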
from requests_threads import AsyncSession

session = AsyncSession(n=100)

async def _main():
    rs = []
    for _ in range(100):
        rs.append(await session.get('http://httpbin.org/get'))
    print(rs)

if __name__ == '__main__':
    session.run(_main)
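# Awaiting inside the loop above waits for each response before issuing the
# next request. A minimal sketch of the overlapping variant (the same pattern
# the generator and test snippets below use): start all requests first, then
# await the accumulated responses.
from requests_threads import AsyncSession

session = AsyncSession(n=100)

async def _concurrent_main():
    rs = [session.get('http://httpbin.org/get') for _ in range(100)]
    rs = [await r for r in rs]
    print(rs)

if __name__ == '__main__':
    session.run(_concurrent_main)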
import time

import pandas as pd
from requests_threads import AsyncSession

# NOTE: args (parsed CLI arguments) and CALLBACK_URL are defined earlier in
# the original script and are elided here.
headers = {
    'Content-Type': 'application/json'
}

urls = pd.read_csv(args.csv)
# urls = urls.head(5500)
print(urls.head())

# response = requests.post('https://api.getnetra.com/image-detection/process/brands',
#                          headers=headers,
#                          json=request)

session = AsyncSession(n=100)

async def _main():
    rs = []
    for line in urls.iterrows():
        img_url = line[1]['url']
        request = {
            'image_url': img_url,
            'callback_url': CALLBACK_URL
        }
        rs.append(await session.post('https://api.getnetra.com/image-detection/process/all',
                                     headers=headers,
                                     json=request))
        time.sleep(0.004)  # crude rate limit between requests
    for res in rs:
        print(res.text)

session.run(_main)
# print('Sent request:', response.status_code, response.text)
import asyncio
import time

import uvloop
from requests_threads import AsyncSession

asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

url = "http://52.186.121.126:8000"
n = 0

async def main():
    global n
    while True:
        await session.get(url)
        n += 1

def monitor():
    # Runs in an executor thread: print and reset the counter once per second.
    global n
    while True:
        time.sleep(1)
        print(f"{n} reqs/sec")
        n = 0

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    session = AsyncSession(n=1000, loop=loop)
    try:
        loop.run_in_executor(None, monitor)
        # loop.run_until_complete(main())
        session.run(main)
    except KeyboardInterrupt:
        loop.close()
def test__add_location__valid__async_with_uuid(self):
    l.log(self.dbg, "entering: test__add_location__valid__async_with_uuid")
    for _ in range(10):  # NOTE: run 10 random iterations for robustness
        l.log(self.dbg, "\tstarting round {}".format(_))
        l.log(self.dbg, "\tresetting the database")
        reset.auto_reset()  # NOTE: reset the database
        l.log(self.dbg, "\tadding a user")

        def ex(w):
            # Route the child's stdout into the pipe and start the app.
            os.dup2(w.fileno(), 1)
            app.app.run()

        a, b = multiprocessing.Pipe()
        p = multiprocessing.Process(target=ex, args=(a,))
        log = logging.getLogger('werkzeug')
        log.disabled = True
        p.start()
        time.sleep(2)
        url = "http://" + "127.0.0.1:5000" + endpoint
        session = AsyncSession(n=100)

        async def test():
            _type = random.choice(lconst.USER_TYPES)
            username = "".join([
                random.choice(string.ascii_letters + string.digits)
                for _ in range(random.randint(
                    lconst.MIN_USERNAME_LEN, lconst.MAX_USERNAME_LEN))
            ])
            password_hash = "".join([
                random.choice(string.ascii_letters + string.digits)
                for _ in range(random.randint(
                    lconst.MIN_PASSWORD_HASH_LEN, lconst.MAX_PASSWORD_HASH_LEN))
            ])
            derived_user_id = ludeim.generate_user_user_id(username, password_hash)
            await session.post(url=url, json={
                "jsonrpc": "2.0",
                "method": "add_user",
                "params": {
                    "type": _type,
                    "username": username,
                    "password_hash": password_hash
                },
                "id": 1
            })
            l.log(self.dbg, "\tadding 10 locations asynchronously")
            resps = []
            for _ in range(10):
                _type = random.choice(lconst.LOCATION_TYPES)
                name = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(5, 50))  # TODO: formalize bounds for location names
                ])
                address = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(5, 50))  # TODO: formalize bounds for addresses
                ])
                latitude = float(random.randint(1, 360) /
                                 random.randint(1, 360))  # TODO: formalize bounds for latitude
                longitude = float(random.randint(1, 360) /
                                  random.randint(1, 360))  # TODO: formalize bounds for longitude
                details = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(0, 100))  # TODO: formalize bounds for details
                ])
                rep_title = random.choice(lconst.TITLES)
                rep_first_name = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(5, 20))  # TODO: formalize bounds for first names
                ])
                rep_last_name = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(5, 20))  # TODO: formalize bounds for last names
                ])
                rep_contact_info = "".join([
                    random.choice(string.ascii_letters + string.digits)
                    for _ in range(random.randint(5, 20))  # TODO: formalize bounds for contact info
                ])
                payload = {
                    "jsonrpc": "2.0",
                    "method": "add_location",
                    "params": {
                        "user_id": derived_user_id,
                        "type": _type,
                        "name": name,
                        "address": address,
                        "latitude": latitude,
                        "longitude": longitude,
                        "details": details,
                        "representative": {
                            "title": rep_title,
                            "first_name": rep_first_name,
                            "last_name": rep_last_name,
                            "contact_info": rep_contact_info
                        }
                    },
                    "id": 1
                }
                # Queue the request without awaiting so all 10 run concurrently.
                resps.append(session.post(url=url, json=payload))
            for i in range(len(resps)):
                resps[i] = await resps[i]
            resps = [r.json() for r in resps]
            acc = True
            for r in resps:
                acc = acc and "result" in r
            l.log(self.dbg, "\tasserting all locations were added")
            self.assertTrue(acc, msg="a request errored")
            db_resp = db.get_connection().execute(
                """SELECT * FROM locations""").fetchall()
            self.assertEqual(len(db_resp), 10,
                             "not all locations got saved to the database")

        try:
            session.run(test)
        except twisted.internet.error.ReactorNotRestartable:
            pass
        except SystemExit:
            # NOTE: requests_threads is experimental and currently always
            # exits with a hard sys.exit
            pass
        p.kill()
        l.log(self.dbg, "\tending round {}\n".format(_))
import argparse
import asyncio
import logging
import os

from ddtrace import Pin, tracer
from requests_threads import AsyncSession

logger = logging.getLogger()

parser = argparse.ArgumentParser(description='Concurrent Traffic Generator')
parser.add_argument('concurrent', type=int, help='Number of Concurrent Requests')
parser.add_argument('total', type=int, help='Total number of Requests to Make')
parser.add_argument('url', type=str, help='URL to fetch')
args = parser.parse_args()

NODE_URL = f"http://{os.environ['NODE_API_SERVICE_HOST']}:{os.environ['NODE_API_SERVICE_PORT']}"

asyncio.set_event_loop(asyncio.new_event_loop())
session = AsyncSession(n=args.concurrent)
Pin.override(session, service='concurrent-requests-generator')

async def generate_requests():
    with tracer.trace('flask.request', service='concurrent-requests-generator') as span:
        # Start all requests first, then await them so they run concurrently.
        rs = []
        for _ in range(args.total):
            rs.append(session.get(NODE_URL + args.url))
        for i in range(args.total):
            rs[i] = await rs[i]
        print(rs)

session.run(generate_requests)
session.close()
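# Example invocation of the generator above (the script name is hypothetical;
# the NODE_API_* variables are normally injected by Kubernetes service discovery):
#   NODE_API_SERVICE_HOST=localhost NODE_API_SERVICE_PORT=3000 \
#   python traffic_generator.py 10 100 /users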
import asyncio
import time

import requests
from requests_threads import AsyncSession

async def asyncio_benchmark():
    start = time.time()
    for _ in range(number_or_repetitions):
        futures = [
            loop.run_in_executor(None, requests.get, url)
            for url in urls
        ]
        for future in futures:
            await future
    print('Average elapsed time asyncio = {}'.format(
        (time.time() - start) / number_or_repetitions))

async def requests_threads_benchmark():
    start = time.time()
    for _ in range(number_or_repetitions):
        for url in urls:
            await session.get(url)
    print('Average elapsed time requests_threads = {}'.format(
        (time.time() - start) / number_or_repetitions))

if __name__ == '__main__':
    requests_benchmark()
    grequests_benchmark()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio_benchmark())
    session = AsyncSession(n=100)
    session.run(requests_threads_benchmark)
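# requests_benchmark and grequests_benchmark (and the urls and
# number_or_repetitions globals) are elided from the snippet above. A minimal
# sketch of what they plausibly look like, inferred from the timing pattern of
# the other two benchmarks; these bodies are assumptions, not the original code.
import time

import grequests
import requests

def requests_benchmark():
    # Plain blocking requests, issued one at a time.
    start = time.time()
    for _ in range(number_or_repetitions):
        for url in urls:
            requests.get(url)
    print('Average elapsed time requests = {}'.format(
        (time.time() - start) / number_or_repetitions))

def grequests_benchmark():
    # grequests maps the whole batch onto gevent greenlets.
    start = time.time()
    for _ in range(number_or_repetitions):
        grequests.map(grequests.get(url) for url in urls)
    print('Average elapsed time grequests = {}'.format(
        (time.time() - start) / number_or_repetitions))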