def test_redis_reset(self):
    """Push 10000 distinct limits through Redis, then check reset() wipes them all."""
    backend = RedisStorage("redis://localhost:6379")
    window_limiter = FixedWindowRateLimiter(backend)
    # each distinct amount yields a distinct storage key
    for amount in range(10000):
        window_limiter.hit(RateLimitItemPerMinute(amount))
    self.assertEqual(backend.reset(), 10000)
async def before_server_start(app_: Sanic, loop):
    """Sanic 'before_server_start' listener: attaches config files, log
    writers, the rate limiter and an optional Google-Sheets workbook to the
    app, then starts a daemon thread that reloads the watched config files.

    Args:
        app_: the Sanic application being started; attributes are set on it.
        loop: the event loop Sanic passes to listeners (unused here).
    """
    mzk.set_process_name(f'MocaFileLog({core.VERSION}) --- {app_._log_name}')
    mzk.print_info(f'Starting Sanic server. -- {mzk.get_my_pid()}')
    # manual_reload=True everywhere: the timer thread below drives reloads
    app_.system_config: mzk.MocaConfig = mzk.MocaConfig(core.SYSTEM_CONFIG, manual_reload=True)
    app_.ip_blacklist: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.IP_BLACKLIST_FILE,
        manual_reload=True,
        remove_duplicates=True,
    )
    app_.api_key_config: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.API_KEY_FILE, manual_reload=True)
    app_.dict_cache = {}
    # absolute log paths are used verbatim; relative ones live under CLIENT_LOG_DIR
    # NOTE(review): the '/' check assumes POSIX paths — confirm Windows is out of scope
    if app_._log_file_path.startswith('/'):
        file = app_._log_file_path
    else:
        file = core.CLIENT_LOG_DIR.joinpath(app_._log_file_path)
    app_.moca_log = mzk.MocaFileLog(file, app_._log_level)
    app_.secure_log = mzk.MocaFileLog(core.LOG_DIR.joinpath('secure.log'))
    app_.scheduler = mzk.MocaScheduler()
    app_.log_list = []
    # fall back to in-process memory when no Redis URL is configured
    if core.SERVER_CONFIG['rate_limiter_redis_storage'] is None:
        app_._storage_for_rate_limiter = MemoryStorage()
    else:
        app_._storage_for_rate_limiter = RedisStorage(
            core.SERVER_CONFIG['rate_limiter_redis_storage'])
    app_.rate_limiter = FixedWindowElasticExpiryRateLimiter(
        app_._storage_for_rate_limiter)
    # optional Google Sheets export: only wired when an auth keyfile is configured
    if core.LOG_CONFIG[app_._log_name].get('google_spread_sheets_auth', None) is not None:
        scope = [
            'https://spreadsheets.google.com/feeds',
            'https://www.googleapis.com/auth/drive'
        ]
        app_._credentials = ServiceAccountCredentials.from_json_keyfile_name(
            str(
                core.CONFIG_DIR.joinpath(core.LOG_CONFIG[app_._log_name].get(
                    'google_spread_sheets_auth'))),
            scope,
        )
        app_._gc = authorize(app_._credentials)
        app_.workbook = app_._gc.open_by_key(
            core.LOG_CONFIG[app_._log_name].get('spread_sheets_key'))
    else:
        app_.workbook = None

    def __reload_timer(application: Sanic) -> None:
        # poll-and-reload loop; daemon thread, dies with the server process
        while True:
            mzk.sleep(1)
            application.system_config.reload_file()
            application.ip_blacklist.reload_file()
            application.api_key_config.reload_file()

    app_._timer_thread = Thread(target=__reload_timer, args=(app_, ), daemon=True)
    app_._timer_thread.start()
def test_fixed_window_with_elastic_expiry_redis(self):
    """Elastic-expiry fixed window on Redis: every hit extends the window,
    so the exhausted limit stays closed while traffic keeps arriving."""
    backend = RedisStorage('redis://localhost:6379')
    elastic_limiter = FixedWindowElasticExpiryRateLimiter(backend)
    ten_per_two_seconds = RateLimitItemPerSecond(10, 2)
    for _ in range(10):
        self.assertTrue(elastic_limiter.hit(ten_per_two_seconds))
    time.sleep(1)
    # the rejected hit above re-armed the window, so it is still closed
    self.assertFalse(elastic_limiter.hit(ten_per_two_seconds))
    time.sleep(1)
    self.assertFalse(elastic_limiter.hit(ten_per_two_seconds))
def test():
    """Smoke-test the elastic-expiry fixed-window strategy against Redis.

    Prints ``True`` while the 100-per-minute quota has room, otherwise
    prints the quota-exhausted message.
    """
    storage = RedisStorage("redis://127.0.0.1/3")
    strategy = FixedWindowElasticExpiryRateLimiter(storage)
    # one_per_second = RateLimitItemPerSecond(20, 1)
    one_per_min = RateLimitItemPerMinute(100, 1)
    # hit() returns a bool: test truthiness, never compare with `== True`
    allowed = strategy.hit(one_per_min, "k", "v")
    if allowed:
        print(True)
    else:
        print("请求满了")
def test_redis(self):
    """Fixed window on Redis: 10 hits succeed within the second, the 11th
    is rejected, and the window re-opens once the second has elapsed."""
    backend = RedisStorage("redis://localhost:6379")
    window_limiter = FixedWindowRateLimiter(backend)
    ten_per_second = RateLimitItemPerSecond(10)
    started = time.time()
    hits = 0
    while time.time() - started < 0.5 and hits < 10:
        self.assertTrue(window_limiter.hit(ten_per_second))
        hits += 1
    self.assertFalse(window_limiter.hit(ten_per_second))
    # wait out the remainder of the current one-second window
    while time.time() - started <= 1:
        time.sleep(0.1)
    self.assertTrue(window_limiter.hit(ten_per_second))
def test_moving_window_redis(self):
    """Moving window on Redis: hits spread across the 2s window expire one
    by one, freeing capacity as time advances."""
    backend = RedisStorage("redis://localhost:6379")
    moving_limiter = MovingWindowRateLimiter(backend)
    ten_per_two_seconds = RateLimitItemPerSecond(10, 2)
    # spread the 10 hits over ~1.9s so the earliest entries expire soon after
    for idx in range(10):
        self.assertTrue(moving_limiter.hit(ten_per_two_seconds))
        self.assertEqual(
            moving_limiter.get_window_stats(ten_per_two_seconds)[1],
            10 - (idx + 1))
        time.sleep(2 * 0.095)
    self.assertFalse(moving_limiter.hit(ten_per_two_seconds))
    time.sleep(0.4)
    # the oldest entries have slid out of the window: two slots free again
    self.assertTrue(moving_limiter.hit(ten_per_two_seconds))
    self.assertTrue(moving_limiter.hit(ten_per_two_seconds))
    self.assertEqual(moving_limiter.get_window_stats(ten_per_two_seconds)[1], 0)
def test_large_dataset_redis_moving_window_expiry(self):
    """Hammer a moving window from 100 threads for ~2 seconds, then verify
    every Redis key under the limit's namespace expires with the window.

    Fixes over the original: drops the unused ``keys_start`` KEYS call,
    replaces side-effect list comprehensions with plain loops, joins the
    workers before asserting, and uses assertEqual for a clear failure
    message.
    """
    storage = RedisStorage("redis://localhost:6379")
    limiter = MovingWindowRateLimiter(storage)
    limit = RateLimitItemPerSecond(1000)
    # 100 fake routes so the load spreads across distinct keys
    fake_routes = [uuid4().hex for _ in range(100)]
    # go as fast as possible in 2 seconds.
    start = time.time()

    def smack(stop_event):
        # hit a random route as fast as possible until signalled to stop
        while not stop_event.is_set():
            self.assertTrue(limiter.hit(limit, random.choice(fake_routes)))

    events = [threading.Event() for _ in range(100)]
    threads = [threading.Thread(target=smack, args=(e,)) for e in events]
    for worker in threads:
        worker.start()
    while time.time() - start < 2:
        time.sleep(0.1)
    for stop_event in events:
        stop_event.set()
    for worker in threads:
        worker.join()
    # after this sleep every entry falls outside the 1-second window
    time.sleep(2)
    self.assertEqual(storage.storage.keys("%s/*" % limit.namespace), [])
async def before_server_start(app_: Sanic, loop):
    """Sanic startup listener: attach config files, logs and the rate
    limiter to the app, then spawn a daemon thread that reloads the
    watched config files once per second."""
    mzk.set_process_name(f'{app_.name} --- listener {mzk.get_my_pid()}')
    mzk.print_info(f'Starting Sanic server. -- {mzk.get_my_pid()}')
    app_.system_config: mzk.MocaConfig = mzk.MocaConfig(core.SYSTEM_CONFIG, manual_reload=True)
    app_.commands: mzk.MocaSynchronizedJSONDictFile = mzk.MocaSynchronizedJSONDictFile(
        core.COMMANDS_CONFIG, manual_reload=True)
    app_.ip_blacklist: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.IP_BLACKLIST_FILE, manual_reload=True, remove_duplicates=True)
    app_.api_key_config: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.API_KEY_FILE, manual_reload=True)
    app_.dict_cache = {}
    app_.secure_log = mzk.MocaFileLog(core.LOG_DIR.joinpath('secure.log'))
    app_.scheduler = mzk.MocaScheduler()
    # rate limiter falls back to in-process memory when no Redis URL is set
    rate_limit_redis_url = core.SERVER_CONFIG['rate_limiter_redis_storage']
    if rate_limit_redis_url is None:
        app_._storage_for_rate_limiter = MemoryStorage()
    else:
        app_._storage_for_rate_limiter = RedisStorage(rate_limit_redis_url)
    app_.rate_limiter = FixedWindowElasticExpiryRateLimiter(
        app_._storage_for_rate_limiter)

    def _config_reload_loop(application: Sanic) -> None:
        # daemon loop: re-read every watched config file once per second
        while True:
            mzk.sleep(1)
            application.system_config.reload_file()
            application.commands.reload_file()
            application.ip_blacklist.reload_file()
            application.api_key_config.reload_file()

    app_._timer_thread = Thread(target=_config_reload_loop, args=(app_,), daemon=True)
    app_._timer_thread.start()
def setUp(self):
    """Point the storage at a unix-socket Redis and start from a clean DB."""
    self.storage_url = "redis+unix:///var/tmp/limits.redis.sock"
    self.storage = RedisStorage(self.storage_url)
    # redis-py expects the plain unix:// scheme, not limits' redis+unix://
    redis.from_url('unix:///var/tmp/limits.redis.sock').flushall()
def setUp(self):
    """Target the Redis instance on the non-default port 7379, flushed clean."""
    self.storage_url = "redis://localhost:7379"
    self.storage = RedisStorage(self.storage_url)
    redis.from_url(self.storage_url).flushall()
def setUp(self):
    """Target the default local Redis (localhost:6379), flushed clean."""
    self.storage_url = "redis://localhost:6379"
    self.storage = RedisStorage(self.storage_url)
    # default Redis() client already points at localhost:6379
    redis.Redis().flushall()
class RedisStorageTests(unittest.TestCase):
    """Behavioural tests for the Redis-backed rate-limit storage."""

    def setUp(self):
        # every test starts against an empty local Redis
        self.storage_url = "redis://localhost:6379"
        self.storage = RedisStorage(self.storage_url)
        redis.Redis().flushall()

    def test_redis(self):
        """10/s fixed window: full burst allowed, overflow rejected, and a
        fresh window opens after one second."""
        limiter = FixedWindowRateLimiter(self.storage)
        per_second = RateLimitItemPerSecond(10)
        started = time.time()
        hits = 0
        while time.time() - started < 0.5 and hits < 10:
            self.assertTrue(limiter.hit(per_second))
            hits += 1
        self.assertFalse(limiter.hit(per_second))
        while time.time() - started <= 1:
            time.sleep(0.1)
        self.assertTrue(limiter.hit(per_second))

    def test_redis_options(self):
        """Extra keyword options are forwarded to redis-py's from_url."""
        with mock.patch("limits.storage.get_dependency") as get_dependency:
            storage_from_string(self.storage_url, connection_timeout=1)
            self.assertEqual(
                get_dependency().from_url.call_args[1]['connection_timeout'],
                1)

    def test_redis_reset(self):
        """reset() clears one key per distinct limit and reports the count."""
        limiter = FixedWindowRateLimiter(self.storage)
        for amount in range(100):
            limiter.hit(RateLimitItemPerMinute(amount))
        self.assertEqual(self.storage.reset(), 100)

    def test_redis_fixed_window_clear(self):
        """clear() re-opens an exhausted fixed window."""
        limiter = FixedWindowRateLimiter(self.storage)
        per_min = RateLimitItemPerMinute(1)
        limiter.hit(per_min)
        self.assertFalse(limiter.hit(per_min))
        limiter.clear(per_min)
        self.assertTrue(limiter.hit(per_min))

    def test_redis_moving_window_clear(self):
        """clear() re-opens an exhausted moving window."""
        limiter = MovingWindowRateLimiter(self.storage)
        per_min = RateLimitItemPerMinute(1)
        limiter.hit(per_min)
        self.assertFalse(limiter.hit(per_min))
        limiter.clear(per_min)
        self.assertTrue(limiter.hit(per_min))

    def test_large_dataset_redis_moving_window_expiry(self):
        """All moving-window keys expire once the window has fully passed."""
        limiter = MovingWindowRateLimiter(self.storage)
        limit = RateLimitItemPerSecond(1000)
        # 100 routes spread the load across distinct keys
        fake_routes = [uuid4().hex for _ in range(100)]
        # go as fast as possible in 2 seconds
        started = time.time()

        def smack(stop_event):
            while not stop_event.is_set():
                self.assertTrue(limiter.hit(limit, random.choice(fake_routes)))

        events = [threading.Event() for _ in range(100)]
        workers = [threading.Thread(target=smack, args=(e, )) for e in events]
        for worker in workers:
            worker.start()
        while time.time() - started < 2:
            time.sleep(0.1)
        for stop_event in events:
            stop_event.set()
        time.sleep(2)
        self.assertTrue(
            self.storage.storage.keys("%s/*" % limit.namespace) == [])
async def before_server_start(app_: Sanic, loop):
    """Sanic startup listener: wires config files, a Twitter client, the
    rate limiter, MySQL/Redis connections and a simple cache onto the app,
    then starts a daemon thread that reloads the watched config files.

    Exits the whole process (``mzk.sys_exit(1)``) on any database
    misconfiguration or connection failure, so the server never starts
    half-wired.

    Args:
        app_: the Sanic application being started; attributes are set on it.
        loop: the event loop Sanic passes to listeners (unused here).
    """
    mzk.set_process_name(f'{app_.name} --- listener {mzk.get_my_pid()}')
    mzk.print_info(f'Starting Sanic server. -- {mzk.get_my_pid()}')
    # manual_reload=True everywhere: the timer thread below drives reloads
    app_.system_config: mzk.MocaConfig = mzk.MocaConfig(core.SYSTEM_CONFIG, manual_reload=True)
    app_.ip_blacklist: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.IP_BLACKLIST_FILE,
        manual_reload=True,
        remove_duplicates=True,
    )
    app_.api_key_config: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.API_KEY_FILE, manual_reload=True)
    app_.twitter: mzk.MocaTwitter = mzk.MocaTwitter(
        core.TWITTER_CONFIG['CONSUMER_KEY'],
        core.TWITTER_CONFIG['CONSUMER_SECRET'],
        core.TWITTER_CONFIG['ACCESS_TOKEN'],
        core.TWITTER_CONFIG['ACCESS_TOKEN_SECRET'])
    app_.dict_cache = {}
    app_.secure_log = mzk.MocaFileLog(core.LOG_DIR.joinpath('secure.log'))
    app_.scheduler = mzk.MocaScheduler()
    # fall back to in-process memory when no Redis URL is configured
    if core.SERVER_CONFIG['rate_limiter_redis_storage'] is None:
        app_._storage_for_rate_limiter = MemoryStorage()
    else:
        app_._storage_for_rate_limiter = RedisStorage(
            core.SERVER_CONFIG['rate_limiter_redis_storage'])
    app_.rate_limiter = FixedWindowElasticExpiryRateLimiter(
        app_._storage_for_rate_limiter)
    try:
        app_.mysql = mzk.MocaMysql(
            core.DB_CONFIG['mysql']['host'],
            int(core.DB_CONFIG['mysql']['port']),
            core.DB_CONFIG['mysql']['user'],
            core.DB_CONFIG['mysql']['password'],
            core.DB_CONFIG['mysql']['database'],
            int(core.DB_CONFIG['mysql']['min_size']),
            int(core.DB_CONFIG['mysql']['max_size']),
        )
        app_.mysql.force_sync = mzk.try_to_bool(
            core.DB_CONFIG['mysql']['force_sync'])
    except KeyError as e:
        mzk.print_error(
            f'Mysql database configuration error. missing key: {e}')
        mzk.sys_exit(1)
    except MySQLError as e:
        mzk.print_error(
            "Can't connect to MySQL database, Please check your database configuration."
        )
        mzk.print_error("And make sure your database is online.")
        mzk.print_error(
            "You can use 'python3 moca.py test-mysql-con' to check your database."
        )
        mzk.print_error(f"<MySQLError: {e}>")
        mzk.sys_exit(1)
    try:
        # NOTE(review): the pool sizes below come from the *mysql* section,
        # not ['redis'] — presumably intentional reuse, but verify.
        app_.redis = mzk.MocaRedis(
            core.DB_CONFIG['redis']['host'],
            int(core.DB_CONFIG['redis']['port']),
            int(core.DB_CONFIG['redis']['db']),
            core.DB_CONFIG['redis']['password'],
            int(core.DB_CONFIG['mysql']['min_size']),
            int(core.DB_CONFIG['mysql']['max_size']),
        )
        app_.redis.prefix = core.DB_CONFIG['redis']['prefix']
        # fail fast if the Redis server is unreachable
        await app_.redis.test_con()
    except KeyError as e:
        mzk.print_error(
            f'Redis database configuration error. missing key: {e}')
        mzk.sys_exit(1)
    except (RedisError, ConnectionRefusedError) as e:
        mzk.print_error(
            "Can't connect to Redis database, Please check your database configuration."
        )
        mzk.print_error("And make sure your database is online.")
        mzk.print_error(
            "You can use 'python3 moca.py test-redis-con' to check your database."
        )
        mzk.print_error(f"<(RedisError, ConnectionRefusedError): {e}>")
        mzk.sys_exit(1)
    try:
        app_.simple_cache = mzk.MocaSimpleCache(
            int(core.DB_CONFIG['simple_cache']['pool_size']),
            int(core.DB_CONFIG['simple_cache']['page_size']),
        )
    except KeyError as e:
        mzk.print_error(f'SimpleCache configuration error. missing key: {e}')
        mzk.sys_exit(1)

    def __reload_timer(application: Sanic) -> None:
        # poll-and-reload loop; daemon thread, dies with the server process
        while True:
            mzk.sleep(1)
            application.system_config.reload_file()
            application.ip_blacklist.reload_file()
            application.api_key_config.reload_file()

    app_._timer_thread = Thread(target=__reload_timer, args=(app_, ), daemon=True)
    app_._timer_thread.start()
async def before_server_start(app_: Sanic, loop):
    """Sanic startup listener for the bot server: wires config files, the
    rate limiter and MySQL onto the app, starts the config-reload thread,
    loads every MocaBot from storage, and registers a flag handler that
    reloads the bots when the 'moca_bot_reload' flag changes.

    Exits the whole process (``mzk.sys_exit(1)``) on MySQL
    misconfiguration or connection failure.

    Args:
        app_: the Sanic application being started; attributes are set on it.
        loop: the event loop Sanic passes to listeners (unused here).
    """
    mzk.set_process_name(f'{app_.name} --- listener {mzk.get_my_pid()}')
    mzk.print_info(f'Starting Sanic server. -- {mzk.get_my_pid()}')
    # manual_reload=True everywhere: the timer thread below drives reloads
    app_.system_config: mzk.MocaConfig = mzk.MocaConfig(core.SYSTEM_CONFIG, manual_reload=True)
    app_.ip_blacklist: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.IP_BLACKLIST_FILE,
        manual_reload=True,
        remove_duplicates=True,
    )
    app_.api_key_config: mzk.MocaSynchronizedJSONListFile = mzk.MocaSynchronizedJSONListFile(
        core.API_KEY_FILE, manual_reload=True)
    app_.flags = mzk.MocaSynchronizedJSONDictFile(core.FLAGS_FILE, manual_reload=True)
    app_.dict_cache = {}
    app_.secure_log = mzk.MocaFileLog(core.LOG_DIR.joinpath('secure.log'))
    app_.scheduler = mzk.MocaScheduler()
    # fall back to in-process memory when no Redis URL is configured
    if core.SERVER_CONFIG['rate_limiter_redis_storage'] is None:
        app_._storage_for_rate_limiter = MemoryStorage()
    else:
        app_._storage_for_rate_limiter = RedisStorage(
            core.SERVER_CONFIG['rate_limiter_redis_storage'])
    app_.rate_limiter = FixedWindowElasticExpiryRateLimiter(
        app_._storage_for_rate_limiter)
    try:
        app_.mysql = mzk.MocaMysql(
            core.DB_CONFIG['mysql']['host'],
            int(core.DB_CONFIG['mysql']['port']),
            core.DB_CONFIG['mysql']['user'],
            core.DB_CONFIG['mysql']['password'],
            core.DB_CONFIG['mysql']['database'],
            int(core.DB_CONFIG['mysql']['min_size']),
            int(core.DB_CONFIG['mysql']['max_size']),
        )
        app_.mysql.force_sync = mzk.try_to_bool(
            core.DB_CONFIG['mysql']['force_sync'])
    except KeyError as e:
        mzk.print_error(
            f'Mysql database configuration error. missing key: {e}')
        mzk.sys_exit(1)
    except MySQLError as e:
        mzk.print_error(
            "Can't connect to MySQL database, Please check your database configuration."
        )
        mzk.print_error("And make sure your database is online.")
        mzk.print_error(
            "You can use 'python3 moca.py test-mysql-con' to check your database."
        )
        mzk.print_error(f"<MySQLError: {e}>")
        mzk.sys_exit(1)

    def __reload_timer(application: Sanic) -> None:
        # poll-and-reload loop; daemon thread, dies with the server process
        while True:
            mzk.sleep(1)
            application.system_config.reload_file()
            application.ip_blacklist.reload_file()
            application.api_key_config.reload_file()
            application.flags.reload_file()

    app_._timer_thread = Thread(target=__reload_timer, args=(app_, ), daemon=True)
    app_._timer_thread.start()
    app_.bots = {}

    def reload_bot(the_updated_key, old_value, new_value, *args, **kwargs) -> None:
        # rebuild every bot from STORAGE_DIR (one subdirectory per bot) and
        # refresh the id<->name lookup caches from the database
        application = kwargs['app']
        for bot_dir in core.STORAGE_DIR.iterdir():
            if bot_dir.is_dir():
                application.bots[bot_dir.name] = mzk.MocaBot(
                    bot_dir.name, bot_dir)
        con = application.mysql.get_a_new_con()
        cursor = con.cursor()
        cursor.execute(core.GET_BOTS_QUERY)
        res = cursor.fetchall()
        con.close()
        application.dict_cache['id'] = {}
        application.dict_cache['name'] = {}
        # rows are (id, name) pairs; build both lookup directions
        for info in res:
            application.dict_cache['id'][info[0]] = info[1]
            application.dict_cache['name'][info[1]] = info[0]

    # initial load now, then re-run whenever the 'moca_bot_reload' flag changes
    reload_bot(None, None, None, app=app_)
    app_.flags.set('moca_bot_reload', False)
    # handler name includes the PID so each worker process registers its own
    app_.flags.add_handler('moca_bot_reload' + str(mzk.get_my_pid()),
                           'moca_bot_reload',
                           reload_bot,
                           kwargs={'app': app_})