Example #1
0
def test_simple_02():
    """Limiter combining two rates: 5 per 5 seconds and 7 per 9 seconds."""
    rate_1 = RequestRate(5, 5 * Duration.SECOND)
    rate_2 = RequestRate(7, 9 * Duration.SECOND)
    limiter2 = Limiter(rate_1, rate_2)
    item = 'tranvu'

    # Pushing 6 items inside one 5-second window must break Rate-1
    with pytest.raises(BucketFullException):
        for _ in range(6):
            limiter2.try_acquire(item)

    assert limiter2.get_current_volume(item) == 5

    sleep(6)
    # Rate-2's 9-second window has not elapsed yet, so nothing is evicted
    limiter2.try_acquire(item)
    limiter2.try_acquire(item)
    assert limiter2.get_current_volume(item) == 7

    # One more item breaks Rate-2
    with pytest.raises(BucketFullException):
        limiter2.try_acquire(item)

    sleep(6)
    # ~12 seconds in: only items from the last 9 seconds survive, plus this one
    limiter2.try_acquire(item)
    assert limiter2.get_current_volume(item) == 3

    # Rate-1's latest 5-second window holds a single item, leaving room for 4
    for _ in range(4):
        limiter2.try_acquire(item)

    # A fifth item in the same window breaks Rate-1 again
    with pytest.raises(BucketFullException):
        limiter2.try_acquire(item)

    # Within the last 9-second window there are now 7 items
    assert limiter2.get_current_volume(item) == 7

    # 6 more seconds: Rate-1's bucket has refreshed and Rate-2 holds 5 items
    sleep(6)
    limiter2.try_acquire(item)
    limiter2.try_acquire(item)

    # The next item breaks Rate-2 once more
    with pytest.raises(BucketFullException):
        limiter2.try_acquire(item)
Example #2
0
 def __init__(self):
     """Build the user whitelist and the multi-tier rate limiter."""
     self.whitelist = (list(SUDO_USERS) or []) + [OWNER_ID]
     Duration.CUSTOM = 15  # custom 15-second window
     self.sec_limit = RequestRate(6, Duration.CUSTOM)  # 6 per 15 seconds
     self.min_limit = RequestRate(20, Duration.MINUTE)  # 20 per minute
     self.hour_limit = RequestRate(100, Duration.HOUR)  # 100 per hour
     self.daily_limit = RequestRate(1000, Duration.DAY)  # 1000 per day
     self.limiter = Limiter(
         self.sec_limit, self.min_limit, self.hour_limit, self.daily_limit,
         bucket_class=MemoryListBucket,
     )
Example #3
0
 def __init__(self):
     """Build the user whitelist and the multi-tier rate limiter."""
     self.whitelist = (list(SUDO_USERS) or []) + (list(DEV_USERS) or [])
     # NOTE: these values are highly experimental and may be adjusted over time.
     Duration.CUSTOM = 15  # custom 15-second window
     self.sec_limit = RequestRate(6, Duration.CUSTOM)  # 6 per 15 seconds
     self.min_limit = RequestRate(20, Duration.MINUTE)  # 20 per minute
     self.hour_limit = RequestRate(100, Duration.HOUR)  # 100 per hour
     self.daily_limit = RequestRate(1000, Duration.DAY)  # 1000 per day
     self.limiter = Limiter(
         self.sec_limit, self.min_limit, self.hour_limit, self.daily_limit,
         bucket_class=MemoryListBucket,
     )
def test_concurrency(executor_class, bucket_class):
    """Run a fixed number of concurrent requests through one shared Limiter
    and verify the total wall time respects the configured rate.
    """
    logger.info(f"Testing {bucket_class.__name__} with {executor_class.__name__}")

    # Build the shared limiter; SQLite-backed buckets get a temp-file path
    limiter = Limiter(
        RequestRate(LIMIT_REQUESTS_PER_SECOND, Duration.SECOND),
        bucket_class=bucket_class,
        bucket_kwargs={
            "path": join(gettempdir(), f"test_{executor_class.__name__}.sqlite"),
        },
    )

    # Prepare the request callable, bound to a set of bucket ids
    bucket_ids = [f"{executor_class.__name__}_bucket_{i}" for i in range(N_BUCKETS)]
    start_time = perf_counter()
    request_func = partial(_send_request, limiter, bucket_ids, start_time)

    # Fan the requests out over the worker pool
    with executor_class(max_workers=N_WORKERS) as executor:
        list(executor.map(request_func, range(N_REQUESTS), timeout=300))

    # The run cannot have finished faster than the rate limit allows
    elapsed = perf_counter() - start_time
    expected_min_time = (N_REQUESTS - 1) / LIMIT_REQUESTS_PER_SECOND
    worker_type = "threads" if executor_class is ThreadPoolExecutor else "processes"
    logger.info(
        f"Ran {N_REQUESTS} requests with {N_WORKERS} {worker_type} in {elapsed:.2f} seconds\n"
        f"With a rate limit of {LIMIT_REQUESTS_PER_SECOND}/second, expected at least "
        f"{expected_min_time} seconds"
    )
    assert elapsed >= expected_min_time
Example #5
0
def test_simple_01(time_function):
    """Single-rate Limiter backed by a RedisBucket."""
    rate = RequestRate(3, 5 * Duration.SECOND)
    # The bucket name depends on time_function so consecutive runs
    # (whose time_function values differ) don't share state.
    limiter = Limiter(
        rate,
        bucket_class=RedisBucket,
        bucket_kwargs={"redis_pool": pool, "bucket_name": str(time_function)},
        time_function=time_function,
    )
    item = "vutran_list"

    # A fourth acquire inside the 5-second window must overflow the bucket
    with pytest.raises(BucketFullException):
        for _ in range(4):
            limiter.try_acquire(item)

    # After the window passes, the bucket accepts items again
    sleep(6)
    limiter.try_acquire(item)
    assert limiter.get_current_volume(item) == 1

    limiter.try_acquire(item)
    limiter.try_acquire(item)

    # Three items are in; one more overflows
    with pytest.raises(BucketFullException):
        limiter.try_acquire(item)
Example #6
0
def test_simple_01():
    """Single-rate Limiter backed by a RedisBucket."""
    rate = RequestRate(3, 5 * Duration.SECOND)
    limiter = Limiter(
        rate,
        bucket_class=RedisBucket,
        bucket_kwargs={"redis_pool": pool, "bucket_name": "test-bucket-1"},
    )
    item = 'vutran_list'

    # A fourth acquire inside the 5-second window must overflow
    with pytest.raises(BucketFullException):
        for _ in range(4):
            limiter.try_acquire(item)

    # Wait out the window, then the bucket accepts items again
    sleep(6)
    limiter.try_acquire(item)
    assert limiter.get_current_volume(item) == 1

    limiter.try_acquire(item)
    limiter.try_acquire(item)

    # The bucket is full once more
    with pytest.raises(BucketFullException):
        limiter.try_acquire(item)
Example #7
0
    def __init__(self):
        """Load runtime configuration from docker secrets / environment:
        Postgres and Redis URLs, third-party service configs, Discord
        settings, and a shared Redis-backed Destiny API rate limiter.
        """
        Borg.__init__(self)

        # --- Postgres connection settings ---
        database_user = get_docker_secret("seraphsix_pg_db_user",
                                          default="seraphsix")
        database_password = get_docker_secret("seraphsix_pg_db_pass")
        database_host = get_docker_secret("seraphsix_pg_db_host",
                                          default="localhost")
        database_port = get_docker_secret("seraphsix_pg_db_port",
                                          default="5432")
        database_name = get_docker_secret("seraphsix_pg_db_name",
                                          default="seraphsix")
        self.database_conns = get_docker_secret("seraphsix_pg_db_conns",
                                                default=DB_MAX_CONNECTIONS,
                                                cast_to=int)

        database_auth = f"{database_user}:{database_password}"
        self.database_url = f"postgres://{database_auth}@{database_host}:{database_port}/{database_name}"

        # --- Redis connection settings (DB 1 is reserved for arq) ---
        redis_password = get_docker_secret("seraphsix_redis_pass")
        redis_host = get_docker_secret("seraphsix_redis_host",
                                       default="localhost")
        redis_port = get_docker_secret("seraphsix_redis_port", default="6379")
        self.redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}"

        self.arq_redis = RedisSettings.from_dsn(f"{self.redis_url}/1")

        # --- Third-party service and Discord settings ---
        self.destiny = DestinyConfig()
        self.the100 = The100Config()
        self.twitter = TwitterConfig()
        self.discord_api_key = get_docker_secret("discord_api_key")
        self.home_server = get_docker_secret("home_server", cast_to=int)
        self.log_channel = get_docker_secret("home_server_log_channel",
                                             cast_to=int)
        self.reg_channel = get_docker_secret("home_server_reg_channel",
                                             cast_to=int)
        self.enable_activity_tracking = get_docker_secret(
            "enable_activity_tracking", cast_to=bool)

        # The env var holds escaped bytes; decode/re-encode to raw key bytes
        self.flask_app_key = (os.environb[b"FLASK_APP_KEY"].decode(
            "unicode-escape").encode("latin-1"))

        # Optional YYYY-MM-DD cutoff, normalized to a UTC-aware datetime
        self.activity_cutoff = get_docker_secret("activity_cutoff")
        if self.activity_cutoff:
            self.activity_cutoff = datetime.strptime(
                self.activity_cutoff, "%Y-%m-%d").astimezone(tz=pytz.utc)

        self.root_log_level = get_docker_secret("root_log_level",
                                                default=ROOT_LOG_LEVEL,
                                                cast_to=str)

        # --- Destiny API rate limiter: 20 requests/second, shared via Redis ---
        bucket_kwargs = {
            "redis_pool": ConnectionPool.from_url(self.redis_url),
            "bucket_name": "ratelimit",
        }
        destiny_api_rate = RequestRate(20, Duration.SECOND)
        self.destiny_api_limiter = Limiter(destiny_api_rate,
                                           bucket_class=RedisBucket,
                                           bucket_kwargs=bucket_kwargs)
# Module-level limiter: the bucket state must outlive individual limit()
# calls. The previous version built a fresh Limiter inside limit(), so every
# call started with an empty bucket and the 2-per-day cap was never enforced.
_RATE = RequestRate(2, Duration.DAY)
_LIMITER = Limiter(_RATE, bucket_class=MemoryListBucket)


def limit(user):
    """Return True if *user* is still within the 2-requests-per-day limit.

    Consumes one slot from the user's bucket on success; returns False
    once the bucket is full (no exception propagates to the caller).
    """
    try:
        _LIMITER.try_acquire(user)
        return True
    except BucketFullException:
        return False
def _send_requests(start_time: float, end_time: float, db_path: str, n_process: int, n_requests: int):
    """Send several rate-limited requests, with a separate Limiter per process."""
    per_process_limiter = Limiter(
        RequestRate(LIMIT_REQUESTS_PER_SECOND, Duration.SECOND),
        bucket_class=FileLockSQLiteBucket,
        bucket_kwargs={"path": db_path},
    )
    ids = [f"bucket_{i}" for i in range(N_BUCKETS)]

    # Keep issuing requests (letting the limiter insert delays) until the deadline
    while perf_counter() < end_time:
        with per_process_limiter.ratelimit(*ids, delay=True):
            logger.debug(f"[Process {n_process}] t + {(perf_counter() - start_time):.5f}: Request {n_requests}")
            n_requests += 1
Example #10
0
async def test_ratelimit__exceeds_max_delay_async(time_function):
    """Test ratelimit decorator with automatic delays - async version"""
    limiter = Limiter(RequestRate(5, Duration.MINUTE),
                      time_function=time_function)

    @limiter.ratelimit(uuid4(), delay=True, max_delay=10)
    async def limited_function():
        pass

    # Ten concurrent calls exceed the 5/minute rate; the required delay is
    # longer than max_delay, so the gather must raise instead of waiting
    with pytest.raises(BucketFullException):
        await asyncio.gather(*[limited_function() for _ in range(10)])
Example #11
0
def test_ratelimit__exceeds_max_delay_synchronous(time_function):
    """Test ratelimit decorator with automatic delays - synchronous version"""
    limiter = Limiter(RequestRate(5, Duration.MINUTE),
                      time_function=time_function)

    @limiter.ratelimit(uuid4(), delay=True, max_delay=10)
    def limited_function():
        pass

    # Ten rapid calls exceed the 5/minute rate; the needed delay is above
    # max_delay, so the call must raise instead of sleeping
    with pytest.raises(BucketFullException):
        for _ in range(10):
            limited_function()
Example #12
0
def test_remaining_time(time_function):
    """The remaining_time metadata returned from a BucketFullException should take into account
    the time elapsed during limited calls (including values less than 1 second).
    """
    limiter2 = Limiter(RequestRate(5, Duration.SECOND), time_function=time_function)
    for _ in range(5):
        limiter2.try_acquire("item")
    sleep(0.1)

    # The sixth acquire overflows; capture the reported wait time
    try:
        limiter2.try_acquire("item")
    except BucketFullException as exc:
        remaining = exc.meta_info["remaining_time"]

    # ~0.1s has already elapsed, so just under 0.9s should remain
    assert 0.8 < remaining < 0.9
Example #13
0
def test_redis_cluster():
    """Testing RedisClusterBucket initialization"""
    rate = RequestRate(3, 5 * Duration.SECOND)
    bucket = RedisClusterBucket(pool=pool, bucket_name="any-name", identity="id-string")
    # Constructing a Limiter over the cluster bucket class must not raise
    Limiter(
        rate,
        bucket_class=RedisClusterBucket,
        bucket_kwargs={"redis_pool": pool, "bucket_name": "test-bucket-1"},
    )

    # The standalone bucket exposes a live connection
    assert bucket.get_connection()
Example #14
0
def test_ratelimit__delay_synchronous(time_function):
    """Test ratelimit decorator with automatic delays - synchronous version"""
    limiter = Limiter(RequestRate(10, Duration.SECOND),
                      time_function=time_function)

    @limiter.ratelimit(uuid4(), delay=True)
    def limited_function():
        pass

    # 22 calls at 10/second should force roughly two seconds of delay
    begin = time()
    for _ in range(22):
        limited_function()

    # Exact timing depends on the environment, but should be just over 2 seconds
    elapsed = time() - begin
    assert 2 < elapsed <= 3
Example #15
0
    def __init__(self):
        """Load runtime configuration from docker secrets / environment:
        Postgres and Redis URLs, third-party service configs, Discord
        settings, and a shared Redis-backed Destiny API rate limiter.
        """
        Borg.__init__(self)

        # --- Postgres connection settings ---
        database_user = get_docker_secret('seraphsix_pg_db_user', default='seraphsix')
        database_password = get_docker_secret('seraphsix_pg_db_pass')
        database_host = get_docker_secret('seraphsix_pg_db_host', default='localhost')
        database_port = get_docker_secret('seraphsix_pg_db_port', default='5432')
        database_name = get_docker_secret('seraphsix_pg_db_name', default='seraphsix')
        self.database_conns = get_docker_secret('seraphsix_pg_db_conns', default=DB_MAX_CONNECTIONS, cast_to=int)

        database_auth = f"{database_user}:{database_password}"
        self.database_url = f"postgres://{database_auth}@{database_host}:{database_port}/{database_name}"

        # --- Redis connection settings (DB 1 is reserved for arq) ---
        redis_password = get_docker_secret('seraphsix_redis_pass')
        redis_host = get_docker_secret('seraphsix_redis_host', default='localhost')
        redis_port = get_docker_secret('seraphsix_redis_port', default='6379')
        self.redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}"

        self.arq_redis = RedisSettings.from_dsn(f'{self.redis_url}/1')

        # --- Third-party service and Discord settings ---
        self.destiny = DestinyConfig()
        self.the100 = The100Config()
        self.twitter = TwitterConfig()
        self.discord_api_key = get_docker_secret('discord_api_key')
        self.home_server = get_docker_secret('home_server', cast_to=int)
        self.log_channel = get_docker_secret('home_server_log_channel', cast_to=int)
        self.reg_channel = get_docker_secret('home_server_reg_channel', cast_to=int)
        self.enable_activity_tracking = get_docker_secret('enable_activity_tracking', cast_to=bool)

        # The env var holds escaped bytes; decode/re-encode to raw key bytes
        self.flask_app_key = os.environb[b'FLASK_APP_KEY'].decode('unicode-escape').encode('latin-1')

        # Optional YYYY-MM-DD cutoff, normalized to a UTC-aware datetime
        self.activity_cutoff = get_docker_secret('activity_cutoff')
        if self.activity_cutoff:
            self.activity_cutoff = datetime.strptime(self.activity_cutoff, '%Y-%m-%d').astimezone(tz=pytz.utc)

        self.root_log_level = get_docker_secret('root_log_level', default=ROOT_LOG_LEVEL, cast_to=str)

        # --- Destiny API rate limiter: 20 requests/second, shared via Redis ---
        bucket_kwargs = {
            "redis_pool": ConnectionPool.from_url(self.redis_url),
            "bucket_name": "ratelimit"
        }
        destiny_api_rate = RequestRate(20, Duration.SECOND)
        self.destiny_api_limiter = Limiter(destiny_api_rate, bucket_class=RedisBucket, bucket_kwargs=bucket_kwargs)
Example #16
0
def test_simple_03():
    """Single-rate Limiter with MemoryListBucket"""
    rate = RequestRate(3, 5 * Duration.SECOND)
    limiter = Limiter(rate, bucket_class=MemoryListBucket)
    item = "vutran_list"

    # A fourth acquire within the 5-second window overflows the bucket
    with pytest.raises(BucketFullException):
        for _ in range(4):
            limiter.try_acquire(item)

    # Once the window has passed, acquiring succeeds again
    sleep(6)
    limiter.try_acquire(item)
    assert limiter.get_current_volume(item) == 1

    limiter.try_acquire(item)
    limiter.try_acquire(item)

    # Bucket is full once more
    with pytest.raises(BucketFullException):
        limiter.try_acquire(item)
Example #17
0
async def test_ratelimit__async(time_function):
    """Test ratelimit decorator - async version"""
    limiter = Limiter(RequestRate(10, Duration.SECOND),
                      time_function=time_function)

    @limiter.ratelimit(uuid4())
    async def limited_function():
        pass

    # Pacing calls well under 10 requests/sec raises nothing
    for _ in range(12):
        await limited_function()
        await asyncio.sleep(0.2)

    await asyncio.sleep(1)

    # Firing 12 calls at once exceeds 10/sec and must raise
    with pytest.raises(BucketFullException):
        await asyncio.gather(*[limited_function() for _ in range(12)])
Example #18
0
def test_ratelimit__synchronous(time_function):
    """Test ratelimit decorator - synchronous version"""
    limiter = Limiter(RequestRate(10, Duration.SECOND),
                      time_function=time_function)

    @limiter.ratelimit(uuid4())
    def limited_function():
        pass

    # Pacing calls at ~10/sec stays within the limit
    for _ in range(12):
        limited_function()
        sleep(0.1)

    sleep(1)

    # A rapid burst of 12 calls exceeds 10/sec and must raise
    with pytest.raises(BucketFullException):
        for _ in range(12):
            limited_function()
Example #19
0
def test_simple_01():
    """Single-rate Limiter."""
    rate = RequestRate(3, 5 * Duration.SECOND)
    limiter = Limiter(rate)
    item = 'vutran'

    # The fourth acquire inside the 5-second window overflows
    with pytest.raises(BucketFullException):
        for _ in range(4):
            limiter.try_acquire(item)

    # After the window expires, acquiring works again
    sleep(6)
    limiter.try_acquire(item)
    assert limiter.get_current_volume(item) == 1

    limiter.try_acquire(item)
    limiter.try_acquire(item)

    # Full again
    with pytest.raises(BucketFullException):
        limiter.try_acquire(item)
Example #20
0
def test_sleep(time_function):
    """Make requests at a rate of 6 requests per 5 seconds (avg. 1.2 requests per second).
    If each request takes ~0.5 seconds, then the bucket should be full after 6 requests (3 seconds).
    Run 15 requests, and expect a total of 2 delays required to stay within the rate limit.
    """
    limiter = Limiter(RequestRate(6, 5 * Duration.SECOND), time_function=time_function)
    # Wrap time.sleep so the number of forced delays can be counted
    track_sleep = Mock(side_effect=sleep)
    start = time()

    for i in range(15):
        try:
            limiter.try_acquire("test")
            print(f"[{time() - start:07.4f}] Pushed: {i+1} items")
            sleep(0.5)  # Simulated request rate
        except BucketFullException as err:
            # Bucket full: wait out the reported remaining time (counted)
            print(err.meta_info)
            track_sleep(err.meta_info["remaining_time"])

    print(f"Elapsed: {time() - start:07.4f} seconds")
    assert track_sleep.call_count == 2
Example #21
0
def test_flushing():
    """Multi-rates Limiter with RedisBucket"""
    limiter = Limiter(
        RequestRate(5, 5 * Duration.SECOND),
        bucket_class=RedisBucket,
        bucket_kwargs={"redis_pool": pool, "bucket_name": "Flushing-Bucket"},
    )
    item = "redis-test-item"

    for _ in range(3):
        limiter.try_acquire(item)

    # Three items are in, and exactly one bucket gets flushed
    assert limiter.get_current_volume(item) == 3
    assert limiter.flush_all() == 1

    # After the flush, the bucket is empty
    assert limiter.get_current_volume(item) == 0
Example #22
0
def test_simple_01():
    """Single-rate Limiter"""
    # A Limiter requires at least one rate
    with pytest.raises(InvalidParams):
        Limiter()

    # These two rates over the same interval conflict and are rejected
    with pytest.raises(InvalidParams):
        rate_1 = RequestRate(3, 5 * Duration.SECOND)
        rate_2 = RequestRate(4, 5 * Duration.SECOND)
        Limiter(rate_1, rate_2)

    rate = RequestRate(3, 5 * Duration.SECOND)

    # RequestRate fields are read-only after construction
    with pytest.raises(ImmutableClassProperty):
        rate.limit = 10
    with pytest.raises(ImmutableClassProperty):
        rate.interval = 10

    limiter = Limiter(rate)
    item = "vutran"

    # Acquiring 4 items one second apart must overflow on the last one,
    # and the exception carries readable metadata
    has_raised = False
    try:
        for _ in range(4):
            limiter.try_acquire(item)
            sleep(1)
    except BucketFullException as err:
        has_raised = True
        print(err)
        assert str(err)
        assert isinstance(err.meta_info, dict)
        assert 1.9 < err.meta_info["remaining_time"] < 2.0

    assert has_raised

    # After the window passes, the bucket accepts items again
    sleep(6)
    limiter.try_acquire(item)
    assert limiter.get_current_volume(item) == 1

    limiter.try_acquire(item)
    limiter.try_acquire(item)

    with pytest.raises(BucketFullException):
        limiter.try_acquire(item)
Example #23
0
def test_ratelimit__contextmanager_synchronous(time_function):
    """Test ratelimit decorator with contextmanager - synchronous version
    Aside from using __enter__ and __exit__, all behavior is identical to the decorator version,
    so this only needs one test case
    """
    limiter = Limiter(RequestRate(10, Duration.SECOND),
                      time_function=time_function)
    identity = uuid4()

    def limited_function():
        with limiter.ratelimit(identity):
            pass

    # Pacing at ~10/sec stays under the limit
    for _ in range(12):
        limited_function()
        sleep(0.1)

    sleep(1)

    # A burst of 12 calls exceeds 10/sec and raises
    with pytest.raises(BucketFullException):
        for _ in range(12):
            limited_function()
Example #24
0
async def test_ratelimit__contextmanager_async(time_function):
    """Test ratelimit decorator with contextmanager - async version
    Aside from using __aenter__ and __aexit__, all behavior is identical to the decorator version,
    so this only needs one test case
    """
    limiter = Limiter(RequestRate(10, Duration.SECOND),
                      time_function=time_function)
    identity = uuid4()

    async def limited_function():
        async with limiter.ratelimit(identity):
            pass

    # Staying under 10 requests/sec raises nothing
    for _ in range(12):
        await limited_function()
        await asyncio.sleep(0.1)

    await asyncio.sleep(1)

    # 12 concurrent calls exceed 10/sec and must raise
    with pytest.raises(BucketFullException):
        await asyncio.gather(*[limited_function() for _ in range(12)])
Example #25
0
from .Constants import NOTES_URL
from .Constants import POOL_URL
from .Constants import POOLS_URL
from .Constants import POST_URL
from .Constants import POSTS_URL
from .Exceptions import UserError

if TYPE_CHECKING:
    import aiohttp
    import requests

# Generic helpers: API results may be returned directly (sync client) or
# as awaitables (async client).
T = TypeVar("T")
MaybeAwaitable = Union[T, Awaitable[T]]
RequestResponse = MaybeAwaitable[dict]
ArrayRequestResponse = MaybeAwaitable[List[dict]]
# Module-wide limiter: 2 requests per interval of 1 (presumably 1 second,
# i.e. Duration.SECOND — TODO confirm against pyrate_limiter's units)
limiter = Limiter(RequestRate(2, 1))


class AbstractYippi(ABC):
    """An abstract class (abc) for all the Yippi's client.

    Generally you don't really need to use this, except if you want to use
    different implementation for the client.

    Args:
        project_name: Your project's name where this library is going to be used.
        version: You project's version number.
        creator: Your e621 username.
        session: The HTTP client session object.
        loop: The event loop to run on. This is only required on async client.
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        },
    }
}


class SCRedisBucket(RedisBucket):
    """Override RedisBucket to use django-redis default client"""
    def get_connection(self):
        # The stored "pool" is already a usable django-redis client,
        # so return it directly instead of opening a new connection.
        return self._pool


# Initialize limiter with django-redis connection
redis_connection = django_redis.get_redis_connection("default")

# Shared rate: 2 requests per 10-second window
rate = RequestRate(2, 10 * Duration.SECOND)


def test_acquire(time_function):
    # Separate buckets used to distinct values from previous run,
    # as time_function return value has different int part.
    bucket_kwargs = {
        "bucket_name": str(time_function),
        "redis_pool": redis_connection,
    }

    limiter = Limiter(
        rate,
        bucket_class=SCRedisBucket,
        bucket_kwargs=bucket_kwargs,
        time_function=time_function,
Example #27
0
    def __init__(self, bot):
        """Store the bot handle, build the hourly kick/ban rate limiters,
        and register the default global/guild config structure.
        """
        self.bot = bot
        self.config = Config.get_conf(self, 8818154, force_registration=True)
        # PyRateLimit.init(redis_host="localhost", redis_port=6379)
        # Hourly action caps: 5 kicks and 3 bans per hour
        hourly_rate5 = RequestRate(5, Duration.HOUR)
        hourly_rate3 = RequestRate(3, Duration.HOUR)
        self.kicklimiter = Limiter(hourly_rate5)
        self.banlimiter = Limiter(hourly_rate3)
        # self.kicklimit = PyRateLimit()
        # self.kicklimit.create(3600, 5)
        # self.banlimit = PyRateLimit()
        # self.banlimit.create(3600, 3)

        # Global defaults: per-event notification subscriber lists and the
        # channels notifications get posted to
        default_global = {
            'notifs': {
                'kick': [],
                'ban': [],
                'mute': [],
                'jail': [],
                'channelperms': [],
                'editchannel': [],
                'deletemessages': [],
                'ratelimit': [],
                'adminrole': [],
                'bot':[],
                'warn':[]
            },
            'notifchannels' : {
                'kick': [],
                'ban': [],
                'mute': [],
                'jail': [],
                'channelperms': [],
                'editchannel': [],
                'deletemessages': [],
                'ratelimit': [],
                'adminrole': [],
                'bot':[],
                'warn':[]
            }
        }
        # Per-guild defaults: permission lists per action and the role ids
        # used for warnings / jail / mute
        default_guild = {
            'perms': {
                'kick': [],
                'ban': [],
                'mute': [],
                'jail': [],
                'channelperms': [],
                'editchannel': [],
                'deletemessages': [],
                'warn': []
            },
            'roles': {
                'warning1': None,
                'warning2': None,
                'warning3+': None,
                'jailed': None,
                'muted': None
            }
        }
        self.config.register_guild(**default_guild)   
        self.config.register_global(**default_global)   
        # Key lists used elsewhere to iterate over the config sections above
        self.permkeys = [
            'kick',
            'ban',
            'mute',
            'jail',
            'channelperms',
            'editchannel',
            'deletemessages',
            'warn'
        ]
        self.notifkeys = [
            'kick',
            'ban',
            'mute',
            'jail',
            'channelperms',
            'editchannel',
            'deletemessages',
            'ratelimit',
            'adminrole',
            'bot',
            'warn'
        ]
        self.rolekeys = [
            'warning1',
            'warning2',
            'warning3+',
            'jailed',
            'muted'
        ]
from google.auth.credentials import Credentials
from pyrate_limiter import Duration
from pyrate_limiter import Limiter
from pyrate_limiter import MemoryListBucket
from pyrate_limiter import RequestRate
from requests import Response
from requests import Session
from requests_oauth2 import OAuth2BearerToken

import google.auth
import google.auth.transport.requests

from clouddq.integration.gcp_credentials import GcpCredentials

# Shared limiter for API calls: at most 2 requests per second and
# 8 per minute, tracked in an in-memory bucket.
limiter = Limiter(
    RequestRate(2, Duration.SECOND),
    RequestRate(8, Duration.MINUTE),
    bucket_class=MemoryListBucket,
)

logger = logging.getLogger(__name__)


class DataplexClient:
    _gcp_credentials: GcpCredentials
    _headers: dict
    _session: Session
    _auth_token: str
    gcp_project_id: str
    location_id: str
    lake_name: str
Example #29
0
def test_simple_02(time_function):
    """Multi-rates Limiter with RedisBucket"""
    rate_1 = RequestRate(5, 5 * Duration.SECOND)
    rate_2 = RequestRate(7, 9 * Duration.SECOND)
    # The bucket name depends on time_function so consecutive runs
    # (whose time_function values differ) don't share state.
    limiter4 = Limiter(
        rate_1,
        rate_2,
        bucket_class=RedisBucket,
        bucket_kwargs={"redis_pool": pool, "bucket_name": str(time_function)},
        time_function=time_function,
    )
    item = "redis-test-item"

    # Six items inside one 5-second window break Rate-1
    with pytest.raises(BucketFullException):
        for _ in range(6):
            limiter4.try_acquire(item)

    assert limiter4.get_current_volume(item) == 5

    sleep(6.5)
    # Rate-2's 9-second window has not elapsed, so every item is retained
    limiter4.try_acquire(item)
    limiter4.try_acquire(item)
    assert limiter4.get_current_volume(item) == 7

    # One more item breaks Rate-2
    with pytest.raises(BucketFullException):
        limiter4.try_acquire(item)

    sleep(6)
    # ~12 seconds in: only the last 9 seconds of items survive, plus this one
    limiter4.try_acquire(item)
    assert limiter4.get_current_volume(item) == 3

    # Rate-1's current 5-second window holds a single item, so 4 more fit
    for _ in range(4):
        limiter4.try_acquire(item)

    # The fifth item in the same window breaks Rate-1 again
    with pytest.raises(BucketFullException):
        limiter4.try_acquire(item)

    # Within the last 9-second window there are now 7 items
    assert limiter4.get_current_volume(item) == 7

    # 6 more seconds: Rate-1's bucket has refreshed and Rate-2 holds 5 items
    sleep(6)
    limiter4.try_acquire(item)
    limiter4.try_acquire(item)

    # And Rate-2 overflows once more
    with pytest.raises(BucketFullException):
        limiter4.try_acquire(item)
import backoff
from pyrate_limiter import Limiter, RequestRate, Duration, MemoryListBucket

from pywallhaven import Wallhaven
from pywallhaven.exceptions import RateLimitError

# 45 calls per minute (presumably matching wallhaven.cc's API limit — confirm)
rate = RequestRate(45, Duration.MINUTE)

limiter = Limiter(rate, bucket_class=MemoryListBucket)

# since the above limiter is global, it can be reused in other calls such as a request to download a specific wallpaper


class LimitedWallhaven(Wallhaven):
    """Wallhaven client whose endpoint calls are rate limited, with
    exponential backoff on RateLimitError."""

    # Outer decorator retries with exponential backoff if the API still
    # rejects us; inner decorator delays to stay within the 45/min limit.
    @backoff.on_exception(backoff.expo, RateLimitError)
    @limiter.ratelimit('endpoint', delay=True)
    def get_endpoint(self, url) -> dict:
        """
        overrides Wallhaven.get_endpoint()

        Rate limited by limiter defined outside class to 45 calls per minute
        Also implements a backoff in case other processes make calls to wallhaven.cc
        """
        return super().get_endpoint(url)