def kick(self, user_id, guild_id, reason=None):
    """Remove a member from a guild, optionally recording an audit-log reason."""
    route = Route('DELETE', '/guilds/{guild_id}/members/{user_id}',
                  guild_id=guild_id, user_id=user_id)
    if reason:
        # aiohttp mishandles this header, so the reason rides on the query string
        route.url = '%s?reason=%s' % (route.url, _uriquote(reason))
    return self.request(route)
def request(self, verb, url, payload=None, multipart=None, *, files=None, reason=None):
    """Perform a blocking HTTP request against the endpoint.

    Retries up to five times on rate limits (429) and transient server
    errors (500/502) when ``self.sleep`` is enabled; otherwise raises
    immediately.  Returns the decoded JSON body, the raw text, or ``None``
    for an empty response body.

    Raises Forbidden (403), NotFound (404), or HTTPException otherwise.
    """
    headers = {}
    data = None
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)
    if reason:
        # audit-log reasons travel in a header and must be percent-encoded
        headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    if multipart is not None:
        data = {'payload_json': multipart.pop('payload_json')}
    for tries in range(5):
        for file in files:
            # rewind attachments so a retry re-sends the full content
            file.reset(seek=tries)
        r = self.session.request(verb, url, headers=headers, data=data, files=multipart)
        r.encoding = 'utf-8'
        # Coerce empty responses to return None for hygiene purposes
        response = r.text or None
        # compatibility with aiohttp
        r.status = r.status_code
        # FIX: use .get() — a response lacking a Content-Type header
        # (e.g. 204 No Content) previously raised KeyError here;
        # startswith() also tolerates a "; charset=utf-8" suffix.
        if r.headers.get('Content-Type', '').startswith('application/json'):
            response = json.loads(response)
        # check if we have rate limit header information
        remaining = r.headers.get('X-Ratelimit-Remaining')
        if remaining == '0' and r.status != 429 and self.sleep:
            # bucket exhausted: sleep pre-emptively to avoid a 429
            delta = utils._parse_ratelimit_header(r)
            time.sleep(delta)
        if 300 > r.status >= 200:
            return response
        # we are being rate limited
        if r.status == 429:
            if self.sleep:
                retry_after = response['retry_after'] / 1000.0
                time.sleep(retry_after)
                continue
            else:
                raise HTTPException(r, response)
        if self.sleep and r.status in (500, 502):
            time.sleep(1 + tries * 2)
            continue
        if r.status == 403:
            raise Forbidden(r, response)
        elif r.status == 404:
            raise NotFound(r, response)
        else:
            raise HTTPException(r, response)
    # no more retries
    raise HTTPException(r, response)
def __init__(self, **fields) -> None:
    """Expand the route's path template into a full URL, quoting string params."""
    quoted = {key: _uriquote(value) if isinstance(value, str) else value
              for key, value in fields.items()}
    self.url = self.BASE + self.path.format(**quoted)
    # route parameters used for rate-limit bucketing
    self._channel_id = quoted.get('channel_id')
    self._guild_id = quoted.get('guild_id')
def __init__(self, method, path, **params):
    """Store the HTTP verb and path, then expand path placeholders into a URL."""
    self.path = path
    self.method = method
    template = self.BASE + path
    if not params:
        self.url = template
        return
    safe = {}
    for name, value in params.items():
        # percent-encode textual parameters; ints (snowflakes) pass through as-is
        safe[name] = _uriquote(value) if isinstance(value, str) else value
    self.url = template.format(**safe)
async def request(self, verb, url, payload=None, multipart=None, *, files=None, reason=None):
    """Perform an HTTP request with up to five retries.

    Sleeps through 429 rate limits and 500/502 server errors; returns the
    decoded JSON body, the raw text, or ``None`` for an empty body.

    Raises Forbidden (403), NotFound (404), or HTTPException otherwise.
    """
    headers = {}
    data = None
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)
    if reason:
        # audit-log reasons travel in a header and must be percent-encoded
        headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    if multipart:
        data = aiohttp.FormData()
        for key, value in multipart.items():
            if key.startswith('file'):
                # value is (filename, fp, content_type)
                data.add_field(key, value[1], filename=value[0], content_type=value[2])
            else:
                data.add_field(key, value)
    for tries in range(5):
        for file in files:
            # rewind attachments so a retry re-sends the full content
            file.reset(seek=tries)
        async with self.session.request(verb, url, headers=headers, data=data) as r:
            # Coerce empty strings to return None for hygiene purposes
            response = (await r.text(encoding='utf-8')) or None
            # FIX: use .get() — a response lacking a Content-Type header
            # (e.g. 204 No Content) previously raised KeyError here;
            # startswith() also tolerates a "; charset=utf-8" suffix.
            if r.headers.get('Content-Type', '').startswith('application/json'):
                response = json.loads(response)
            # check if we have rate limit header information
            remaining = r.headers.get('X-Ratelimit-Remaining')
            if remaining == '0' and r.status != 429:
                # bucket exhausted: sleep pre-emptively to avoid a 429
                delta = utils._parse_ratelimit_header(r)
                await asyncio.sleep(delta)
            if 300 > r.status >= 200:
                return response
            # we are being rate limited
            if r.status == 429:
                retry_after = response['retry_after'] / 1000.0
                await asyncio.sleep(retry_after)
                continue
            if r.status in (500, 502):
                await asyncio.sleep(1 + tries * 2)
                continue
            if r.status == 403:
                raise Forbidden(r, response)
            elif r.status == 404:
                raise NotFound(r, response)
            else:
                raise HTTPException(r, response)
    # no more retries
    raise HTTPException(r, response)
def __init__(self, method, url, **parameters):
    """Record the HTTP method and fill the URL template, quoting string params."""
    self.method = method
    if not parameters:
        self.url = url
        return
    encoded = {}
    for key, value in parameters.items():
        # snowflake ints are substituted verbatim; strings get percent-encoded
        encoded[key] = _uriquote(value) if isinstance(value, str) else value
    self.url = url.format(**encoded)
def ban(self, user_id, guild_id, delete_message_days=1, reason=None):
    """Ban a guild member, optionally pruning recent messages and logging a reason."""
    route = Route('PUT', '/guilds/{guild_id}/bans/{user_id}',
                  guild_id=guild_id, user_id=user_id)
    query = {'delete-message-days': delete_message_days}
    if reason:
        # aiohttp mangles this header, so pass the reason as a query parameter
        route.url = route.url + '?reason=' + _uriquote(reason)
    return self.request(route, params=query)
def kick(self, user_id, guild_id, reason=None):
    """Kick ``user_id`` from ``guild_id``; ``reason`` goes to the audit log."""
    route = Route(
        "DELETE",
        "/guilds/{guild_id}/members/{user_id}",
        guild_id=guild_id,
        user_id=user_id,
    )
    if reason:
        # aiohttp chokes on this header, so append the reason as a query string
        route.url = route.url + "?reason=" + _uriquote(reason)
    return self.request(route)
def __init__(self, method, path, **parameters):
    """Represent a REST route: a verb plus a filled-in URL template."""
    self.path = path
    self.method = method
    template = self.BASE + path
    if not parameters:
        self.url = template
    else:
        substitutions = {}
        for name, value in parameters.items():
            # percent-encode textual values; snowflake ints pass straight through
            substitutions[name] = _uriquote(value) if isinstance(value, str) else value
        self.url = template.format(**substitutions)
    # major parameters: the per-channel / per-guild rate-limit discriminators
    self.channel_id = parameters.get('channel_id')
    self.guild_id = parameters.get('guild_id')
def ban(self, user_id, guild_id, delete_message_days=1, reason=None):
    """Ban ``user_id`` from ``guild_id``, deleting up to ``delete_message_days`` of messages."""
    route = Route("PUT", "/guilds/{guild_id}/bans/{user_id}",
                  guild_id=guild_id, user_id=user_id)
    query = {"delete-message-days": delete_message_days}
    if reason:
        # aiohttp mishandles this header; encode the reason into the URL instead
        route.url = "%s?reason=%s" % (route.url, _uriquote(reason))
    return self.request(route, params=query)
def __init__(self, method: str, path: str, **parameters: Any) -> None:
    """Build a route, expanding ``path`` placeholders into the final URL."""
    self.path: str = path
    self.method: str = method
    target = self.BASE + path
    if parameters:
        escaped = {name: _uriquote(value) if isinstance(value, str) else value
                   for name, value in parameters.items()}
        target = target.format_map(escaped)
    self.url: str = target
    # major parameters: discriminators used for rate-limit bucketing
    self.channel_id = parameters.get("channel_id")
    self.guild_id = parameters.get("guild_id")
    self.webhook_id = parameters.get("webhook_id")
    self.webhook_token = parameters.get("webhook_token")
    self.interaction_id = parameters.get("interaction_id")
async def request(self, verb, url, payload=None, multipart=None, *, files=None, reason=None):
    """Perform a webhook HTTP request with up to five retries.

    Sleeps through 429 rate limits and 500/502 server errors; returns the
    decoded JSON body, the raw text, or ``None`` for an empty body.

    Raises Forbidden (403), NotFound (404), DiscordServerError (>=500
    after retries), or HTTPException otherwise.
    """
    headers = {}
    data = None
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)
    if reason:
        # audit-log reasons travel in a header and must be percent-encoded
        headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    if multipart:
        data = aiohttp.FormData()
        for key, value in multipart.items():
            if key.startswith('file'):
                # value is (filename, fp, content_type)
                data.add_field(key, value[1], filename=value[0], content_type=value[2])
            else:
                data.add_field(key, value)
    base_url = url.replace(self._request_url, '/') or '/'
    _id = self._webhook_id
    for tries in range(5):
        for file in files:
            # rewind attachments so a retry re-sends the full content
            file.reset(seek=tries)
        async with self.session.request(verb, url, headers=headers, data=data) as r:
            log.debug('Webhook ID %s with %s %s has returned status code %s', _id, verb, base_url, r.status)
            # Coerce empty strings to return None for hygiene purposes
            response = (await r.text(encoding='utf-8')) or None
            # FIX: use .get() — a response lacking a Content-Type header
            # (e.g. 204 No Content) previously raised KeyError here.
            if r.headers.get('Content-Type', '') == 'application/json':
                response = json.loads(response)
            # check if we have rate limit header information
            remaining = r.headers.get('X-Ratelimit-Remaining')
            if remaining == '0' and r.status != 429:
                delta = utils._parse_ratelimit_header(r)
                log.debug('Webhook ID %s has been pre-emptively rate limited, waiting %.2f seconds', _id, delta)
                await asyncio.sleep(delta)
            if 300 > r.status >= 200:
                return response
            # we are being rate limited
            if r.status == 429:
                if not r.headers.get('Via'):
                    # Banned by Cloudflare more than likely.
                    # FIX: raise with the *response* body, not the request
                    # payload (`data` is the outgoing FormData/JSON here).
                    raise HTTPException(r, response)
                retry_after = response['retry_after'] / 1000.0
                log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', _id, retry_after)
                await asyncio.sleep(retry_after)
                continue
            if r.status in (500, 502):
                await asyncio.sleep(1 + tries * 2)
                continue
            if r.status == 403:
                raise Forbidden(r, response)
            elif r.status == 404:
                raise NotFound(r, response)
            else:
                raise HTTPException(r, response)
    # no more retries
    if r.status >= 500:
        raise DiscordServerError(r, response)
    raise HTTPException(r, response)
def request(self, verb, url, payload=None, multipart=None, *, files=None, reason=None):
    """Perform a blocking webhook HTTP request with up to five retries.

    When ``self.sleep`` is enabled, sleeps through 429 rate limits and
    500/502 server errors; otherwise raises immediately.  Returns the
    decoded JSON body, the raw text, or ``None`` for an empty body.

    Raises Forbidden (403), NotFound (404), DiscordServerError (>=500
    after retries), or HTTPException otherwise.
    """
    headers = {}
    data = None
    files = files or []
    if payload:
        headers['Content-Type'] = 'application/json'
        data = utils.to_json(payload)
    if reason:
        # audit-log reasons travel in a header and must be percent-encoded
        headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    if multipart is not None:
        data = {'payload_json': multipart.pop('payload_json')}
    base_url = url.replace(self._request_url, '/') or '/'
    _id = self._webhook_id
    for tries in range(5):
        for file in files:
            # rewind attachments so a retry re-sends the full content
            file.reset(seek=tries)
        r = self.session.request(verb, url, headers=headers, data=data, files=multipart)
        r.encoding = 'utf-8'
        # Coerce empty responses to return None for hygiene purposes
        response = r.text or None
        # compatibility with aiohttp
        r.status = r.status_code
        log.debug('Webhook ID %s with %s %s has returned status code %s', _id, verb, base_url, r.status)
        # FIX: use .get() — a response lacking a Content-Type header
        # (e.g. 204 No Content) previously raised KeyError here.
        if r.headers.get('Content-Type', '') == 'application/json':
            response = json.loads(response)
        # check if we have rate limit header information
        remaining = r.headers.get('X-Ratelimit-Remaining')
        if remaining == '0' and r.status != 429 and self.sleep:
            delta = utils._parse_ratelimit_header(r)
            log.debug('Webhook ID %s has been pre-emptively rate limited, waiting %.2f seconds', _id, delta)
            time.sleep(delta)
        if 300 > r.status >= 200:
            return response
        # we are being rate limited
        if r.status == 429:
            if self.sleep:
                if not r.headers.get('Via'):
                    # Banned by Cloudflare more than likely.
                    # FIX: raise with the *response* body, not the request
                    # payload (`data` is the outgoing multipart/JSON here).
                    raise HTTPException(r, response)
                retry_after = response['retry_after'] / 1000.0
                log.warning('Webhook ID %s is rate limited. Retrying in %.2f seconds', _id, retry_after)
                time.sleep(retry_after)
                continue
            else:
                raise HTTPException(r, response)
        if self.sleep and r.status in (500, 502):
            time.sleep(1 + tries * 2)
            continue
        if r.status == 403:
            raise Forbidden(r, response)
        elif r.status == 404:
            raise NotFound(r, response)
        else:
            raise HTTPException(r, response)
    # no more retries
    if r.status >= 500:
        raise DiscordServerError(r, response)
    raise HTTPException(r, response)
async def request(self, route, *, files=None, **kwargs):
    """Perform a rate-limit-aware request on ``route`` with up to five retries.

    Serializes requests per rate-limit bucket via a per-bucket asyncio.Lock,
    waits out the global rate limit, and returns the parsed JSON (or text)
    body on 2xx.  Raises Forbidden, NotFound, DiscordServerError, or
    HTTPException on error statuses.
    """
    bucket = route.bucket
    method = route.method
    url = route.url
    # one lock per bucket so concurrent requests to the same bucket queue up
    lock = self._locks.get(bucket)
    if lock is None:
        lock = asyncio.Lock()
        if bucket is not None:
            self._locks[bucket] = lock
    # header creation
    headers = {
        'User-Agent': self.user_agent,
        'X-Ratelimit-Precision': 'millisecond',
    }
    if self.token is not None:
        headers['Authorization'] = 'Bot ' + self.token if self.bot_token else self.token
    # some checking if it's a JSON request
    if 'json' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = utils.to_json(kwargs.pop('json'))
    try:
        reason = kwargs.pop('reason')
    except KeyError:
        pass
    else:
        if reason:
            # audit-log reason travels in a header, percent-encoded
            headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    kwargs['headers'] = headers
    # Proxy support
    if self.proxy is not None:
        kwargs['proxy'] = self.proxy
    if self.proxy_auth is not None:
        kwargs['proxy_auth'] = self.proxy_auth
    if not self._global_over.is_set():
        # wait until the global lock is complete
        await self._global_over.wait()
    await lock.acquire()
    # MaybeUnlock releases the bucket lock on exit unless defer() was called
    with MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            if files:
                # rewind attachments so a retry re-sends the full content
                for f in files:
                    f.reset(seek=tries)
            try:
                async with self.__session.request(method, url, **kwargs) as r:
                    log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status)
                    # even errors have text involved in them so this is safe to call
                    data = await json_or_text(r)
                    # check if we have rate limit header information
                    remaining = r.headers.get('X-Ratelimit-Remaining')
                    if remaining == '0' and r.status != 429:
                        # we've depleted our current bucket
                        delta = utils._parse_ratelimit_header(r, use_clock=self.use_clock)
                        log.debug('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta)
                        # keep the lock held until the bucket resets, then
                        # release it from the event loop
                        maybe_lock.defer()
                        self.loop.call_later(delta, lock.release)
                    # the request was successful so just return the text/json
                    if 300 > r.status >= 200:
                        log.debug('%s %s has received %s', method, url, data)
                        return data
                    # we are being rate limited
                    if r.status == 429:
                        if not r.headers.get('Via'):
                            # Banned by Cloudflare more than likely.
                            raise HTTPException(r, data)
                        fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                        # sleep a bit
                        retry_after = data['retry_after'] / 1000.0
                        log.warning(fmt, retry_after, bucket)
                        # check if it's a global rate limit
                        is_global = data.get('global', False)
                        if is_global:
                            log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)
                            self._global_over.clear()
                        await asyncio.sleep(retry_after)
                        log.debug('Done sleeping for the rate limit. Retrying...')
                        # release the global lock now that the
                        # global rate limit has passed
                        if is_global:
                            self._global_over.set()
                            log.debug('Global rate limit is now over.')
                        continue
                    # we've received a 500 or 502, unconditional retry
                    if r.status in {500, 502}:
                        await asyncio.sleep(1 + tries * 2)
                        continue
                    # the usual error cases
                    if r.status == 403:
                        raise Forbidden(r, data)
                    elif r.status == 404:
                        raise NotFound(r, data)
                    elif r.status == 503:
                        raise DiscordServerError(r, data)
                    else:
                        raise HTTPException(r, data)
            # This is handling exceptions from the request
            except OSError as e:
                # Connection reset by peer
                # (errno 54 is macOS/BSD ECONNRESET, 10054 is Windows WSAECONNRESET)
                if tries < 4 and e.errno in (54, 10054):
                    continue
                raise
        # We've run out of retries, raise.
        if r.status >= 500:
            raise DiscordServerError(r, data)
        raise HTTPException(r, data)
async def request(self, route, *, files=None, **kwargs):
    """Perform a rate-limit-aware request with redis-backed instrumentation.

    Counts per-route requests, per-status responses, and 429s in redis,
    limits concurrency through ``self.semaphore``, and serializes each
    rate-limit bucket through an asyncio.Lock.  Returns the parsed body on
    2xx; raises Forbidden, NotFound, or HTTPException otherwise.
    """
    bucket = route.bucket
    method = route.method
    url = route.url
    # header creation
    headers = {
        'User-Agent': self.user_agent,
        'Authorization': 'Bot ' + self.token
    }
    # some checking if it's a JSON request
    if 'json' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = utils.to_json(kwargs.pop('json'))
    try:
        reason = kwargs.pop('reason')
    except KeyError:
        pass
    else:
        if reason:
            # audit-log reason travels in a header, percent-encoded
            headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    kwargs['headers'] = headers
    # Proxy support
    if self.proxy is not None:
        kwargs['proxy'] = self.proxy
    if self.proxy_auth is not None:
        kwargs['proxy_auth'] = self.proxy_auth
    # one lock per bucket so concurrent requests to the same bucket queue up
    if route.bucket in self.ratelimits:
        lock = self.ratelimits[route.bucket]
    else:
        lock = self.ratelimits[route.bucket] = asyncio.Lock()
    # Check if bucket ratelimit was hit and acquire the lock
    for tries in range(5):
        if files:
            # rewind attachments so a retry re-sends the full content
            for f in files:
                f.reset(seek=tries)
        if not self.global_over.is_set():
            await self.global_over.wait()
        await lock.acquire()
        # `unlock` tracks whether the finally block should release the lock,
        # or whether a call_later() has been scheduled to release it instead
        unlock = True
        await self.semaphore.acquire()
        # NOTE(review): relies on private Semaphore internals (_value,
        # _waiters) for metrics; these are CPython implementation details.
        # NOTE(review): the f-strings below have no placeholders — plain
        # string literals would do.
        await self.redis.set(f"requests:running", 50 - self.semaphore._value)
        await self.redis.set(f"requests:waiting", len(self.semaphore._waiters))
        try:
            await self.redis.hincrby(f"requests", f"{route.method}:{route.path}", 1)
            async with self.__session.request(method, url, **kwargs) as r:
                await self.redis.hincrby(f"responses", str(r.status), 1)
                log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status)
                # even errors have text involved in them so this is safe to call
                data = await json_or_text(r)
                # the request was successful so just return the text/json
                if 300 > r.status >= 200:
                    log.debug('%s %s has received %s', method, url, data)
                    remaining = r.headers.get('X-Ratelimit-Remaining')
                    if remaining == "0":
                        # bucket exhausted: keep the lock and release it later
                        delta = utils._parse_ratelimit_header(r, use_clock=False)
                        log.info('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta)
                        self.loop.call_later(delta, lock.release)
                        unlock = False
                    return data
                # we are being rate limited
                elif r.status == 429:
                    await self.redis.hincrby(f"429", route.path, 1)
                    if not r.headers.get('Via'):
                        # Banned by Cloudflare more than likely.
                        raise HTTPException(r, data)
                    # sleep a bit
                    # NOTE(review): no /1000 here unlike sibling variants —
                    # presumably this targets an API version that reports
                    # retry_after in seconds; confirm.
                    retry_after = data['retry_after']
                    # check if it's a global rate limit
                    is_global = data.get('global', False)
                    if is_global:
                        log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)
                        self.global_over.clear()
                        self.loop.call_later(retry_after, self.global_over.set)
                    else:
                        log.warning(
                            'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"',
                            retry_after, bucket
                        )
                        self.loop.call_later(retry_after, lock.release)
                        unlock = False
                    continue
                # we've received a 500 or 502, unconditional retry
                elif r.status in {500, 502}:
                    await asyncio.sleep(1 + tries * 2)
                    continue
                # the usual error cases
                if r.status == 403:
                    raise Forbidden(r, data)
                elif r.status == 404:
                    raise NotFound(r, data)
                else:
                    raise HTTPException(r, data)
        finally:
            # always free the concurrency slot and refresh metrics;
            # release the bucket lock unless its release was deferred above
            self.semaphore.release()
            await self.redis.set(f"requests:running", 50 - self.semaphore._value)
            await self.redis.set(f"requests:waiting", len(self.semaphore._waiters))
            if unlock and lock.locked():
                lock.release()
    # We've run out of retries, raise.
    raise HTTPException(r, data)
async def request(self, route, *, header_bypass_delay=None, **kwargs):
    """Perform a rate-limit-aware request on ``route`` with up to five retries.

    ``header_bypass_delay``, when given, overrides the bucket-reset delay
    parsed from the rate-limit headers.  Returns the parsed JSON (or text)
    body on 2xx; raises Forbidden, NotFound, or HTTPException otherwise.
    """
    bucket = route.bucket
    method = route.method
    url = route.url
    # one lock per bucket so concurrent requests to the same bucket queue up
    lock = self._locks.get(bucket)
    if lock is None:
        lock = asyncio.Lock(loop=self.loop)
        if bucket is not None:
            self._locks[bucket] = lock
    # header creation
    headers = {"User-Agent": self.user_agent}
    if self.token is not None:
        headers["Authorization"] = ("Bot " + self.token if self.bot_token else self.token)
    # some checking if it's a JSON request
    if "json" in kwargs:
        headers["Content-Type"] = "application/json"
        kwargs["data"] = utils.to_json(kwargs.pop("json"))
    try:
        reason = kwargs.pop("reason")
    except KeyError:
        pass
    else:
        if reason:
            # audit-log reason travels in a header, percent-encoded
            headers["X-Audit-Log-Reason"] = _uriquote(reason, safe="/ ")
    kwargs["headers"] = headers
    # Proxy support
    if self.proxy is not None:
        kwargs["proxy"] = self.proxy
    if self.proxy_auth is not None:
        kwargs["proxy_auth"] = self.proxy_auth
    if not self._global_over.is_set():
        # wait until the global lock is complete
        await self._global_over.wait()
    # acquiring the lock by awaiting it (legacy asyncio.Lock protocol)
    await lock
    # MaybeUnlock releases the bucket lock on exit unless defer() was called
    with MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            async with self._session.request(method, url, **kwargs) as r:
                log.debug(
                    "%s %s with %s has returned %s",
                    method,
                    url,
                    kwargs.get("data"),
                    r.status,
                )
                # even errors have text involved in them so this is safe to call
                data = await json_or_text(r)
                # check if we have rate limit header information
                remaining = r.headers.get("X-Ratelimit-Remaining")
                if remaining == "0" and r.status != 429:
                    # we've depleted our current bucket
                    if header_bypass_delay is None:
                        delta = utils._parse_ratelimit_header(r)
                    else:
                        delta = header_bypass_delay
                    log.debug(
                        "A rate limit bucket has been exhausted (bucket: %s, retry: %s).",
                        bucket,
                        delta,
                    )
                    # keep the lock held until the bucket resets
                    maybe_lock.defer()
                    self.loop.call_later(delta, lock.release)
                # the request was successful so just return the text/json
                if 300 > r.status >= 200:
                    log.debug("%s %s has received %s", method, url, data)
                    return data
                # we are being rate limited
                if r.status == 429:
                    fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                    # sleep a bit
                    retry_after = data["retry_after"] / 1000.0
                    log.info(fmt, retry_after, bucket)
                    # check if it's a global rate limit
                    is_global = data.get("global", False)
                    if is_global:
                        log.info(
                            "Global rate limit has been hit. Retrying in %.2f seconds.",
                            retry_after,
                        )
                        self._global_over.clear()
                    # NOTE: the loop= kwarg is deprecated in modern asyncio;
                    # kept here for the Python version this code targets
                    await asyncio.sleep(retry_after, loop=self.loop)
                    log.debug("Done sleeping for the rate limit. Retrying...")
                    # release the global lock now that the
                    # global rate limit has passed
                    if is_global:
                        self._global_over.set()
                        log.debug("Global rate limit is now over.")
                    continue
                # we've received a 500 or 502, unconditional retry
                if r.status in {500, 502}:
                    await asyncio.sleep(1 + tries * 2, loop=self.loop)
                    continue
                # the usual error cases
                if r.status == 403:
                    raise Forbidden(r, data)
                elif r.status == 404:
                    raise NotFound(r, data)
                else:
                    raise HTTPException(r, data)
        # We've run out of retries, raise.
        raise HTTPException(r, data)
async def request(self, route, *, files=None, **kwargs):
    """Perform a rate-limit-aware request coordinated across processes via redis.

    Bucket locks are redis-backed (``RatelimitLock``) and the global rate
    limit is a redis key with a TTL, so multiple workers share state.
    Returns the parsed JSON (or text) body on 2xx; raises Forbidden,
    NotFound, or HTTPException otherwise.
    """
    bucket = route.bucket
    method = route.method
    url = route.url
    # distributed per-bucket lock backed by redis
    lock = RatelimitLock(self.redis, bucket)
    # header creation
    headers = {
        'User-Agent': self.user_agent,
        'X-Ratelimit-Precision': 'millisecond',
        'Authorization': 'Bot ' + self.token
    }
    # some checking if it's a JSON request
    if 'json' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = utils.to_json(kwargs.pop('json'))
    try:
        reason = kwargs.pop('reason')
    except KeyError:
        pass
    else:
        if reason:
            # audit-log reason travels in a header, percent-encoded
            headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    kwargs['headers'] = headers
    # Proxy support
    if self.proxy is not None:
        kwargs['proxy'] = self.proxy
    if self.proxy_auth is not None:
        kwargs['proxy_auth'] = self.proxy_auth
    # Check if global rate limit was hit
    while True:
        # pttl returns the remaining TTL in milliseconds
        delta = await self.redis.pttl("ratelimit:global")
        if delta < 0:
            # Key does not exist or key has no ttl
            break
        await asyncio.sleep(delta / 1000)
    # Check if bucket ratelimit was hit and acquire the lock
    await lock.acquire()
    # MaybeUnlock releases the bucket lock on exit unless defer() was called
    async with MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            if files:
                # rewind attachments so a retry re-sends the full content
                for f in files:
                    f.reset(seek=tries)
            async with self.__session.request(method, url, **kwargs) as r:
                log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status)
                # even errors have text involved in them so this is safe to call
                data = await json_or_text(r)
                # check if we have rate limit header information
                remaining = r.headers.get('X-Ratelimit-Remaining')
                if remaining == '0' and r.status != 429:
                    # we've depleted our current bucket
                    delta = utils._parse_ratelimit_header(r, use_clock=self.use_clock)
                    log.info('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta)
                    maybe_lock.defer()
                    # schedule the distributed lock to release when the bucket resets
                    await lock.release_in(delta)
                # the request was successful so just return the text/json
                if 300 > r.status >= 200:
                    log.debug('%s %s has received %s', method, url, data)
                    return data
                # we are being rate limited
                if r.status == 429:
                    if not r.headers.get('Via'):
                        # Banned by Cloudflare more than likely.
                        raise HTTPException(r, data)
                    fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                    # sleep a bit
                    retry_after = data['retry_after'] / 1000.0
                    log.warning(fmt, retry_after, bucket)
                    await lock.release_in(retry_after)
                    # check if it's a global rate limit
                    is_global = data.get('global', False)
                    if is_global:
                        log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)
                        # publish the global limit so other workers also wait
                        await self.redis.setex("ratelimit:global", retry_after, 1)
                    await asyncio.sleep(retry_after)
                    log.debug('Done sleeping for the rate limit. Retrying...')
                    continue
                # we've received a 500 or 502, unconditional retry
                if r.status in {500, 502}:
                    await asyncio.sleep(1 + tries * 2)
                    continue
                # the usual error cases
                if r.status == 403:
                    raise Forbidden(r, data)
                elif r.status == 404:
                    raise NotFound(r, data)
                else:
                    raise HTTPException(r, data)
        # We've run out of retries, raise.
        raise HTTPException(r, data)
def request(self, route, *, header_bypass_delay=None, **kwargs):
    """Perform a rate-limit-aware request (legacy ``yield from`` coroutine).

    ``header_bypass_delay``, when given, overrides the bucket-reset delay
    parsed from the rate-limit headers.  Returns the parsed JSON (or text)
    body on 2xx; raises Forbidden, NotFound, or HTTPException otherwise.
    """
    bucket = route.bucket
    method = route.method
    url = route.url
    # one lock per bucket so concurrent requests to the same bucket queue up
    lock = self._locks.get(bucket)
    if lock is None:
        lock = asyncio.Lock(loop=self.loop)
        if bucket is not None:
            self._locks[bucket] = lock
    # header creation
    headers = {
        'User-Agent': self.user_agent,
    }
    if self.token is not None:
        headers['Authorization'] = 'Bot ' + self.token if self.bot_token else self.token
    # some checking if it's a JSON request
    if 'json' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = utils.to_json(kwargs.pop('json'))
    try:
        reason = kwargs.pop('reason')
    except KeyError:
        pass
    else:
        if reason:
            # audit-log reason travels in a header, percent-encoded
            headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')
    kwargs['headers'] = headers
    # Proxy support
    if self.proxy is not None:
        kwargs['proxy'] = self.proxy
    if self.proxy_auth is not None:
        kwargs['proxy_auth'] = self.proxy_auth
    if not self._global_over.is_set():
        # wait until the global lock is complete
        yield from self._global_over.wait()
    # acquiring the lock by yielding from it (legacy asyncio.Lock protocol)
    yield from lock
    # MaybeUnlock releases the bucket lock on exit unless defer() was called
    with MaybeUnlock(lock) as maybe_lock:
        for tries in range(5):
            r = yield from self._session.request(method, url, **kwargs)
            log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), r.status)
            try:
                # even errors have text involved in them so this is safe to call
                data = yield from json_or_text(r)
                # check if we have rate limit header information
                remaining = r.headers.get('X-Ratelimit-Remaining')
                if remaining == '0' and r.status != 429:
                    # we've depleted our current bucket
                    if header_bypass_delay is None:
                        delta = utils._parse_ratelimit_header(r)
                    else:
                        delta = header_bypass_delay
                    log.debug('A rate limit bucket has been exhausted (bucket: %s, retry: %s).', bucket, delta)
                    # keep the lock held until the bucket resets
                    maybe_lock.defer()
                    self.loop.call_later(delta, lock.release)
                # the request was successful so just return the text/json
                if 300 > r.status >= 200:
                    log.debug('%s %s has received %s', method, url, data)
                    return data
                # we are being rate limited
                if r.status == 429:
                    fmt = 'We are being rate limited. Retrying in %.2f seconds. Handled under the bucket "%s"'
                    # sleep a bit
                    retry_after = data['retry_after'] / 1000.0
                    log.info(fmt, retry_after, bucket)
                    # check if it's a global rate limit
                    is_global = data.get('global', False)
                    if is_global:
                        log.info('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)
                        self._global_over.clear()
                    yield from asyncio.sleep(retry_after, loop=self.loop)
                    log.debug('Done sleeping for the rate limit. Retrying...')
                    # release the global lock now that the
                    # global rate limit has passed
                    if is_global:
                        self._global_over.set()
                        log.debug('Global rate limit is now over.')
                    continue
                # we've received a 500 or 502, unconditional retry
                if r.status in {500, 502}:
                    yield from asyncio.sleep(1 + tries * 2, loop=self.loop)
                    continue
                # the usual error cases
                if r.status == 403:
                    raise Forbidden(r, data)
                elif r.status == 404:
                    raise NotFound(r, data)
                else:
                    raise HTTPException(r, data)
            finally:
                # clean-up just in case
                yield from r.release()
        # We've run out of retries, raise.
        raise HTTPException(r, data)