def __init__(self, config={}):
    ccxt.kraken.__init__(self, config=config)
    exchange.Exchange.__init__(self)
    self.channels[self.TICKER]['ex_name'] = 'ticker'
    self.channels[self.TRADES]['ex_name'] = 'trade'
    self.channels[self.ORDER_BOOK]['ex_name'] = 'book'
    self.channels[self.OHLCVS]['ex_name'] = 'ohlc'
    self.channels[self.TICKER]['has'] = True
    self.channels[self.TRADES]['has'] = True
    self.channels[self.ORDER_BOOK]['has'] = True
    self.channels[self.OHLCVS]['has'] = True
    self.channels_by_ex_name = self.create_channels_by_ex_name()
    # Maximum number of channels per connection.
    # Unlimited if equal to 10 ** 5.
    self.max_channels = 45
    # Number of connections that can be created per unit time,
    # where the unit of time is in milliseconds.
    # Example: AsyncLimiter(1, 60000 / 1000) --> one connection per minute
    # Unlimited if equal to (10 ** 5, 60000).
    self.max_connections = {
        'public': AsyncLimiter(10 ** 5, 60000 / 1000),
        'private': AsyncLimiter(1, 60000 / 1000)
    }
    self.ws_endpoint = {
        'public': 'wss://ws.kraken.com',
        'private': ''
    }
    self.event = 'event'
    self.errors = ['error']
    self.subscribed = 'subscribed'
    # All message events that are not unified.
    self.others = ['subscriptionStatus', 'systemStatus', 'heartbeat']
async def test_acquire(event_loop, task):
    current_time = 0

    def mocked_time():
        return current_time

    # capacity released every 2 seconds
    limiter = AsyncLimiter(5, 10)

    with mock.patch.object(event_loop, "time", mocked_time):
        tasks = [asyncio.ensure_future(task(limiter)) for _ in range(10)]
        pending = await wait_for_n_done(tasks, 5)
        assert len(pending) == 5

        current_time = 3  # releases capacity for one and some buffer
        assert limiter.has_capacity()
        pending = await wait_for_n_done(pending, 1)
        assert len(pending) == 4

        current_time = 7  # releases capacity for two more, plus buffer
        pending = await wait_for_n_done(pending, 2)
        assert len(pending) == 2

        current_time = 11  # releases the remainder
        pending = await wait_for_n_done(pending, 2)
        assert len(pending) == 0
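# `wait_for_n_done` is used by the test above but not shown here; a minimal
# sketch of such a helper (hypothetical, assuming it blocks until at least
# `n` of the given tasks have completed and returns the rest) might be:

import asyncio

async def wait_for_n_done(tasks, n):
    # Block until at least `n` of the given tasks have completed, then
    # return the set of tasks that are still pending.
    pending = set(tasks)
    done_count = 0
    while done_count < n:
        done, pending = await asyncio.wait(
            pending, return_when=asyncio.FIRST_COMPLETED)
        done_count += len(done)
    return pending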
def __init__(self, config={}):
    ccxt.bitvavo.__init__(self, config=config)
    exchange.Exchange.__init__(self)
    self.channels[self.TICKER]['ex_name'] = 'ticker24h'
    self.channels[self.TRADES]['ex_name'] = 'trades'
    self.channels[self.ORDER_BOOK]['ex_name'] = 'book'
    self.channels[self.OHLCVS]['ex_name'] = 'candles'
    self.channels[self.TICKER]['has'] = True
    self.channels[self.TRADES]['has'] = True
    self.channels[self.ORDER_BOOK]['has'] = True
    self.channels[self.OHLCVS]['has'] = True
    self.channels_by_ex_name = self.create_channels_by_ex_name()
    # Maximum number of channels per connection.
    # Unlimited if equal to 10 ** 5.
    self.max_channels = 10 ** 5
    # Number of connections that can be created per unit time,
    # where the unit of time is in milliseconds.
    # Example: AsyncLimiter(1, 60000 / 1000) --> one connection per minute
    # Unlimited if equal to (10 ** 5, 60000).
    self.max_connections = {
        'public': AsyncLimiter(10 ** 5, 60000 / 1000),
        'private': AsyncLimiter(1, 60000 / 1000)
    }
    self.ws_endpoint = {
        'public': 'wss://ws.bitvavo.com/v2/',
        'private': ''
    }
    self.event = 'event'
    self.subscribed = 'subscribed'
async def test_has_capacity():
    limiter = AsyncLimiter(1)
    assert limiter.has_capacity()
    assert not limiter.has_capacity(42)
    await limiter.acquire()
    assert not limiter.has_capacity()
def __init__(self, config={}):
    ccxt.coinbasepro.__init__(self, config=config)
    exchange.Exchange.__init__(self)
    self.channels[self.TICKER]['ex_name'] = 'ticker'
    self.channels[self.TRADES]['ex_name'] = 'matches'
    self.channels[self.ORDER_BOOK]['ex_name'] = 'level2'
    self.channels[self.TICKER]['has'] = True
    self.channels[self.TRADES]['has'] = True
    self.channels[self.ORDER_BOOK]['has'] = True
    self.channels_by_ex_name = self.create_channels_by_ex_name()
    # Maximum number of channels per connection.
    # Unlimited if equal to 10 ** 5.
    self.max_channels = 10 ** 5
    # Number of connections that can be created per unit time,
    # where the unit of time is in milliseconds.
    # Example: AsyncLimiter(1, 60000 / 1000) --> one connection per minute
    # Unlimited if equal to (10 ** 5, 60000).
    self.max_connections = {
        'public': AsyncLimiter(1, 4000 / 1000),
        'private': AsyncLimiter(1, 60000 / 1000)
    }
    self.ws_endpoint = {
        'public': 'wss://ws-feed.pro.coinbase.com',
        'private': ''
    }
    self.event = 'type'
    self.subscribed = 'subscriptions'
def _get_limiter(self, url: str) -> AsyncLimiter:
    host = urlparse(url).netloc
    limiter = self._limiters.get(host)
    if not limiter:
        limiter = self._limiters[host] = AsyncLimiter(1, self.request_delay)
    return limiter
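# A minimal sketch of how a per-host limiter like the one above might be
# applied (hypothetical `_fetch` caller; assumes `self._limiters` is a dict
# and `self.request_delay` is the per-host period in seconds):

async def _fetch(self, session, url: str) -> bytes:
    # At most one request per `request_delay` seconds to any single host.
    async with self._get_limiter(url):
        async with session.get(url) as response:
            return await response.read()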
def __init__(self):
    # pylint: disable=unused-argument
    async def on_request_start(
        session: ClientSession,
        trace_config_ctx: SimpleNamespace,
        params: TraceRequestStartParams,
    ) -> None:
        current_attempt = trace_config_ctx.trace_request_ctx["current_attempt"]
        if current_attempt > 1:
            LOG.info("iNat request attempt #%d: %s", current_attempt, repr(params))

    trace_config = TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    self.session = RetryClient(
        raise_for_status=False,
        trace_configs=[trace_config],
    )
    self.request_time = time()
    self.places_cache = {}
    self.projects_cache = {}
    self.users_cache = {}
    self.users_login_cache = {}
    self.taxa_cache = {}
    # api_v1_limiter:
    # ---------------
    # - Allow a burst of 60 requests (i.e. equal to max_rate) in the initial
    #   seconds of the 60 second time_period before enforcing a rate limit of
    #   60 requests per minute (max_rate).
    # - This honours "try to keep it to 60 requests per minute or lower":
    #   - https://api.inaturalist.org/v1/docs/
    # - Since the iNat API doesn't throttle until 100 requests per minute,
    #   this should ensure we never get throttled.
    self.api_v1_limiter = AsyncLimiter(60, 60)
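# A minimal sketch of how `api_v1_limiter` above might wrap a v1 API call
# (hypothetical method `_get_v1`; `self.session` and the limiter come from
# the snippet above, and the path is illustrative only):

async def _get_v1(self, path: str, **params):
    # Each request consumes one unit of the 60-per-60s budget; the first 60
    # may burst, after which requests are spaced to 60/minute on average.
    async with self.api_v1_limiter:
        async with self.session.get(
            f"https://api.inaturalist.org/v1/{path}", params=params
        ) as response:
            return await response.json()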
async def main():
    limiter = AsyncLimiter(40, 1)
    tasks = set()
    # load the image IDs we've already processed from previous runs
    img_ids = set(
        os.path.basename(s)[6:].split(".")[0]
        for s in glob.glob(os.path.join(OUTPUT_PATH, "image_*"))
    )
    async with NASA_API as napi:
        async with aiohttp.ClientSession() as session:
            async with aioboto3.client("rekognition") as rk:
                # the lower progress bar represents the progress of the `rekognize` tasks
                pbar = tqdm.tqdm(position=1, total=0)
                # these search parameters can be changed to get a variety of images
                search = await napi.search(center="JSC", media_type="image", q="dock")
                # the upper progress bar represents the progress of the NASA API search
                async for item in tqdm.asyncio.tqdm(search, position=0):
                    # skip images that have already been processed
                    if item["nasa_id"] not in img_ids:
                        img_ids.add(item["nasa_id"])
                        # enqueue a task that will fetch the image data, run AWS
                        # Rekognition, and then write the output to disk
                        tasks.add(
                            asyncio.create_task(
                                rekognize(session, rk, pbar, limiter, item)
                            )
                        )
                        pbar.total += 1
                # wait for all rekognition tasks to finish
                await asyncio.gather(*tasks)
def __init__(self, provider_id, requests_per_minute=DEFAULT_REQUESTS_PER_MINUTE,
             *args, **kwargs):
    super().__init__(provider_id, *args, **kwargs)
    self._limiter = AsyncLimiter(requests_per_minute, 60)
def __init__(self):
    self.channels = {
        self.TICKER: {
            'ex_name': '',
            'has': False,
            'parse': self.parse_ticker_ws
        },
        self.TRADES: {
            'ex_name': '',
            'has': False,
            'parse': self.parse_trades_ws
        },
        self.ORDER_BOOK: {
            'ex_name': '',
            'has': False,
            'parse': self.parse_order_book_ws
        },
        self.OHLCVS: {
            'ex_name': '',
            'has': False,
            'parse': self.parse_ohlcvs_ws
        }
    }
    self.channels_by_ex_name = self.create_channels_by_ex_name()
    self.max_channels = 0  # Maximum number of channels per connection.
    # Number of connections that can be created per unit time,
    # where the unit of time is in milliseconds.
    # Example: AsyncLimiter(1, 60000 / 1000) --> one connection per minute
    self.max_connections = {
        'public': AsyncLimiter(1, 60000 / 1000),
        'private': AsyncLimiter(1, 60000 / 1000)
    }
    self.connections = {}
    self.result = asyncio.Queue(maxsize=1)
    self.ws_endpoint = {'public': '', 'private': ''}
    self.event = ''
    self.subscribed = ''
    self.errors = {}
    self.order_book = {}
    # All message events that are not unified.
    self.others = []
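# A minimal sketch of how the per-access `max_connections` limiters above
# might gate new websocket connections (hypothetical `_connect` helper;
# assumes the `websockets` package and that `access` is 'public' or
# 'private'):

import websockets

async def _connect(self, access: str):
    # Acquiring one unit spaces out connection attempts so no more than
    # max_rate connections are opened per time_period.
    async with self.max_connections[access]:
        return await websockets.connect(self.ws_endpoint[access])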
def __init__(self, config={}):
    ccxt.bitfinex2.__init__(self, config=config)
    exchange.Exchange.__init__(self)
    self.channels[self.TICKER]['ex_name'] = 'ticker'
    self.channels[self.TRADES]['ex_name'] = 'trades'
    self.channels[self.ORDER_BOOK]['ex_name'] = 'book'
    self.channels[self.OHLCVS]['ex_name'] = 'candles'
    self.channels[self.TICKER]['has'] = True
    self.channels[self.TRADES]['has'] = True
    self.channels[self.ORDER_BOOK]['has'] = True
    self.channels[self.OHLCVS]['has'] = True
    self.channels_by_ex_name = self.create_channels_by_ex_name()
    # Maximum number of channels per connection.
    # Unlimited if equal to 10 ** 5.
    self.max_channels = 25
    # Number of connections that can be created per unit time,
    # where the unit of time is in milliseconds.
    # Example: AsyncLimiter(1, 60000 / 1000) --> one connection per minute
    # Unlimited if equal to (10 ** 5, 60000).
    self.max_connections = {
        'public': AsyncLimiter(20, 60000 / 1000),
        'private': AsyncLimiter(1, 60000 / 1000)
    }
    self.ws_endpoint = {
        'public': 'wss://api-pub.bitfinex.com/ws/2',
        'private': 'wss://api.bitfinex.com/ws/2'
    }
    self.event = 'event'
    self.subscribed = 'subscribed'
    self.errors = {
        10000: BaseError('Unknown event.'),
        10001: ExchangeError('Unknown pair.'),
        10300: SubscribeError,
        10301: SubscribeError('Already subscribed.'),
        10302: SubscribeError('Unknown channel.'),
        10305: ChannelLimitExceeded,
        10400: UnsubscribeError,
        10401: UnsubscribeError('Not subscribed.'),
    }
    # All message events that are not unified.
    self.others = ['info']
def __init__(self, concurrency_limit, requests_per_second):
    """
    :param concurrency_limit: number of connections allowed to be open at once
    :param requests_per_second: maximum average requests per second
    """
    self.concurrency_limit = concurrency_limit
    self.requests_per_second = requests_per_second
    self.rate_limiter = AsyncLimiter(
        self.concurrency_limit,
        self.concurrency_limit / self.requests_per_second)
    self.session = None
    self._entered_count = 0
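# Worked example for the construction above (hypothetical values): with
# concurrency_limit=10 and requests_per_second=5 it builds
# AsyncLimiter(10, 10 / 5) == AsyncLimiter(10, 2.0) -- at most 10
# acquisitions per 2-second window, i.e. 5 requests/second on average,
# while still allowing bursts up to the concurrency limit.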
def __init__(self, dev_id=None, auth_key=None):
    if dev_id is None and auth_key is None:
        self.dev_id = config.dev_id
        self.auth_key = config.auth_id
    else:
        self.dev_id = dev_id
        self.auth_key = auth_key
    self.session_manager = SessionManager(api_info.DAILY_REQUEST_LIMIT,
                                          api_info.DAILY_SESSION_LIMIT,
                                          api_info.SESSION_DURATION)
    self.endpoint = "http://api.paladins.com/paladinsapi.svc/"
    self.refreshing = False
    self.client_session = None
    self.rate_limiter = AsyncLimiter(1, 1)
async def loopOverRange(input_range):
    print(f"Starting on range {input_range}...")
    rate_limit = AsyncLimiter(4, 5)
    process = pipeline([mySafeComputation, lambda x: Right(x * 3), mustBeEven])
    computations = [process(i) for i in input_range]
    allResults = await asyncio.gather(
        *[throttle(c, rate_limit) for c in computations])
    print(allResults)
    return allResults
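# `throttle` is used above (and in a later snippet) but not defined here; a
# minimal sketch of such a combinator (hypothetical, assuming it simply runs
# one awaitable under the limiter) might be:

async def throttle(awaitable, limiter: AsyncLimiter):
    # Acquire one unit of capacity before running, so gathered computations
    # proceed at no more than max_rate per time_period.
    async with limiter:
        return await awaitable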
async def fetch(self):
    limiter = AsyncLimiter(15, 1)
    task = []
    async with aiohttp.ClientSession() as session:
        async with limiter:
            LP = RaydiumPoolInfo(session)
            fee_apy_task = asyncio.create_task(LP.RAYDIUM.get_pair())
            fee_apy = await fee_apy_task
            for farm in LP.farms_info:
                task.append(LP.get_APR(farm))
            result = await asyncio.gather(*task)
            for farm in result:
                farm_name = list(farm.keys())[0]
                farm[farm_name].update({"Fee_APR": fee_apy[farm_name]})
                pprint(farm)
async def main():
    print("Starting...")
    process = pipeline([mySafeComputation, lambda x: Right(x * 3), mustBeEven])
    computations = [process(i) for i in range(50)]
    rate_limit = AsyncLimiter(max_rate=10, time_period=10)
    await asyncio.sleep(1)
    print("Computation instantiated...")
    allResults = await asyncio.gather(
        *[throttle(c, rate_limit) for c in computations])
    print(allResults)
    return allResults
async def main(loop, num_consumers):
    global async_clint, async_limit
    conn = aiohttp.TCPConnector(verify_ssl=False, limit=100, use_dns_cache=True)
    async_clint = aiohttp.ClientSession(loop=loop, connector=conn,
                                        conn_timeout=30, read_timeout=30)
    async_limit = AsyncLimiter(20, 0.1)

    # config
    config = configparser.ConfigParser()
    config.read('config.ini', encoding='utf-8')
    data_dir = config.get('amap_spider', 'data_path')
    key = config.get('amap_spider', 'key')
    poi_types = json.loads(config.get('amap_spider', 'poi_types'))
    target_rect = json.loads(config.get('amap_spider', 'target_rect'))

    # do query
    async with async_clint:
        rect = Rec(*target_rect)
        task_queue = Queue()
        output = dict()
        init = loop.create_task(
            init_task_queue(key, rect, poi_types, task_queue, output))
        tasks = [
            loop.create_task(query_pois(task_queue, output))
            for i in range(100)
        ]
        await asyncio.wait(tasks + [init])

    # save data
    if not os.path.exists(data_dir):
        os.makedirs(data_dir)
    file_date = datetime.datetime.now().strftime("%Y%m%d")
    await save_data(output.values(), '%s/metro_%s.json' % (data_dir, file_date))
    print(len(output))
def __init__(self, cache: bool) -> None:
    self._logger = logging.getLogger('dipdup.coinbase')
    self._proxy = DatasourceRequestProxy(
        cache=cache,
        ratelimiter=AsyncLimiter(max_rate=10, time_period=1),
    )
class NexusMods:
    """
    Nexus Mods Public API Documentation:
    https://app.swaggerhub.com/apis-docs/NexusMods/nexus-mods_public_api_params_in_form_data/1.0
    """

    BASE_URL: ClassVar[str] = "https://api.nexusmods.com/v1"

    USER_AGENT: ClassVar[str] = "{}/{} ({}; {}) {}/{}".format(
        aionexusmods.__name__,
        aionexusmods.__version__,
        platform.platform(),
        platform.architecture()[0],
        platform.python_implementation(),
        platform.python_version(),
    )

    game_domain_name: str

    def __init__(self, api_key: str, game_domain_name: str):
        self.game_domain_name = game_domain_name
        self._api_key = api_key
        self._session = None

    #
    # Nexus Mods Public Api - Mods
    #

    async def get_mod_updates(self, period: str) -> list[ModUpdate]:
        """
        Returns a list of mods that have been updated in a given period,
        with timestamps of their last update. Cached for 5 minutes.
        The only accepted periods are '1d', '1w' and '1m' (1 day, 1 week and 1 month).
        """
        json: _JsonDict = {"period": period}
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/updated.json",
            json=json)
        return parse_raw_as(list[ModUpdate], result)

    async def get_mod_changelogs(self, mod_id: int) -> dict[str, list[str]]:
        """
        Returns a list of changelogs for the specified mod.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/changelogs.json"
        )
        return parse_raw_as(dict[str, list[str]], result)

    async def get_latest_added_mods(self) -> list[Mod]:
        """
        Returns the 10 latest added mods.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/latest_added.json"
        )
        return parse_raw_as(list[Mod], result)

    async def get_latest_updated_mods(self) -> list[Mod]:
        """
        Returns the 10 latest updated mods.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/latest_updated.json"
        )
        return parse_raw_as(list[Mod], result)

    async def get_trending_mods(self) -> list[Mod]:
        """
        Returns 10 trending mods.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/trending.json"
        )
        return parse_raw_as(list[Mod], result)

    async def get_mod(self, mod_id: int) -> Mod:
        """
        Returns a specified mod. Cached for 5 minutes.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}.json"
        )
        return parse_raw_as(Mod, result)

    async def get_md5_search(self, md5_hash: str) -> list[tuple[Mod, File]]:
        """
        Returns a list of mod files for the given MD5 file hash.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/md5_search/{md5_hash}.json"
        )
        parsed = parse_raw_as(list[SearchResult], result)
        return [(p.mod, p.file_details) for p in parsed]

    async def set_endorsed(self, mod_id: int, version: str, endorsed: bool) -> Status:
        """Endorse or unendorse a mod."""
        json: _JsonDict = {"version": version}
        if endorsed:
            result = await self._post(
                f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/endorse.json",
                json=json,
            )
        else:
            result = await self._post(
                f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/abstain.json",
                json=json,
            )
        return parse_raw_as(Status, result)

    #
    # Nexus Mods Public Api - Mod Files
    #

    async def get_files_and_updates(
            self, mod_id: int) -> tuple[list[File], list[FileUpdate]]:
        """
        Returns a list of files for the specified mod.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/files.json"
        )
        parsed = parse_raw_as(FilesResult, result)
        return parsed.files, parsed.file_updates

    async def get_file(self, mod_id: int, file_id: int) -> File:
        """
        Returns the specified file for the specified mod.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/files/{file_id}.json"
        )
        return parse_raw_as(File, result)

    async def get_download_links(self, mod_id: int, file_id: int) -> list[DownloadLink]:
        """
        Returns a generated download link for the specified mod file.
        """
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}/mods/{mod_id}/files/{file_id}/download_link.json"
        )
        return parse_raw_as(list[DownloadLink], result)

    #
    # Nexus Mods Public Api - Games
    #

    async def get_games(self) -> list[Game]:
        """Returns a list of all games."""
        result = await self._get(f"{self.BASE_URL}/games.json")
        return parse_raw_as(list[Game], result)

    async def get_game(self) -> Game:
        """Returns the specified game."""
        result = await self._get(
            f"{self.BASE_URL}/games/{self.game_domain_name}.json")
        return parse_raw_as(Game, result)

    #
    # Nexus Mods Public Api - User
    #

    async def get_user(self) -> User:
        """Returns the current user."""
        result = await self._get(f"{self.BASE_URL}/users/validate.json")
        return parse_raw_as(User, result)

    async def get_tracked_mods(self) -> list[TrackedMod]:
        """Returns all the mods being tracked by the current user."""
        result = await self._get(f"{self.BASE_URL}/user/tracked_mods.json")
        return parse_raw_as(list[TrackedMod], result)

    async def set_tracked(self, mod_id: int, tracked: bool) -> Message:
        """Track or untrack a mod."""
        json: _JsonDict = {
            "domain_name": self.game_domain_name,
            "mod_id": mod_id
        }
        if tracked:
            result = await self._post(
                f"{self.BASE_URL}/user/tracked_mods.json", json=json)
        else:
            result = await self._delete(
                f"{self.BASE_URL}/user/tracked_mods.json", json=json)
        return parse_raw_as(Message, result)

    async def get_endorsements(self) -> list[Endorsement]:
        """Returns a list of all endorsements for the current user."""
        result = await self._get(f"{self.BASE_URL}/user/endorsements.json")
        return parse_raw_as(list[Endorsement], result)

    #
    # Nexus Mods Public Api - Colour Schemes
    #

    async def get_colour_schemes(self) -> list[ColourScheme]:
        """
        Returns list of all colour schemes, including the primary,
        secondary and 'darker' colours.
        """
        result = await self._get(f"{self.BASE_URL}/colourschemes.json")
        return parse_raw_as(list[ColourScheme], result)

    #
    # Nexus Mods Public Api - Extras
    #

    async def get_content_preview(self, content_preview_link: str) -> ContentPreview:
        """
        Returns the results from the specified content preview link.
        """
        result = await self._get(content_preview_link)
        return parse_raw_as(ContentPreview, result)

    async def download(self, download_link: str,
                       path: Union[str, PathLike[str]]) -> None:
        """
        Downloads the contents from the specified download link to the
        specified path.
        """
        from os.path import dirname
        from aiofiles.os import mkdir
        from aiofiles import open
        try:
            await mkdir(dirname(path))
        except (FileExistsError, FileNotFoundError):
            pass
        async with open(path, "wb") as f:
            async for chunk in self._get_iter_chunks(download_link):
                await f.write(chunk)

    #
    # Implementation Details
    #

    _api_key: str
    _session: Optional[ClientSession]
    # 3600 / 28 ≈ 128.6 requests per default 60 s period (about 2.1/s).
    _limiter: ClassVar[AsyncLimiter] = AsyncLimiter(3600 / 28)

    def _active_session(self) -> ClientSession:
        if self._session is None:
            raise RuntimeError("attempted to use a session before it was started")
        if self._session.closed:
            raise RuntimeError("attempted to use a session after it was closed")
        return self._session

    async def __aenter__(self) -> NexusMods:
        if self._session and not self._session.closed:
            raise RuntimeError(
                "attempted to start a new session before closing the previous one"
            )
        self._session = ClientSession(
            headers={
                "apikey": self._api_key,
                "user-agent": self.USER_AGENT,
                "content-type": "application/json",
            },
            raise_for_status=True,
            connector=TCPConnector(limit_per_host=28),
        )
        return self

    async def __aexit__(self, *args):  # type: ignore[no-untyped-def]
        await self._active_session().close()

    async def _get(self, url: str, json: Optional[_JsonDict] = None) -> bytes:
        async with self._limiter:
            async with self._active_session().get(url, json=json) as response:
                return await response.read()

    async def _post(self, url: str, json: Optional[_JsonDict] = None) -> bytes:
        async with self._limiter:
            async with self._active_session().post(url, json=json) as response:
                return await response.read()

    async def _delete(self, url: str, json: Optional[_JsonDict] = None) -> bytes:
        async with self._limiter:
            async with self._active_session().delete(url, json=json) as response:
                return await response.read()

    async def _get_iter_chunks(self, url: str) -> AsyncIterator[bytes]:
        async with self._limiter:
            async with self._active_session().get(url) as response:
                while True:
                    chunk = await response.content.read(1024 * 1024 * 12)  # 12 MB
                    if chunk:
                        yield chunk
                    else:
                        break
        merged.setdefault(rp, set()).update(rs)
    p.rdepends = merged


async def update_arch_mapping() -> None:
    print("update arch mapping")
    url = ARCH_MAPPING_CONFIG[0][0]
    print("Loading %r" % url)
    data = await get_content_cached(url, timeout=REQUEST_TIMEOUT)
    state.arch_mapping = ArchMapping(json.loads(data))


_rate_limit = AsyncLimiter(1, UPDATE_INTERVAL_MIN)


@functools.lru_cache(maxsize=None)
def _get_update_event() -> Event:
    return Event()


async def wait_for_update() -> None:
    update_event = _get_update_event()
    await update_event.wait()
    update_event.clear()


def queue_update() -> None:
    update_event = _get_update_event()
def test_attributes():
    limiter = AsyncLimiter(42, 81)
    assert limiter.max_rate == 42
    assert limiter.time_period == 81
import discord
import feedparser
import sentry_sdk
from aiolimiter import AsyncLimiter
from discord.ext import tasks
from dispander import dispand
from discord_together import DiscordTogether

import settings

TOKEN = settings.TOKEN
DSN = settings.SENTRY_DSN
intents = discord.Intents.all()
client = discord.Client(intents=intents)
togetherControl = DiscordTogether(token=settings.TOKEN)
sentry_sdk.init(DSN, traces_sample_rate=1.0)

server_join_ratelimit = AsyncLimiter(time_period=10, max_rate=10)
invite_link_ratelimit = AsyncLimiter(time_period=3600, max_rate=2)
url_ratelimit = AsyncLimiter(time_period=60, max_rate=4)
message_ratelimit = AsyncLimiter(time_period=10, max_rate=20)

mildom_status = {}
heart_beat = {}
mention_dict = {
    484103635895058432: '<@&718449500729114664>',
    484103660742115363: '<@&718449761409302580>',
    484104086472491020: '<@&718450891744870530>',
    484104317410738177: '<@&718450954613162015>',
    484104150959783936: '<@&718451051102994473>',
    484104415612239872: '<@&718451257332858920>',
    484104516934041615: '<@&718451718106382417>',
    571440864761741325: '<@&718451311393243139>',
    647688309325168651: '<@&718451366699466753>',
async def test_over_acquire():
    limiter = AsyncLimiter(1)
    with pytest.raises(ValueError):
        await limiter.acquire(42)
def __init__(self, item_list: ItemList):
    self.item_list = item_list
    self.throttler = ThrottlerEnsemble([AsyncLimiter(10, 1)])
def __init__(self, prevent_rate_limits: bool):
    self.global_limiter = (AsyncLimiter(20, 1)
                           if prevent_rate_limits is True
                           else AsyncNonLimiter())
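# `AsyncNonLimiter` is referenced above but not defined here; a minimal
# sketch of such a no-op stand-in (hypothetical, assuming it only needs the
# async-context-manager protocol that AsyncLimiter provides) might be:

class AsyncNonLimiter:
    async def __aenter__(self):
        # No throttling: "acquire" always succeeds immediately.
        return None

    async def __aexit__(self, exc_type, exc, tb):
        return None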