def __init__(self, config=None, raw_data_collection=None):
    """
    Create a FeedHandler.

    config: str, dict or None
        if str, absolute path (including file name) of the config file.
        If not provided, config can also be a dictionary of values, or
        can be None, which will default options. See docs/config.md for
        more information.
    raw_data_collection: callback (see AsyncFileCallback) or None
        if set, enables collection of raw data from exchanges. ALL
        https/wss traffic from the exchanges will be collected.
    """
    self.feeds = []
    self.config = Config(config=config)

    self.raw_data_collection = None
    if raw_data_collection:
        # Install the callback on the Connection class so every
        # connection mirrors its raw traffic through it.
        Connection.raw_data_callback = raw_data_collection
        self.raw_data_collection = raw_data_collection

    get_logger('feedhandler', self.config.log.filename, self.config.log.level)
    if self.config.log_msg:
        LOG.info(self.config.log_msg)

    if self.config.uvloop:
        # uvloop is an optional dependency; fall back to the default
        # event loop policy when it is not installed.
        try:
            import uvloop
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            LOG.info('FH: uvloop initialized')  # fixed typo: was 'initalized'
        except ImportError:
            LOG.info("FH: uvloop not initialized")
def __init__(self, retries=10, timeout_interval=10, log_messages_on_error=False, raw_message_capture=None, handler_enabled=True, config=None):
    """
    Set up feed-handler state.

    retries: int
        how many times a connection is retried after a disconnect or
        other failure
    timeout_interval: int
        seconds between checks for feeds that have timed out
    log_messages_on_error: boolean
        when true, log the exchange message that triggered an exception
    raw_message_capture: callback
        optional callback to save/process/handle raw messages
        (primarily for debugging purposes)
    handler_enabled: boolean
        keep running message handlers (and any registered callbacks)
        while raw message capture is enabled
    config: str
        absolute path (including file name) of the config file. If not
        provided env var checked first, then local config.yaml
    """
    self.config = Config(file_name=config)
    self.feeds = []
    self.timeout = {}
    # no last-message timestamp recorded yet for any feed
    self.last_msg = defaultdict(lambda: None)
    self.retries = retries
    self.timeout_interval = timeout_interval
    self.log_messages_on_error = log_messages_on_error
    self.raw_message_capture = raw_message_capture
    self.handler_enabled = handler_enabled
def __init__(self, retries=10, timeout_interval=10, log_messages_on_error=False, raw_message_capture=None, handler_enabled=True, config=None):
    """
    Set up feed-handler state and configure logging.

    retries: int
        how many times a connection is retried after a disconnect or
        other failure
    timeout_interval: int
        seconds between checks for feeds that have timed out
    log_messages_on_error: boolean
        when true, log the exchange message that triggered an exception
    raw_message_capture: callback
        optional callback to save/process/handle raw messages
        (primarily for debugging purposes)
    handler_enabled: boolean
        keep running message handlers (and any registered callbacks)
        while raw message capture is enabled
    config: str, dict or None
        if str, absolute path (including file name) of the config file.
        If not provided, config can also be a dictionary of values, or
        can be None, which will default options. See docs/config.md for
        more information.
    """
    self.config = Config(config=config)
    # logging must be configured after the Config is parsed
    get_logger('feedhandler', self.config.log.filename, self.config.log.level)

    self.feeds = []
    self.timeout = {}
    # no last-message timestamp recorded yet for any feed
    self.last_msg = defaultdict(lambda: None)
    self.retries = retries
    self.timeout_interval = timeout_interval
    self.log_messages_on_error = log_messages_on_error
    self.raw_message_capture = raw_message_capture
    self.handler_enabled = handler_enabled
def __init__(self, retries=10, timeout_interval=10, log_messages_on_error=False, raw_message_capture=None, config=None):
    """
    Create a FeedHandler.

    retries: int
        number of times the connection will be retried (in the event
        of a disconnect or other failure)
    timeout_interval: int
        number of seconds between checks to see if a feed has timed out
    log_messages_on_error: boolean
        if true, log the message from the exchange on exceptions
    raw_message_capture: callback
        if defined, callback to save/process/handle raw message
        (primarily for debugging purposes)
    config: str, dict or None
        if str, absolute path (including file name) of the config file.
        If not provided, config can also be a dictionary of values, or
        can be None, which will default options. See docs/config.md for
        more information.
    """
    self.feeds = []
    # retries counts connection *attempts* internally, so a non-negative
    # retry budget becomes retries + 1; -1 means retry forever.
    self.retries = (retries + 1) if retries >= 0 else -1
    self.timeout = {}
    self.last_msg = defaultdict(lambda: None)
    self.timeout_interval = timeout_interval
    self.log_messages_on_error = log_messages_on_error
    self.raw_message_capture = raw_message_capture
    self.config = Config(config=config)

    get_logger('feedhandler', self.config.log.filename, self.config.log.level)
    if self.config.log_msg:
        LOG.info(self.config.log_msg)

    if self.config.uvloop:
        # uvloop is optional; keep the default loop policy if missing.
        try:
            import uvloop
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            LOG.info('FH: uvloop initialized')  # fixed typo: was 'initalized'
        except ImportError:
            LOG.info("FH: uvloop not initialized")
def __init__(self, retries=10, timeout_interval=10, log_messages_on_error=False, raw_message_capture=None, config=None, exception_ignore: Optional[List[Exception]] = None):
    """
    Create a FeedHandler.

    retries: int
        number of times the connection will be retried (in the event
        of a disconnect or other failure)
    timeout_interval: int
        number of seconds between checks to see if a feed has timed out
    log_messages_on_error: boolean
        if true, log the message from the exchange on exceptions
    raw_message_capture: callback
        if defined, callback to save/process/handle raw message
        (primarily for debugging purposes)
    config: str, dict or None
        if str, absolute path (including file name) of the config file.
        If not provided, config can also be a dictionary of values, or
        can be None, which will default options. See docs/config.md for
        more information.
    exception_ignore: list, or None
        an optional list of exceptions that cryptofeed should ignore
        (i.e. not handle). These will need to be handled by a
        user-defined exception handler (provided to the run method) or
        the exception will kill the task (but not the feedhandler).
    """
    self.feeds = []
    # retries counts connection *attempts* internally, so a non-negative
    # retry budget becomes retries + 1; -1 means retry forever.
    self.retries = (retries + 1) if retries >= 0 else -1
    self.timeout = {}
    self.last_msg = defaultdict(lambda: None)
    self.timeout_interval = timeout_interval
    self.log_messages_on_error = log_messages_on_error
    self.raw_message_capture = raw_message_capture
    self.config = Config(config=config)

    # validate early so a bad argument fails at construction time
    if exception_ignore is not None and not isinstance(exception_ignore, list):
        raise ValueError("exception_ignore must be a list of Exceptions or None")
    self.exceptions = exception_ignore

    get_logger('feedhandler', self.config.log.filename, self.config.log.level)
    if self.config.log_msg:
        LOG.info(self.config.log_msg)

    if self.config.uvloop:
        # uvloop is optional; keep the default loop policy if missing.
        try:
            import uvloop
            asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
            LOG.info('FH: uvloop initialized')  # fixed typo: was 'initalized'
        except ImportError:
            LOG.info("FH: uvloop not initialized")
def __init__(self, config=None, sandbox=False):
    """Build the name -> REST client lookup table from the given config."""
    cfg = Config(config=config)
    get_logger('rest', cfg.rest.log.filename, cfg.rest.log.level)
    # each client receives only its own config section; sandbox applies
    # to the exchanges that support it
    self.lookup = dict(
        bitmex=Bitmex(cfg.bitmex),
        bitfinex=Bitfinex(cfg.bitfinex),
        coinbase=Coinbase(cfg.coinbase, sandbox=sandbox),
        poloniex=Poloniex(cfg.poloniex),
        gemini=Gemini(cfg.gemini, sandbox=sandbox),
        kraken=Kraken(cfg.kraken),
        deribit=Deribit(cfg.deribit),
        binance_futures=BinanceFutures(cfg.binance_futures),
        binance_delivery=BinanceDelivery(cfg.binance_delivery),
        ftx=FTX(cfg.ftx),
    )
def __init__(self, config=None, sandbox=False, subaccount=None, **kwargs):
    """Load credentials and symbol mappings for this exchange.

    When subaccount is given, credentials are read from the subaccount
    section nested under this exchange's config entry.
    """
    self.config = Config(config=config)
    self.sandbox = sandbox
    self.subaccount = subaccount

    exchange_cfg = self.config[self.id.lower()]
    if self.subaccount is not None:
        exchange_cfg = exchange_cfg[self.subaccount]
    self.key_id = exchange_cfg.key_id
    self.key_secret = exchange_cfg.key_secret
    self.key_passphrase = exchange_cfg.key_passphrase
    self.account_name = exchange_cfg.account_name

    self.ignore_invalid_instruments = self.config.ignore_invalid_instruments

    # populate the shared symbol cache once per exchange
    if not Symbols.populated(self.id):
        self.symbol_mapping()
    self.normalized_symbol_mapping, _ = Symbols.get(self.id)
    # reverse map: exchange-native symbol -> normalized symbol
    self.exchange_symbol_mapping = dict(
        zip(self.normalized_symbol_mapping.values(), self.normalized_symbol_mapping.keys())
    )
def __init__(self, config=None, sandbox=False):
    """Configure REST logging and build the name -> client lookup table."""
    self.config = Config(file_name=config)

    # fall back to defaults when no config was loaded or the restlog
    # entries are unset
    logfile = 'rest.log'
    loglevel = logging.WARNING
    if self.config and self.config.restlog.filename:
        logfile = self.config.restlog.filename
    if self.config and self.config.restlog.level:
        loglevel = self.config.restlog.level
    get_logger('rest', logfile, loglevel)

    # every client gets the raw config argument; sandbox applies to the
    # exchanges that support it
    self.lookup = dict(
        bitmex=Bitmex(config),
        bitfinex=Bitfinex(config),
        coinbase=Coinbase(config, sandbox=sandbox),
        poloniex=Poloniex(config),
        gemini=Gemini(config, sandbox=sandbox),
        kraken=Kraken(config),
        deribit=Deribit(config),
        binance_futures=BinanceFutures(config),
        binance_delivery=BinanceDelivery(config),
        ftx=FTX(config),
    )
def main():
    """Wire up the Deribit ingester: logging, Postgres callbacks, feed, run loop."""
    config = Config(config={'log': {'filename': 'ingester.log', 'level': 'DEBUG'}})
    get_logger('ingester', config.log.filename, config.log.level)

    handler = FeedHandler(config=config)
    deribit = Deribit(
        max_depth=2,
        subscription=get_new_subscription(),
        callbacks={
            TICKER: DeribitTickerPostgres(**postgres_cfg, cache_size=1000),
            TRADES: DeribitTradePostgres(**postgres_cfg),
            L2_BOOK: DeribitBookPostgres(**postgres_cfg, cache_size=1000),
        },
    )
    handler.add_feed(deribit)

    # periodically refresh the subscription list on the running feed
    resubscribe = functools.partial(subscribe_to_new_subscription, deribit)
    handler.run(start_loop=True, tasks=[do_periodically_at(8, 1, 1, resubscribe)])
def __init__(self, address: Union[dict, str], timeout=120, timeout_interval=30, retries=10, symbols=None, channels=None, subscription=None, config: Union[Config, dict, str] = None, callbacks=None, max_depth=None, book_interval=1000, snapshot_interval=False, checksum_validation=False, cross_check=False, origin=None, exceptions=None, log_message_on_error=False, sandbox=False):
    """
    Base feed constructor: configuration, credentials, symbol mappings,
    subscriptions and callback tables.

    address: str, or dict
        address to be used to create the connection. The address protocol
        (wss or https) will be used to determine the connection type.
        Use a "str" to pass one single address, or a dict of option/address
    timeout: int
        Time, in seconds, between message to wait before a feed is
        considered dead and will be restarted. Set to -1 for infinite.
    timeout_interval: int
        Time, in seconds, between timeout checks.
    retries: int
        Number of times to retry a failed connection. Set to -1 for infinite
    max_depth: int
        Maximum number of levels per side to return in book updates
    book_interval: int
        Number of updates between snapshots. Only applicable when book
        deltas are enabled. Book deltas are enabled by subscribing to the
        book delta callback.
    snapshot_interval: bool/int
        Number of updates between snapshots. Only applicable when book
        delta is not enabled. Updates between snapshots are not delivered
        to the client
    checksum_validation: bool
        Toggle checksum validation, when supported by an exchange.
    cross_check: bool
        Toggle a check for a crossed book. Should not be needed on
        exchanges that support checksums or provide message sequence
        numbers.
    origin: str
        Passed into websocket connect. Sets the origin header.
    exceptions: list of exceptions
        These exceptions will not be handled internally and will be passed
        to the asyncio exception handler. To handle them feedhandler will
        need to be supplied with a custom exception handler. See the `run`
        method on FeedHandler, specifically the `exception_handler`
        keyword argument.
    log_message_on_error: bool
        If an exception is encountered in the connection handler, log the
        raw message
    sandbox: bool
        enable sandbox mode for exchanges that support this
    """
    # Accept an already-constructed Config, or build one from str/dict/None.
    if isinstance(config, Config):
        LOG.info('%s: reuse object Config containing the following main keys: %s', self.id, ", ".join(config.config.keys()))
        self.config = config
    else:
        LOG.info('%s: create Config from type: %r', self.id, type(config))
        self.config = Config(config)

    self.sandbox = sandbox
    self.log_on_error = log_message_on_error
    self.retries = retries
    self.exceptions = exceptions
    self.connection_handlers = []
    self.timeout = timeout
    self.timeout_interval = timeout_interval
    self.subscription = defaultdict(set)  # exchange channel -> set of exchange symbols
    self.address = address
    self.book_update_interval = book_interval
    self.snapshot_interval = snapshot_interval
    self.cross_check = cross_check
    self.updates = defaultdict(int)
    self.do_deltas = False  # flipped on below when a BOOK_DELTA callback is registered
    self.normalized_symbols = []
    self.max_depth = max_depth
    self.previous_book = defaultdict(dict)
    self.origin = origin
    self.checksum_validation = checksum_validation
    self.ws_defaults = {'ping_interval': 10, 'ping_timeout': None, 'max_size': 2**23, 'max_queue': None, 'origin': self.origin}
    # Credentials: environment variables take precedence over the config file.
    self.key_id = os.environ.get(f'CF_{self.id}_KEY_ID') or self.config[self.id.lower()].key_id
    self.key_secret = os.environ.get(f'CF_{self.id}_KEY_SECRET') or self.config[self.id.lower()].key_secret
    self._feed_config = defaultdict(list)
    self.http_conn = HTTPAsyncConn(self.id)

    # Populate the shared symbol cache once per exchange, then build the
    # normalized <-> exchange symbol maps from it.
    symbols_cache = Symbols
    if not symbols_cache.populated(self.id):
        self.symbol_mapping()
    self.normalized_symbol_mapping, self.exchange_info = symbols_cache.get(self.id)
    self.exchange_symbol_mapping = {value: key for key, value in self.normalized_symbol_mapping.items()}

    # Exactly one of the two subscription styles may be used.
    if subscription is not None and (symbols is not None or channels is not None):
        raise ValueError("Use subscription, or channels and symbols, not both")

    if subscription is not None:
        self.channels = list(subscription.keys())
        # NOTE(review): self.symbols is taken from the first channel only —
        # presumably all channels share the same symbol list; confirm.
        self.symbols = list(subscription[self.channels[0]])
        for channel in subscription:
            chan = feed_to_exchange(self.id, channel)
            if is_authenticated_channel(channel):
                if not self.key_id or not self.key_secret:
                    raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
            self.normalized_symbols.extend(subscription[channel])
            self.subscription[chan].update([self.std_symbol_to_exchange_symbol(symbol) for symbol in subscription[channel]])
            # NOTE(review): extends with ALL symbols accumulated so far
            # across channels, not just subscription[channel] — looks
            # unintended; confirm before changing.
            self._feed_config[channel].extend(self.normalized_symbols)

    if symbols and channels:
        if any(is_authenticated_channel(chan) for chan in channels):
            if not self.key_id or not self.key_secret:
                raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
        self.channels = channels
        self.symbols = symbols
        # if we dont have a subscription dict, we'll use symbols+channels and build one
        # (side-effect comprehension: every channel gets the full symbol list)
        [self._feed_config[channel].extend(symbols) for channel in channels]
        self.normalized_symbols = symbols
        symbols = [self.std_symbol_to_exchange_symbol(symbol) for symbol in symbols]
        channels = list(set([feed_to_exchange(self.id, chan) for chan in channels]))
        self.subscription = {chan: symbols for chan in channels}
    self._feed_config = dict(self._feed_config)

    self.l3_book = {}
    self.l2_book = {}
    # Default every callback type to a no-op; user callbacks overwrite below.
    self.callbacks = {FUNDING: Callback(None),
                      FUTURES_INDEX: Callback(None),
                      L2_BOOK: Callback(None),
                      L3_BOOK: Callback(None),
                      LIQUIDATIONS: Callback(None),
                      OPEN_INTEREST: Callback(None),
                      MARKET_INFO: Callback(None),
                      TICKER: Callback(None),
                      TRADES: Callback(None),
                      CANDLES: Callback(None),
                      ORDER_INFO: Callback(None)}

    if callbacks:
        for cb_type, cb_func in callbacks.items():
            self.callbacks[cb_type] = cb_func
            if cb_type == BOOK_DELTA:
                # a book-delta callback enables delta dispatch
                self.do_deltas = True

    # Normalize every callback entry to a list so dispatch can iterate.
    for key, callback in self.callbacks.items():
        if not isinstance(callback, list):
            self.callbacks[key] = [callback]
def __init__(self, address: Union[dict, str], sandbox=False, symbols=None, channels=None, subscription=None, config: Union[Config, dict, str] = None, callbacks=None, max_depth=None, book_interval=1000, snapshot_interval=False, checksum_validation=False, cross_check=False, origin=None):
    """
    Base feed constructor: configuration, credentials, symbol mappings,
    subscriptions and callback tables.

    address: str, or dict
        address to be used to create the connection. The address protocol
        (wss or https) will be used to determine the connection type.
        Use a "str" to pass one single address, or a dict of option/address
    sandbox: bool
        For authenticated channels, run against the sandbox websocket
        (when True)
    max_depth: int
        Maximum number of levels per side to return in book updates
    book_interval: int
        Number of updates between snapshots. Only applicable when book
        deltas are enabled. Book deltas are enabled by subscribing to the
        book delta callback.
    snapshot_interval: bool/int
        Number of updates between snapshots. Only applicable when book
        delta is not enabled. Updates between snapshots are not delivered
        to the client
    checksum_validation: bool
        Toggle checksum validation, when supported by an exchange.
    cross_check: bool
        Toggle a check for a crossed book. Should not be needed on
        exchanges that support checksums or provide message sequence
        numbers.
    origin: str
        Passed into websocket connect. Sets the origin header.
    """
    # Accept an already-constructed Config, or build one from str/dict/None.
    if isinstance(config, Config):
        LOG.info('%s: reuse object Config containing the following main keys: %s', self.id, ", ".join(config.config.keys()))
        self.config = config
    else:
        LOG.info('%s: create Config from type: %r', self.id, type(config))
        self.config = Config(config)

    self.subscription = defaultdict(set)  # exchange channel -> set of exchange symbols
    self.address = address
    self.book_update_interval = book_interval
    self.snapshot_interval = snapshot_interval
    self.cross_check = cross_check
    self.updates = defaultdict(int)
    self.do_deltas = False  # flipped on below when a BOOK_DELTA callback is registered
    self.symbols = []
    self.normalized_symbols = []
    self.channels = []
    self.max_depth = max_depth
    self.previous_book = defaultdict(dict)
    self.origin = origin
    self.checksum_validation = checksum_validation
    self.ws_defaults = {'ping_interval': 10, 'ping_timeout': None, 'max_size': 2**23, 'max_queue': None, 'origin': self.origin}
    # Credentials: environment variables take precedence over the config file.
    self.key_id = os.environ.get(f'CF_{self.id}_KEY_ID') or self.config[self.id.lower()].key_id
    self.key_secret = os.environ.get(f'CF_{self.id}_KEY_SECRET') or self.config[self.id.lower()].key_secret
    self._feed_config = defaultdict(list)

    # key_id is forwarded because some exchanges need it to list symbols
    # -- TODO confirm against load_exchange_symbol_mapping
    load_exchange_symbol_mapping(self.id, key_id=self.key_id)

    # Exactly one of the two subscription styles may be used.
    if subscription is not None and (symbols is not None or channels is not None):
        raise ValueError("Use subscription, or channels and symbols, not both")

    if subscription is not None:
        for channel in subscription:
            chan = feed_to_exchange(self.id, channel)
            if is_authenticated_channel(channel):
                if not self.key_id or not self.key_secret:
                    raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
            self.normalized_symbols.extend(subscription[channel])
            self.subscription[chan].update([symbol_std_to_exchange(symbol, self.id) for symbol in subscription[channel]])
            # NOTE(review): extends with ALL symbols accumulated so far
            # across channels, not just subscription[channel] — looks
            # unintended; confirm before changing.
            self._feed_config[channel].extend(self.normalized_symbols)

    if symbols:
        self.normalized_symbols = symbols
        self.symbols = [symbol_std_to_exchange(symbol, self.id) for symbol in symbols]
    if channels:
        self.channels = list(set([feed_to_exchange(self.id, chan) for chan in channels]))
        # side-effect comprehension: every channel gets the normalized
        # symbol list (set by the `if symbols` branch just above)
        [self._feed_config[channel].extend(self.normalized_symbols) for channel in channels]
        if any(is_authenticated_channel(chan) for chan in channels):
            if not self.key_id or not self.key_secret:
                raise ValueError("Authenticated channel subscribed to, but no auth keys provided")
    self._feed_config = dict(self._feed_config)

    self.l3_book = {}
    self.l2_book = {}
    # Default every callback type to a no-op; user callbacks overwrite below.
    self.callbacks = {FUNDING: Callback(None),
                      FUTURES_INDEX: Callback(None),
                      L2_BOOK: Callback(None),
                      L3_BOOK: Callback(None),
                      LIQUIDATIONS: Callback(None),
                      OPEN_INTEREST: Callback(None),
                      MARKET_INFO: Callback(None),
                      TICKER: Callback(None),
                      TRADES: Callback(None),
                      TRANSACTIONS: Callback(None),
                      VOLUME: Callback(None),
                      CANDLES: Callback(None),
                      ORDER_INFO: Callback(None)}

    if callbacks:
        for cb_type, cb_func in callbacks.items():
            self.callbacks[cb_type] = cb_func
            if cb_type == BOOK_DELTA:
                # a book-delta callback enables delta dispatch
                self.do_deltas = True

    # Normalize every callback entry to a list so dispatch can iterate.
    for key, callback in self.callbacks.items():
        if not isinstance(callback, list):
            self.callbacks[key] = [callback]
def __init__(self, config=None, sandbox=False):
    """Initialize shared exchange state; credentials start unset."""
    self.config = Config(file_name=config)
    self.sandbox = sandbox
    self.mapped = False
    self.key_id = None
    self.key_secret = None
    self.key_passphrase = None
def __init__(self, address: Union[dict, str], symbols=None, channels=None, subscription=None, config: Union[Config, dict, str] = None, callbacks=None, max_depth=None, book_interval=1000, snapshot_interval=False, checksum_validation=False, cross_check=False, origin=None):
    """
    Base feed constructor: configuration, symbol mappings, subscriptions
    and callback tables.

    max_depth: int
        Maximum number of levels per side to return in book updates
    book_interval: int
        Number of updates between snapshots. Only applicable when book
        deltas are enabled. Book deltas are enabled by subscribing to the
        book delta callback.
    snapshot_interval: bool/int
        Number of updates between snapshots. Only applicable when book
        delta is not enabled. Updates between snapshots are not delivered
        to the client
    checksum_validation: bool
        Toggle checksum validation, when supported by an exchange.
    cross_check: bool
        Toggle a check for a crossed book. Should not be needed on
        exchanges that support checksums or provide message sequence
        numbers.
    origin: str
        Passed into websocket connect. Sets the origin header.
    """
    # Accept an already-constructed Config, or build one from str/dict/None.
    if isinstance(config, Config):
        self.config = config
    else:
        self.config = Config(config)

    self.subscription = defaultdict(set)  # exchange channel -> set of exchange symbols
    self.address = address
    self.book_update_interval = book_interval
    self.snapshot_interval = snapshot_interval
    self.cross_check = cross_check
    self.updates = defaultdict(int)
    self.do_deltas = False  # flipped on below when a BOOK_DELTA callback is registered
    self.symbols = []
    self.normalized_symbols = []
    self.channels = []
    self.max_depth = max_depth
    self.previous_book = defaultdict(dict)
    self.origin = origin
    self.checksum_validation = checksum_validation
    self.ws_defaults = {'ping_interval': 10, 'ping_timeout': None, 'max_size': 2**23, 'max_queue': None, 'origin': self.origin}

    # key_id is forwarded because some exchanges need it to list symbols
    # -- TODO confirm against load_exchange_symbol_mapping
    key_id = self.config[self.id.lower()].key_id
    load_exchange_symbol_mapping(self.id, key_id=key_id)

    # Exactly one of the two subscription styles may be used.
    if subscription is not None and (symbols is not None or channels is not None):
        raise ValueError("Use subscription, or channels and symbols, not both")

    if subscription is not None:
        for channel in subscription:
            chan = feed_to_exchange(self.id, channel)
            self.subscription[chan].update([symbol_std_to_exchange(symbol, self.id) for symbol in subscription[channel]])
            # NOTE(review): here normalized_symbols is extended with
            # *exchange-format* symbols (subscription[chan]), unlike the
            # symbols branch below which stores std-format symbols —
            # confirm intended.
            self.normalized_symbols.extend(self.subscription[chan])
    if symbols:
        self.normalized_symbols = symbols
        self.symbols = [symbol_std_to_exchange(symbol, self.id) for symbol in symbols]
    if channels:
        self.channels = list(set([feed_to_exchange(self.id, chan) for chan in channels]))

    self.l3_book = {}
    self.l2_book = {}
    # Default every callback type to a no-op; user callbacks overwrite below.
    self.callbacks = {FUNDING: Callback(None),
                      FUTURES_INDEX: Callback(None),
                      L2_BOOK: Callback(None),
                      L3_BOOK: Callback(None),
                      LIQUIDATIONS: Callback(None),
                      OPEN_INTEREST: Callback(None),
                      MARKET_INFO: Callback(None),
                      TICKER: Callback(None),
                      TRADES: Callback(None),
                      TRANSACTIONS: Callback(None),
                      VOLUME: Callback(None)}

    if callbacks:
        for cb_type, cb_func in callbacks.items():
            self.callbacks[cb_type] = cb_func
            if cb_type == BOOK_DELTA:
                # a book-delta callback enables delta dispatch
                self.do_deltas = True

    # Normalize every callback entry to a list so dispatch can iterate.
    for key, callback in self.callbacks.items():
        if not isinstance(callback, list):
            self.callbacks[key] = [callback]