async def _run(self) -> None:
    try:
        while not self._ws.closed:
            message = await self._ws.receive()
            logger.debug(
                _("Received message from bitmex:{}").format(message.data))
            if message.type in (WSMsgType.CLOSE, WSMsgType.CLOSING):
                continue
            elif message.type == WSMsgType.CLOSED:
                break
            decode_message = json.loads(message.data)
            self._on_message(decode_message)  # call strategy method
            # The first websocket package is not a normal package,
            # so we use 'limit' to skip it.
            # TODO: trigger on trade and tick
            # if decode_message.get('action'):
            #     if decode_message.get('table') == 'execution':
            #         start = time.time()
            #         ret = self.strategy.on_trade(message=decode_message)
            #         if asyncio.iscoroutine(ret):
            #             await ret
            #         logger.debug(_('User on_trade process time: {}').format(round(time.time() - start, 7)))
            #     else:
            #         start = time.time()
            #         ret = self.strategy.tick(message=decode_message)
            #         if asyncio.iscoroutine(ret):
            #             await ret
            #         logger.debug(_('User tick process time: {}').format(round(time.time() - start, 7)))
    except asyncio.CancelledError:
        logger.warning(_('Your bitmex handler has been stopped'))
def process_one_point(self, point: DatePoint) -> None:
    logger.info(
        _('Downloading {} data on {}').format(self.kind,
                                              point.value.isoformat()))
    qstream = self.Streamer(point=point)
    qstream.process()
    logger.info(
        _('Finished downloading {} data on {}').format(
            self.kind, point.value.isoformat()))
async def _ping(self) -> None:
    try:
        while not self._ws.closed:
            if time.time() - self._last_comm_time > INTERVAL_FACTOR:
                logger.debug(
                    _('No communication for {} seconds. Sending a ping to '
                      'keep the connection open').format(INTERVAL_FACTOR))
                await self._ws.ping()
                self._last_comm_time = time.time()
            # Sleep at the loop level so the task never busy-spins.
            await asyncio.sleep(INTERVAL_FACTOR)
    except asyncio.CancelledError:
        logger.warning(_('Your bitmex ping task has been stopped'))
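# A minimal sketch of how the receive loop and the keep-alive loop could be
# scheduled together; the attribute names below are hypothetical, not taken
# from the source:
#
#     self._run_task = asyncio.ensure_future(self._run())
#     self._ping_task = asyncio.ensure_future(self._ping())
#     ...
#     # Cancelling a task raises asyncio.CancelledError inside it, which
#     # both coroutines above catch in order to log their shutdown.
#     self._run_task.cancel()
#     self._ping_task.cancel()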
def __iter__(self) -> Iterator[KlinePoint]:
    try:
        trade_hdf = pandas.HDFStore(self.input_file, 'r')
    except OSError:  # file does not exist
        raise DataDownloadError(
            _("The required trade.hdf doesn't exist. Generating the Bitmex "
              "kline data requires the Bitmex trade data, so you have to "
              "download the trade data first. "
              "Run 'monktrader download --kind trade'"))
    keys = trade_hdf.keys()
    trade_hdf.close()
    for key in keys:
        if os.path.exists(self.output_file):
            logger.info(_("Updating new kline data from new trade data."))
            try:
                kline_hdf = pandas.HDFStore(self.output_file, 'r')
                last = kline_hdf.select_column(key, 'index', start=-1)
                last_time = last[0]
                start_time = last_time + relativedelta(days=1)
            except KeyError:  # key does not exist yet
                start_time = START_DATE
            finally:
                kline_hdf.close()
        else:
            logger.info(
                _("You don't have any kline data. We are going to "
                  "generate the kline data from scratch"))
            start_time = START_DATE
        logger.info(
            _("Generating kline data {} now from date {}.").format(
                key, start_time))
        found = False
        iter_df = pandas.read_hdf(
            self.input_file, key,
            where="index>=datetime.datetime({},{},{})".format(
                start_time.year, start_time.month, start_time.day),
            columns=['price', 'homeNotional', 'foreignNotional'],
            chunksize=HDF_TRADE_TO_KLINE_CHUNK_SIZE, iterator=True)
        for df in iter_df:
            yield KlinePoint(df, key)
            found = True
        if found:
            # Finally yield an end point so the cached last date
            # gets processed.
            yield KlinePoint(None, key)
            logger.info(
                _("Successfully generated kline data {}").format(key))
def process(self) -> None:
    try:
        if self.kind == 'trade':
            dataframe = read_trade_tar(self._stream_requests(self.url),
                                       index='timestamp')
        elif self.kind == 'quote':
            dataframe = read_quote_tar(self._stream_requests(self.url),
                                       index='timestamp')
        cla_df = classify_df(dataframe, 'symbol')
        for key, df in cla_df.items():
            self.processed_key.add(key)
            df.to_hdf(self.dst_file, key, mode='a', format='table',
                      data_columns=True, index=False,
                      complib=HDF_FILE_COMPRESS_LIB,
                      complevel=HDF_FILE_COMPRESS_LEVEL, append=True)
    except Exception as e:
        self.rollback()
        logger.exception(
            _("Exception #{}# happened when processing {} {}").format(
                e, self.url, self.dst_file))
        raise DataDownloadError()
def __init__(self, kind: str, mode: str, dst_dir: str):
    logger.info(_('Start downloading the data'))
    self.mode = mode
    self.kind = kind
    self.dst_dir = dst_dir
    self.init_kind(mode, kind)
    self.init_mode(dst_dir)
    self.Streamer: Type[DownloadProcess]
def do_all(self) -> None:
    try:
        for point in self.process_points():
            self.process_one_point(point)
    except DataDownloadError:
        logger.info(
            _('An exception occurred while downloading data at point {}. '
              'Check!!').format(point.value))
        self.last()
def startstrategy(ctx: click.Context, name: str, directory: str) -> None:
    directory = os.path.abspath(directory)
    assert os.path.isdir(directory), _(
        'You have to provide an existing directory')
    template_dir = os.path.join(monkq.__path__[0], 'config',
                                'project_template')  # type: ignore
    target_dir = os.path.join(directory, name)
    if os.path.exists(target_dir):
        raise CommandError(_("The project name has already been used"))
    assure_dir(target_dir)

    prefix_length = len(template_dir) + 1
    for root, dirs, files in os.walk(template_dir):
        relative_dir = root[prefix_length:]
        if relative_dir:
            create_dir = os.path.join(target_dir, relative_dir)
            if not os.path.exists(create_dir):
                os.mkdir(create_dir)
        for dirname in dirs[:]:
            if dirname.startswith('.') or dirname == '__pycache__':
                dirs.remove(dirname)
        for filename in files:
            if filename.endswith(('.pyo', '.pyc')):
                # Ignore some files as they cause various breakages.
                continue
            old_path = os.path.join(root, filename)
            if filename.endswith('.py-tpl'):
                filename = filename.replace('.py-tpl', '.py')
                filename = filename.replace('@name@', name)
                new_path = os.path.join(target_dir, relative_dir, filename)
                with open(old_path) as template_f:
                    content = template_f.read()
                content = content.replace("@name@", name)
                with open(new_path, 'w') as new_file:
                    new_file.write(content)
            else:
                new_path = os.path.join(target_dir, relative_dir, filename)
                shutil.copyfile(old_path, new_path)
            shutil.copymode(old_path, new_path)
            make_writable(new_path)
def get(self, key: str) -> pandas.DataFrame:
    if key in self._cached:
        return self._cached[key]
    else:
        try:
            df = pandas.read_hdf(self.hdf_path, key)
            self._cached[key] = df
            return df
        except KeyError:
            raise DataError(
                _("HDF data {} not found in {}").format(key, self.hdf_path))
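# Illustrative usage of a reader built around this `get`; the class name and
# HDF key below are hypothetical examples, not from the source:
#
#     reader = KlineReader(hdf_path='kline.hdf')
#     df = reader.get('XBTUSD')   # first call reads from disk and caches
#     df = reader.get('XBTUSD')   # second call is served from self._cached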
def process(self) -> None:
    try:
        with open(self.dst_file, 'wb') as f:
            for chunk in self._stream_requests(self.url):
                f.write(chunk)
    except Exception as e:
        self.rollback()
        logger.exception(
            _("Exception #{}# happened when processing {} {}").format(
                e, self.url, self.dst_file))
        raise DataDownloadError()
def order_status(self) -> ORDER_STATUS:
    if self.traded_quantity == 0:
        return ORDER_STATUS.NOT_TRADED
    elif self.traded_quantity == self.quantity:
        return ORDER_STATUS.FULL_TRADED
    elif abs(self.traded_quantity) < abs(self.quantity):
        return ORDER_STATUS.PARTLY_TRADED
    else:
        raise ImpossibleError(
            _("order quantity: {}, traded quantity: {}").format(
                self.quantity, self.traded_quantity))
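# How order_status maps quantities, with illustrative numbers (quantity=100).
# abs() is used so the same logic covers short orders with negative quantity:
#
#     traded_quantity == 0     -> ORDER_STATUS.NOT_TRADED
#     traded_quantity == 40    -> ORDER_STATUS.PARTLY_TRADED
#     traded_quantity == 100   -> ORDER_STATUS.FULL_TRADED
#     traded_quantity == 150   -> ImpossibleError (over-traded)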
def __init__(self, start_time: datetime.datetime,
             end_time: datetime.datetime, frequency: str):
    assert is_aware_datetime(start_time)
    assert is_aware_datetime(end_time)
    if start_time >= end_time:
        raise SettingError(_("START_TIME can not be later than END_TIME"))
    self.start_time = start_time
    self.end_time = end_time
    self.frequency = FREQ_DICT.get(frequency)
    self.current = start_time
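# Both endpoints must be timezone-aware or the asserts fail. A hedged
# construction example; the class name `TimeContext` and frequency key '1m'
# are assumptions for illustration only:
#
#     import datetime
#     start = datetime.datetime(2018, 1, 1, tzinfo=datetime.timezone.utc)
#     end = datetime.datetime(2018, 6, 1, tzinfo=datetime.timezone.utc)
#     context = TimeContext(start_time=start, end_time=end, frequency='1m')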
def get_last_price(self, symbol: str, date_time: datetime.datetime) -> float:
    assert is_aware_datetime(date_time)
    kline = self._kline_store.get(symbol)
    time_target = make_datetime_exactly(date_time, "T", forward=False)
    try:
        bar = kline.loc[time_target]
        return bar['close']
    except KeyError:
        logger.warning(
            _("Instrument {} has no bar data at {}. Using 0 as the last "
              "price").format(symbol, date_time))
        return 0.0
def load_instruments(self, exchange: Optional['BitmexSimulateExchange']) -> None:
    logger.debug("Now loading the instruments data.")
    instruments_file = os.path.join(self.data_dir, INSTRUMENT_FILENAME)
    with open(instruments_file) as f:
        instruments_raw = json.load(f)
    for instrument_raw in instruments_raw:
        if instrument_raw['typ'] in self._abandon_instrument_type:
            continue
        instrument_cls = self.instrument_cls.get(instrument_raw['typ'])
        if instrument_cls is None:
            raise LoadDataError(
                _("Unsupported instrument type {}").format(
                    instrument_raw['typ']))
        instrument = instrument_cls.create(instrument_map, instrument_raw,
                                           exchange)
        self.instruments[instrument.symbol] = instrument
    logger.debug("Finished loading the instruments data.")
def __init__(self) -> None:
    for setting in dir(default_settings):
        if setting.isupper():
            setattr(self, setting, getattr(default_settings, setting))
    # self.SETTING_MODULE = module
    setting_module = os.environ.get(SETTING_MODULE, DEFAULT_SETTING_MODULE)
    try:
        mod = import_module(setting_module)
    except ImportError:
        warnings.warn(
            _("Can not find settings.py in the current path; "
              "the default settings are going to be used."))
        return
    for setting in dir(mod):
        if setting.isupper():
            setting_value = getattr(mod, setting)
            setattr(self, setting, setting_value)
def process(self) -> None:
    try:
        self.setup()
        for i, row in enumerate(
                self.csv_reader(self.stream_decompress_requests())):
            if self.chunk_process:
                self.cache.append(self.process_row(row))
                if i % self.cache_num == 0:
                    self.process_chunk()
                    self.cache = list()
            else:
                self.process_row(row)
        # Flush whatever is left in the cache after the last full chunk.
        if self.cache:
            self.process_chunk()
    except BaseException as e:
        self.rollback()
        logger.exception(
            _("Exception {} happened when processing {} data").format(
                e, self.date))
        raise DataDownloadError()
    self.cleanup()
def leverage(self) -> float:
    raise MarginError(
        _("Cross position doesn't support viewing the position leverage"))
def _on_message(self, message: dict) -> None:
    '''Handler for parsing WS messages.'''
    start = time.time()
    table = message['table'] if 'table' in message else None
    action = message['action'] if 'action' in message else None
    if 'subscribe' in message:
        if message['success']:
            logger.debug(_("Subscribed to {}").format(message['subscribe']))
        else:
            self.error(
                _("Unable to subscribe to {}. Error: \"{}\" Please check "
                  "and restart.").format(message['request']['args'][0],
                                         message['error']))
    elif 'unsubscribe' in message:
        if message['success']:
            logger.debug(
                _("Unsubscribed from {}.").format(message['unsubscribe']))
        else:
            self.error(
                _("Unable to unsubscribe from {}. Error: \"{}\" Please "
                  "check and restart.").format(message['request']['args'][0],
                                               message['error']))
    elif 'status' in message:
        if message['status'] == 400:
            self.error(message['error'])
        if message['status'] == 401:
            self.error(_("API Key incorrect, please check and restart."))
    elif action:
        if table not in self._data:
            self._data[table] = []
        if table not in self._keys:
            self._keys[table] = []
        # There are four possible actions from the WS:
        # 'partial' - full table image
        # 'insert'  - new row
        # 'update'  - update row
        # 'delete'  - delete row
        if action == 'partial':
            logger.debug("{}: partial".format(table))
            if message['table'] == "quote":
                for data in message['data']:
                    self.quote_data[data['symbol']] = data
            elif message['table'] == 'orderBookL2_25':
                for data in message['data']:
                    side_book = getattr(self.order_book[data['symbol']],
                                        data['side'])
                    side_book[data['id']] = data
            elif message['table'] == 'position':
                for data in message['data']:
                    assert data['currency'] == CURRENCY
                    self.positions[data['symbol']] = data
            elif message['table'] == 'margin':
                for data in message['data']:
                    assert data['currency'] == CURRENCY
                    self.margin = data
            else:
                self._data[table] += message['data']
            # Keys are communicated on partials to let you know how to
            # uniquely identify an item. We use it for updates.
            self._keys[table] = message.get('keys')
        elif action == 'insert':
            logger.debug('{}: inserting {}'.format(table, message['data']))
            if message['table'] == 'quote':
                for data in message['data']:
                    self.quote_data[data['symbol']] = data
            elif message['table'] == 'orderBookL2_25':
                for data in message['data']:
                    side_book = getattr(self.order_book[data['symbol']],
                                        data['side'])
                    side_book[data['id']] = data
            elif message['table'] == 'position':
                for data in message['data']:
                    assert data['currency'] == CURRENCY
                    self.positions[data['symbol']] = data
            elif message['table'] == 'margin':
                raise NotImplementedError
            else:
                self._data[table] += message['data']
                # Limit the max length of the table to avoid excessive
                # memory usage. Don't trim orders because we'll lose
                # valuable state if we do.
                if table not in ['order', 'orderBookL2'] and len(
                        self._data[table]) > BitmexWebsocket.MAX_TABLE_LEN:
                    self._data[table] = self._data[table][(
                        BitmexWebsocket.MAX_TABLE_LEN // 2):]
        elif action == 'update':
            logger.debug(_('{}: updating {}').format(table, message['data']))
            # Locate the item in the collection and update it.
            if message['table'] == "orderBookL2_25":
                for data in message['data']:
                    side_book = getattr(self.order_book[data['symbol']],
                                        data['side'])
                    bar = side_book[data['id']]
                    bar.update(data)
            elif message['table'] == 'position':
                for data in message['data']:
                    assert data['currency'] == CURRENCY
                    self.positions[data['symbol']].update(data)
            elif message['table'] == 'margin':
                for data in message['data']:
                    assert data['currency'] == CURRENCY
                    self.margin.update(data)
            else:
                for updateData in message['data']:
                    item = findItemByKeys(self._keys[table],
                                          self._data[table], updateData)
                    if not item:
                        # No item found to update. Could happen before push.
                        continue
                    # Log executions
                    if table == 'order':
                        is_canceled = ('ordStatus' in updateData and
                                       updateData['ordStatus'] == 'Canceled')
                        if 'cumQty' in updateData and not is_canceled:
                            contExecuted = updateData['cumQty'] - item['cumQty']
                            if contExecuted > 0:
                                logger.info(
                                    "Execution: {} {} contracts of {} at {}".format(
                                        item['side'], contExecuted,
                                        item['symbol'], item['price']))
                    # Update this item.
                    item.update(updateData)
                    # Remove canceled / filled orders
                    if table == 'order' and item['leavesQty'] <= 0:
                        self._data[table].remove(item)
        elif action == 'delete':
            logger.debug(_('{}: deleting {}').format(table, message['data']))
            # Locate the item in the collection and remove it.
            if message['table'] == "orderBookL2_25":
                for data in message['data']:
                    side_book = getattr(self.order_book[data['symbol']],
                                        data['side'])
                    side_book.pop(data['id'])
            else:
                for deleteData in message['data']:
                    item = findItemByKeys(self._keys[table],
                                          self._data[table], deleteData)
                    self._data[table].remove(item)
        else:
            raise ImpossibleError(_("Unknown action: {}").format(action))
    logger.debug(
        _("Tick data process time: {}").format(round(time.time() - start, 7)))
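# Hedged walk-through of the four actions on a hypothetical 'order' table;
# the payloads below are trimmed illustrations, not captured traffic:
#
#     {'table': 'order', 'action': 'partial', 'keys': ['orderID'], 'data': [...]}
#     {'table': 'order', 'action': 'insert', 'data': [{'orderID': 'a1', 'cumQty': 0, ...}]}
#     {'table': 'order', 'action': 'update', 'data': [{'orderID': 'a1', 'cumQty': 10}]}
#     {'table': 'order', 'action': 'delete', 'data': [{'orderID': 'a1'}]}
#
# The 'keys' list delivered with the partial is what findItemByKeys uses to
# match later update/delete rows against the cached table.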
def rollback(self) -> None:
    logger.info(
        _("Rollback! Removing the incomplete file {}").format(self.dst_file))
    os.remove(self.dst_file)
async def _curl_bitmex(self, path: str,
                       query: Optional[dict] = None,
                       postdict: Optional[dict] = None,
                       timeout: int = sentinel,
                       method: str = None,
                       max_retry: int = 5,
                       api_key: Optional[APIKey] = None) -> ClientResponse:
    url = self.base_url + path
    url_obj = URL(url)
    # if timeout is None:
    #     timeout = self.timeout

    # Default to POST if data is attached, GET otherwise
    if not method:
        method = 'POST' if postdict else 'GET'

    # By default don't retry POST or PUT. Retrying GET/DELETE is
    # okay because they are idempotent.
    # In the future we could allow retrying PUT,
    # so long as 'leavesQty' is not used (not idempotent),
    # or you could change the clOrdID
    # (set {"clOrdID": "new", "origClOrdID": "old"}) so that an amend
    # can't erroneously be applied twice.
    if query:
        url_obj = url_obj.with_query(query)

    headers = {}

    if postdict:
        data = json.dumps(postdict)
        headers.update({'content-type': "application/json"})
    else:
        data = ''

    if api_key:
        headers.update(gen_header_dict(api_key.api_secret, api_key.api_key,
                                       method, str(url_obj), data))

    if timeout is not sentinel:
        cli_timeout = ClientTimeout(total=timeout)
    else:
        cli_timeout = sentinel

    async def retry(retry_time: int) -> ClientResponse:
        logger.info("Retry, {} attempts remaining".format(retry_time))
        retry_time -= 1
        if retry_time < 0:
            logger.warning(_(
                "Request with args {}, {}, {}, {}, {}, {} failed "
                "with retries").format(path, query, postdict, cli_timeout,
                                       method, max_retry))
            raise MaxRetryError(url=path, method=method,
                                body=json.dumps(postdict), headers=headers)
        else:
            return await self._curl_bitmex(path, query, postdict, timeout,
                                           method, retry_time)

    try:
        resp = await self.session.request(method=method, url=str(url_obj),
                                          proxy=self._proxy, headers=headers,
                                          data=data, ssl=self._ssl,
                                          timeout=cli_timeout)
        if 200 <= resp.status < 300:
            return resp
        elif 400 <= resp.status <= 404:
            content = await resp.json()
            error = content['error']
            message = error['message'].lower() if error else ''
            name = error['name'].lower() if error else ''
            logger.warning(_("Bitmex request url:{}, method:{}, postdict:{}, "
                             "headers:{} error. "
                             "Return with status code:{}, error {}, "
                             "message: {}").format(resp.request_info.url,
                                                   resp.request_info.method,
                                                   postdict,
                                                   resp.request_info.headers,
                                                   resp.status, name,
                                                   message))
            if resp.status == 400:
                if 'insufficient available balance' in message:
                    logger.warning(
                        _('Account out of funds. The message: {}').format(
                            error["message"]))
                    raise MarginNotEnoughError(message)
            elif resp.status == 401:
                if api_key:
                    raise HttpAuthError(api_key.api_key, api_key.api_secret)
                else:
                    raise HttpAuthError('', '')
            elif resp.status == 403:
                raise HttpError(url=resp.request_info.url,
                                method=resp.request_info.method,
                                body=json.dumps(postdict),
                                headers=resp.request_info.headers,
                                message=message)
            elif resp.status == 404:
                if method == 'DELETE':
                    if postdict:
                        logger.warning(_("Order not found: {}").format(
                            postdict.get('orderID')))
                raise NotFoundError(url=resp.request_info.url,
                                    method=resp.request_info.method,
                                    body=json.dumps(postdict),
                                    headers=resp.request_info.headers,
                                    message=message)
            return resp  # exit_or_throw()
        elif resp.status == 429:
            logger.warning(_("Ratelimited on current request. Sleeping, "
                             "then trying again. Try fewer order pairs or "
                             "contact [email protected] to raise your "
                             "limits. Request: {} postdict: {}").format(
                                 url_obj, postdict))
            # Figure out how long we need to wait.
            ratelimit_reset = resp.headers['X-RateLimit-Reset']
            to_sleep = int(ratelimit_reset) - int(time.time())
            reset_str = datetime.datetime.fromtimestamp(
                int(ratelimit_reset)).strftime('%X')
            logger.warning(_("Your ratelimit will reset at {}. "
                             "Sleeping for {} seconds.").format(reset_str,
                                                                to_sleep))
            raise RateLimitError(url=resp.request_info.url,
                                 method=resp.request_info.method,
                                 body=json.dumps(postdict),
                                 headers=resp.request_info.headers,
                                 ratelimit_reset=ratelimit_reset)
        # 503 - BitMEX temporary downtime, likely due to a deploy. Try again.
        elif resp.status == 503:
            logger.warning(_("Unable to contact the BitMEX API (503), "
                             "retrying. Bitmex is most likely overloaded "
                             "now. Request: {} {} "
                             "Response header: {}").format(url_obj, postdict,
                                                           resp.headers))
            return await retry(max_retry)
        else:
            content = await resp.text()
            raise HttpError(url=resp.request_info.url,
                            method=resp.request_info.method,
                            body=json.dumps(postdict),
                            headers=resp.request_info.headers,
                            message=content)
    except asyncio.TimeoutError:
        # Timed out; re-run this request.
        logger.warning(_("Timed out on request: path:{}, query:{}, "
                         "postdict:{}, verb:{}, timeout:{}, retry:{}, "
                         "retrying...").format(path, query, postdict, method,
                                               timeout, max_retry))
        return await retry(max_retry)
def rollback(self) -> None:
    logger.info(
        _("Rollback! Removing the incomplete directory {}").format(
            self.dst_dir))
    self.csv_file_writers.close()
    shutil.rmtree(self.dst_dir)
def maint_margin(self, value: float) -> float:
    raise MarginError(
        _("You can not set the maintenance margin on a cross position"))