def __on_close(self, ws):
    """
    Internal websocket close callback.

    Logs the closure, clears both connection flags and invokes every
    registered close handler with the socket.

    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__, "Socket <%s> is closed." % self.id)
    self._connecting = self._connected = False
    for callback in self.on_close_handlers:
        callback(ws)
def on_message_handler(self, instmt, message):
    """
    Incoming message handler.

    Routes OkCoin futures websocket items to either order-book or trade
    processing based on the channel name.

    :param instmt: Instrument
    :param message: Message (iterable of channel items)
    """
    for item in message:
        if 'channel' in item:
            if re.search(r'ok_sub_futureusd_(.*)_depth_this_week', item['channel']):
                # Order-book update: snapshot the current depth so we can diff.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, item['data'])
                # Persist only when the parsed book differs from the previous one.
                if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                    instmt.incr_order_book_id()
                    self.insert_order_book(instmt)
            elif re.search(r'ok_sub_futureusd_(.*)_trade_this_week', item['channel']):
                trades = self.api_socket.parse_trade(instmt, item['data'])
                for trade in trades:
                    # Skip duplicates of the last recorded exchange trade id.
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        else:
            # Item without a channel key — nothing to process.
            Logger.info(self.__class__.__name__, 'Nothing to do!!')
def insert(self, table, columns, types, values, primary_key_index=[], is_orreplace=False, is_commit=True):
    """
    Insert into the table.

    :param table: Table name
    :param columns: Column array
    :param types: Type array
    :param values: Value array
    :param primary_key_index: An array of indices of primary keys in columns,
                    e.g. [0] means the first column is the primary key
    :param is_orreplace: Indicate if the query is "INSERT OR REPLACE"
    :param is_commit: Commit the transaction right after executing the statement
    :return: False when columns/values lengths mismatch; True otherwise
             (SQL errors are logged, not raised)
    """
    if len(columns) != len(values):
        return False

    column_names = ','.join(columns)
    value_string = ','.join([SqlClient.convert_str(e) for e in values])
    if is_orreplace:
        sql = "%s %s (%s) values (%s)" % (self.replace_keyword(), table, column_names, value_string)
    else:
        sql = "insert into %s (%s) values (%s)" % (table, column_names, value_string)

    self.lock.acquire()
    try:
        self.execute(sql)
        if is_commit:
            self.commit()
    except Exception as e:
        Logger.info(self.__class__.__name__, "SQL error: %s\nSQL: %s" % (e, sql))
    finally:
        # FIX: release in `finally` so the lock cannot leak if logging
        # (or anything else on the error path) raises.
        self.lock.release()
    return True
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler.

    Derives the order-book/trade channel ids from the instrument code and
    sends both subscription requests once per instrument.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if not instmt.get_subscribed():
        parts = instmt.get_instmt_code().split('_')
        if parts[0] == "futureusd":
            # Futures: channel name carries product, pair and contract period.
            instmt.set_order_book_channel_id(
                "ok_sub_%s_%s_depth_%s_20" % (parts[0], parts[1], parts[2]))
            instmt.set_trades_channel_id(
                "ok_sub_%s_%s_trade_%s" % (parts[0], parts[1], parts[2]))
        else:
            # Spot: channel name is derived from the full instrument code.
            instmt.set_order_book_channel_id("ok_sub_%s_depth_20" % instmt.get_instmt_code())
            instmt.set_trades_channel_id("ok_sub_%s_deals" % instmt.get_instmt_code())
        ws.send(self.api_socket.get_order_book_subscription_string(instmt))
        ws.send(self.api_socket.get_trades_subscription_string(instmt))
        instmt.set_subscribed(True)
def on_message_handler(self, instmt, messages):
    """
    Incoming message handler.

    Dispatches each message by comparing its channel against the order-book
    and trade channel ids stored on the instrument.

    :param instmt: Instrument
    :param messages: List of messages
    """
    for message in messages:
        keys = message.keys()
        if 'channel' in keys:
            if message['channel'] == instmt.get_order_book_channel_id():
                # Snapshot the current depth so the diff below is meaningful.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, message)
                # Insert only if the first 5 levels are different
                if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                    instmt.incr_order_book_id()
                    self.insert_order_book(instmt)
            elif message['channel'] == instmt.get_trades_channel_id():
                for trade_raw in message['data']:
                    trade = self.api_socket.parse_trade(instmt, trade_raw)
                    # Deduplicate against the last recorded exchange trade id.
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        else:
            # Unrecognised message — log its JSON for diagnostics.
            Logger.info(self.__class__.__name__, ' - ' + json.dumps(message))
def __on_open(self, ws):
    """
    Internal websocket open callback.

    Logs the event, marks the connection established and fans out to every
    registered open handler.

    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__, "Socket <%s> is opened." % self.id)
    self._connected = True
    for callback in self.on_open_handlers:
        callback(ws)
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler.

    Builds the spot depth/deals channel ids from the instrument code and
    subscribes to the order book (the trades subscription is intentionally
    not sent here).

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return
    parts = instmt.get_instmt_code().split('_')
    if len(parts) == 2:
        # Two-part code (e.g. BTC_USDT): channel names use both legs lowercased.
        pair = (parts[0].lower(), parts[1].lower())
        instmt.set_order_book_channel_id("ok_sub_spot_%s_%s_depth_5" % pair)
        instmt.set_trades_channel_id("ok_sub_spot_%s_%s_deals" % pair)
    else:
        # Otherwise the whole lowercased code is the channel key.
        code = instmt.get_instmt_code().lower()
        instmt.set_order_book_channel_id("ok_sub_spot_%s_depth_5" % code)
        instmt.set_trades_channel_id("ok_sub_spot_%s_deals" % code)
    ws.send(self.api_socket.get_order_book_subscription_string(instmt))
    # NOTE: trades subscription deliberately disabled in the original code.
    instmt.set_subscribed(True)
def create(self, table, columns, types, primary_key_index=[], is_ifnotexists=True):
    """
    Create table in the database (a csv file with a quoted header row).

    :param table: Table name
    :param columns: Column array (each entry may carry a type suffix, e.g. "id int")
    :param types: Type array
    :param primary_key_index: Unused/for comptibility
    :param is_ifnotexists: Create table if not exists keyword
    :return: False when columns/types lengths mismatch; True otherwise
    """
    file_path = os.path.join(self.file_directory, table + ".csv")
    # Strip any type suffix so only the bare column names are written.
    columns = [e.split(' ')[0] for e in columns]
    if len(columns) != len(types):
        return False

    self.lock.acquire()
    try:
        if os.path.isfile(file_path):
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\"" for e in columns]) + '\n')
    finally:
        # FIX: release in `finally` so a failed open/write cannot leave the
        # client deadlocked holding the lock.
        self.lock.release()
    return True
def on_message_handler(self, instmt, messages):
    """
    Incoming message handler.

    Handles data updates on the order-book/trade channels as well as
    subscription acknowledgements (messages carrying a 'success' key).

    :param instmt: Instrument
    :param messages: List of messages
    """
    for message in messages:
        keys = message.keys()
        if 'channel' in keys:
            if 'data' in keys:
                if message['channel'] == instmt.get_order_book_channel_id():
                    data = message['data']
                    # Snapshot current depth so the diff below is meaningful.
                    instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                    self.api_socket.parse_l2_depth(instmt, data)
                    # Insert only if the first 5 levels are different
                    if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                        instmt.incr_order_book_id()
                        self.insert_order_book(instmt)
                elif message['channel'] == instmt.get_trades_channel_id():
                    for trade_raw in message['data']:
                        trade = self.api_socket.parse_trade(instmt, trade_raw)
                        # Deduplicate against the last exchange trade id.
                        if trade.trade_id != instmt.get_exch_trade_id():
                            instmt.incr_trade_id()
                            instmt.set_exch_trade_id(trade.trade_id)
                            self.insert_trade(instmt, trade)
            elif 'success' in keys:
                # Subscription acknowledgement from the exchange.
                Logger.info(self.__class__.__name__,
                            "Subscription to channel %s is %s"
                            % (message['channel'], message['success']))
        else:
            # Unrecognised message — log its JSON for diagnostics.
            Logger.info(self.__class__.__name__, ' - ' + json.dumps(message))
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler.

    A three-part instrument code (product_pair_period) is treated as a
    future; anything else as spot. Subscribes to both channels once.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return
    parts = instmt.get_instmt_code().split('_')
    if len(parts) == 3:
        # Future instruments
        instmt.set_order_book_channel_id(
            "ok_sub_%s_%s_depth_%s_20" % (parts[0], parts[1], parts[2]))
        instmt.set_trades_channel_id(
            "ok_sub_%s_%s_trade_%s" % (parts[0], parts[1], parts[2]))
    else:
        # Spot instruments
        instmt.set_order_book_channel_id("ok_sub_%s_depth_20" % instmt.get_instmt_code())
        instmt.set_trades_channel_id("ok_sub_%s_trades" % instmt.get_instmt_code())
    ws.send(self.api_socket.get_order_book_subscription_string(instmt))
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (Huobi-style protocol).

    Answers server pings with a pong, then routes 'ch' (channel) messages
    to trade or depth processing by substring match on the channel name.

    :param instmt: Instrument
    :param message: Message
    """
    if 'ping' in message:
        # handle ping response: echo the timestamp back as a pong.
        ts = message['ping']
        self.api_socket.send(json.dumps({'pong': ts}))
    elif 'ch' in message:
        if 'trade.detail' in message['ch']:
            trades = self.api_socket.parse_trade(instmt, message['tick']['data'])
            for trade in trades:
                # Deduplicate against the last recorded exchange trade id.
                if trade.trade_id != instmt.get_exch_trade_id():
                    instmt.incr_trade_id()
                    instmt.set_exch_trade_id(trade.trade_id)
                    self.insert_trade(instmt, trade)
        elif 'depth.step' in message['ch']:
            # Snapshot current depth so the diff below is meaningful.
            instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
            self.api_socket.parse_l2_depth(instmt, message['tick'])
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        else:
            Logger.error(self.__class__.__name__, 'Not Trade or Market')
    else:
        Logger.info(self.__class__.__name__, 'Nothing to do!!')
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (Bitfinex protocol).

    Dict messages are control events (version info, subscription acks);
    list messages are data updates whose first element is the numeric
    channel id assigned at subscription time.

    :param instmt: Instrument
    :param message: Message (dict control event or list data update)
    """
    if isinstance(message, dict):
        keys = message.keys()
        if 'event' in keys and message['event'] == 'info' and 'version' in keys:
            Logger.info(self.__class__.__name__,
                        "Bitfinex version: %s" % message['version'])
        elif 'event' in keys and message['event'] == 'subscribed':
            if instmt.get_instmt_code() == message['pair']:
                # Remember which numeric channel id maps to which stream.
                if message['channel'] == 'book':
                    instmt.set_order_book_channel_id(message['chanId'])
                elif message['channel'] == 'trades':
                    instmt.set_trades_channel_id(message['chanId'])
                else:
                    raise Exception("Unknown channel %s : <%s>" %
                                    (message['channel'], message))
                Logger.info(self.__class__.__name__,
                            'Subscription: %s, pair: %s, channel Id: %s' %
                            (message['channel'], instmt.get_instmt_code(),
                             message['chanId']))
    elif isinstance(message, list):
        if message[0] == instmt.get_order_book_channel_id():
            if isinstance(message[1], list):
                # Initial snapshot: the payload is a nested list of levels.
                self.api_socket.parse_l2_depth(instmt, message[1])
            elif len(message) != 2:
                # Incremental update: diff against the previous depth.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, message)
            else:
                # Heartbeat ([chanId, 'hb']) — nothing to do.
                return
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        elif message[0] == instmt.get_trades_channel_id():
            # No recovery trade: snapshot replays (list payloads) are ignored;
            # only live 'tu' (trade update) events are recorded.
            if message[1] == 'tu':
                trade = self.api_socket.parse_trade(instmt, message[3:])
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.incr_trade_id()
                    instmt.set_exch_trade_id(trade.trade_id)
                    self.insert_trade(instmt, trade)
def create(self, table, columns, types, primary_key_index=[], is_ifnotexists=True):
    """
    Create table in the database.
    Caveat - Assign the first few column as the keys!!!

    :param table: Table name
    :param columns: Column array
    :param types: Type array
    :param primary_key_index: Indices of the key columns in ``columns``
    :param is_ifnotexists: Create table if not exists keyword
    :return: True (creation errors are logged rather than raised)
    :raises Exception: When columns and types lengths differ
    """
    if len(columns) != len(types):
        raise Exception("Incorrect create statement. Number of columns and that of types are different.\n%s\n%s" % \
                        (columns, types))

    if is_ifnotexists:
        # "\\v" lists the variables (tables) defined on the Kdb+ instance.
        ret = self.conn("\\v")
        if ret is not None:
            for t in ret:
                if table == self.decode_qtypes(t):
                    Logger.info(self.__class__.__name__,
                                "Table %s has been created." % table)
                    return True
        Logger.info(self.__class__.__name__,
                    "Table %s is going to be created." % table)

    # Build the q column declarations from the python types.
    c = columns[:]
    for i in range(0, len(types)):
        t = self.convert_type(types[i])
        if t is str:
            # date_time columns become timestamps; other strings, symbols.
            if columns[i].find('date_time') > -1:
                c[i] += ":`timestamp$()"
            else:
                c[i] += ":`symbol$()"
        elif t is float:
            c[i] += ":`float$()"
        elif t is int:
            c[i] += ":`long$()"

    # Move key columns into the keyed part of the table definition.
    keys = []
    for i in primary_key_index:
        keys.append(c[i])
    for i in sorted(primary_key_index, reverse=True):
        del c[i]

    if len(keys) > 0:
        command = '%s:([%s] %s)' % (table, '; '.join(keys), '; '.join(c))
    else:
        command = '%s:(%s)' % (table, '; '.join(c))
    self.lock.acquire()
    try:
        self.conn.sync(command)
    except Exception as e:
        # FIX: corrected typo in the error message ("creat" -> "create").
        Logger.error(self.__class__.__name__,
                     "Error in create statement(%s).\n%s" % (command, e))
    finally:
        self.lock.release()
    return True
def on_close_handler(self, instmt, ws):
    """
    Socket on close handler.

    Marks the instrument as unsubscribed so the next open re-subscribes.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is unsubscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    instmt.set_subscribed(False)
def connect(self, **kwargs):
    """
    Bind the zmq publisher socket.

    :param addr: Zmq address to bind, e.g. "tcp://127.0.0.1:3306"
    :return: True when the underlying socket object exists
    """
    addr = kwargs['addr']
    Logger.info(self.__class__.__name__,
                'Zmq client is connecting to %s' % addr)
    self.conn.bind(addr)
    return self.conn is not None
def on_close_handler(self, instmt, ws):
    """
    Socket on close handler.

    Marks the instrument as unsubscribed so the next open re-subscribes.

    :param instmt: Instrument
    :param ws: Web socket
    """
    # FIX: the close log wrongly said "is subscribed"; aligned with the
    # sibling close handler which logs "is unsubscribed".
    Logger.info(self.__class__.__name__,
                "Instrument %s is unsubscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    instmt.set_subscribed(False)
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler.

    Sends only the trades subscription request, once per instrument.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def insert(self, table, columns, types, values, primary_key_index=[], is_orreplace=False, is_commit=True):
    """
    Insert into the table, optionally as a PostgreSQL (>= 9.5) upsert.

    :param table: Table name
    :param columns: Column array
    :param types: Type array
    :param values: Value array
    :param primary_key_index: An array of indices of primary keys in columns,
                    e.g. [0] means the first column is the primary key
    :param is_orreplace: Indicate if the query is "INSERT OR REPLACE" (upsert)
    :param is_commit: Commit the transaction right after executing
    :return: False when columns/values lengths mismatch; True otherwise
             (SQL errors are logged and rolled back, not raised)
    """
    if len(columns) != len(values):
        return False

    sql = """insert into {table} ({column_names}) values ({value_string})
             on conflict ({pk_column_names})
             do update set ({non_pk_column_names}) = ({non_pk_values})
             where {upsert_pk_selector}
          """
    column_names = ','.join(columns)
    value_string = ','.join([SqlClient.convert_str(e) for e in values])
    if is_orreplace:
        # Use upsert feature from PostgresSQL >= 9.5
        pk_column_list = [columns[i] for i in primary_key_index]
        pk_columns = ','.join(pk_column_list)
        pk_values = [SqlClient.convert_str(values[i]) for i in primary_key_index]
        pk_selectors = []
        # FIX: pk_values was indexed by the *column* index rather than the
        # position within primary_key_index, which raises IndexError for any
        # key index >= len(primary_key_index).
        for pos, col_idx in enumerate(primary_key_index):
            pk_selectors.append('{0}.{1}={2}'.format(table, columns[col_idx], pk_values[pos]))
        upsert_pk_selector = ' and '.join(pk_selectors)
        # FIX: the exclusion test previously ran against the comma-joined
        # string, so any column whose name is a substring of a key name
        # (e.g. 'id' vs 'trade_id') was wrongly dropped from the update set.
        non_pk_column_list = [item for item in columns if item not in pk_column_list]
        non_pk_values = ','.join([SqlClient.convert_str(values[columns.index(item)])
                                  for item in non_pk_column_list])
        non_pk_columns = ','.join(non_pk_column_list)
        sql = sql.format(table=table,
                         column_names=column_names,
                         value_string=value_string,
                         pk_column_names=pk_columns,
                         non_pk_column_names=non_pk_columns,
                         non_pk_values=non_pk_values,
                         upsert_pk_selector=upsert_pk_selector)
    else:
        sql = "insert into %s (%s) values (%s)" % (table, column_names, value_string)

    self.lock.acquire()
    try:
        self.execute(sql)
        if is_commit:
            self.commit()
    except Exception as e:
        Logger.info(self.__class__.__name__, "SQL error: %s\nSQL: %s" % (e, sql))
        self.conn.rollback()
    finally:
        # FIX: release in `finally` so a failure (even in rollback/logging)
        # cannot leave the lock held forever.
        self.lock.release()
    return True
def connect(self, **kwargs):
    """
    Create the Kafka producer connection.

    :param addr: Bootstrap server address, e.g. "127.0.0.1:9092"
    :return: True when the producer object was created
    """
    addr = kwargs['addr']
    Logger.info(self.__class__.__name__,
                'Kafka client is connecting to %s' % addr)
    # Values are published as UTF-8 encoded JSON.
    self.conn = KafkaProducer(
        bootstrap_servers=addr,
        value_serializer=lambda v: json.dumps(v).encode('utf-8'))
    return self.conn is not None
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (Bitfinex protocol, rate-limited variant).

    Dict messages are control events; list messages are data updates keyed
    by the numeric channel id. Order-book inserts are additionally gated by
    ``self.rate_limit()``.

    :param instmt: Instrument
    :param message: Message (dict control event or list data update)
    """
    if isinstance(message, dict):
        keys = message.keys()
        if 'event' in keys and message[
                'event'] == 'info' and 'version' in keys:
            Logger.info(self.__class__.__name__,
                        "Bitfinex version: %s" % message['version'])
        elif 'event' in keys and message['event'] == 'subscribed':
            if instmt.get_instmt_code() == message['pair']:
                # Remember which numeric channel id maps to which stream.
                if message['channel'] == 'book':
                    instmt.set_order_book_channel_id(message['chanId'])
                elif message['channel'] == 'trades':
                    instmt.set_trades_channel_id(message['chanId'])
                else:
                    raise Exception("Unknown channel %s : <%s>" %
                                    (message['channel'], message))
                Logger.info(self.__class__.__name__,
                            'Subscription: %s, pair: %s, channel Id: %s' %
                            (message['channel'], instmt.get_instmt_code(),
                             message['chanId']))
    elif isinstance(message, list):
        if message[0] == instmt.get_order_book_channel_id():
            if isinstance(message[1], list):
                # Initial snapshot: nested list of levels.
                self.api_socket.parse_l2_depth(instmt, message[1])
            elif len(message) != 2:
                # Incremental update: diff against the previous depth.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, message)
            else:
                # Heartbeat — nothing to do.
                return
            # Throttle order-book inserts.
            if self.rate_limit():
                return
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        elif message[0] == instmt.get_trades_channel_id():
            # Only live 'tu' (trade update) events are recorded.
            if message[1] == 'tu':
                trade = self.api_socket.parse_trade(instmt, message[3:])
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.incr_trade_id()
                    instmt.set_exch_trade_id(trade.trade_id)
                    self.insert_trade(instmt, trade)
def start(self, instmt):
    """
    Start the exchange gateway for one instrument.

    Initialises 20-level depth books, creates the snapshot table, then
    opens the websocket with per-instrument handlers bound via partial.

    :param instmt: Instrument
    :return: List of threads
    """
    instmt.set_l2_depth(L2Depth(20))
    instmt.set_prev_l2_depth(L2Depth(20))
    snapshot_table = self.get_instmt_snapshot_table_name(
        instmt.get_exchange_name(), instmt.get_instmt_name())
    instmt.set_instmt_snapshot_table_name(snapshot_table)
    self.init_instmt_snapshot_table(instmt)
    Logger.info(self.__class__.__name__,
                'instmt snapshot table: {}'.format(
                    instmt.get_instmt_snapshot_table_name()))
    worker = self.api_socket.connect(
        self.api_socket.get_link(),
        on_message_handler=partial(self.on_message_handler, instmt),
        on_open_handler=partial(self.on_open_handler, instmt),
        on_close_handler=partial(self.on_close_handler, instmt))
    return [worker]
def insert(self, table, columns, types, values, primary_key_index=[], is_orreplace=False, is_commit=True):
    """
    Publish a row to the Kafka topic named after the table.

    :param table: Table name (used as the Kafka topic)
    :param columns: Column array
    :param types: Type array
    :param values: Value array
    :param primary_key_index: Unused here; kept for interface compatibility
    :param is_orreplace: Unused here; kept for interface compatibility
    :param is_commit: Unused here; kept for interface compatibility
    :return: True when the send was acknowledged, False on producer error
    """
    payload = dict(zip(columns, values))
    payload['table'] = table
    self.lock.acquire()
    future = self.conn.send(table, value=payload)
    result = True
    try:
        # Block for 'synchronous' sends
        metadata = future.get(timeout=60)
        Logger.info(
            self.__class__.__name__,
            "topic: %s, offset: %s" % (metadata.topic, metadata.offset))
    except Exception as ex:
        Logger.error(self.__class__.__name__, "exception in producer:%s" % ex)
        result = False
    finally:
        self.lock.release()
    return result
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler.

    Derives both channel ids directly from the instrument code and sends
    the two subscription requests unconditionally (the subscribed-state
    guard is deliberately disabled in this gateway).

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    code = instmt.get_instmt_code()
    instmt.set_order_book_channel_id("%s_depth" % code)
    instmt.set_trades_channel_id("%s_trades" % code)
    ws.send(self.api_socket.get_order_book_subscription_string(instmt))
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def connect(self, url, on_message_handler=None, on_open_handler=None, on_close_handler=None, on_error_handler=None, reconnect_interval=10):
    """
    Open the websocket connection and register optional event handlers.

    :param url: Url link
    :param on_message_handler: Message handler which take the message as the first argument
    :param on_open_handler: Socket open handler which take the socket as the first argument
    :param on_close_handler: Socket close handler which take the socket as the first argument
    :param on_error_handler: Socket error handler which take the socket as the first argument
                             and the error as the second argument
    :param reconnect_interval: The time interval for reconnection
    :return: The worker thread driving the socket loop
    """
    Logger.info(self.__class__.__name__, "Connecting to socket <%s>..." % self.id)
    # Register whichever handlers were supplied.
    registrations = ((on_message_handler, self.on_message_handlers),
                     (on_open_handler, self.on_open_handlers),
                     (on_close_handler, self.on_close_handlers),
                     (on_error_handler, self.on_error_handlers))
    for handler, registry in registrations:
        if handler is not None:
            registry.append(handler)
    if not self._connecting and not self._connected:
        # First connection attempt: spin up the socket worker thread.
        self._connecting = True
        self.ws = websocket.WebSocketApp(url,
                                         on_message=self.__on_message,
                                         on_close=self.__on_close,
                                         on_open=self.__on_open,
                                         on_error=self.__on_error)
        self.wst = threading.Thread(
            target=lambda: self.__start(reconnect_interval=reconnect_interval))
        self.wst.start()
    return self.wst
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (BitMEX-style protocol, orderBookL2 feed).

    :param instmt: Instrument
    :param message: Message
    """
    keys = message.keys()
    if 'info' in keys:
        # Welcome/info banner from the exchange.
        Logger.info(self.__class__.__name__, message['info'])
    elif 'subscribe' in keys:
        Logger.info(self.__class__.__name__, 'Subscription of %s is %s' %
                    (message['request']['args'],
                     'successful' if message['success'] else 'failed'))
    elif 'table' in keys:
        if message['table'] == 'trade':
            for trade_raw in message['data']:
                if trade_raw["symbol"] == instmt.get_instmt_code():
                    # Filter out the initial subscriptions
                    trade = self.api_socket.parse_trade(instmt, trade_raw)
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        elif message['table'] == 'orderBookL2':
            # parse_l2_depth returns a new depth object here (or None when
            # the update does not yield a book).
            l2_depth = self.api_socket.parse_l2_depth(instmt, message)
            if l2_depth is not None and l2_depth.is_diff(instmt.get_l2_depth()):
                instmt.set_prev_l2_depth(instmt.get_l2_depth())
                instmt.set_l2_depth(l2_depth)
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        else:
            # Table we do not track — dump it for diagnostics.
            Logger.info(self.__class__.__name__, json.dumps(message, indent=2))
    else:
        Logger.error(self.__class__.__name__,
                     "Unrecognised message:\n" + json.dumps(message))
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (BitMEX-style protocol, orderBook10 feed).

    :param instmt: Instrument
    :param message: Message
    """
    keys = message.keys()
    if 'info' in keys:
        # Welcome/info banner from the exchange.
        Logger.info(self.__class__.__name__, message['info'])
    elif 'subscribe' in keys:
        Logger.info(self.__class__.__name__, 'Subscription of %s is %s' %
                    (message['request']['args'],
                     'successful' if message['success'] else 'failed'))
    elif 'table' in keys:
        if message['table'] == 'trade':
            for trade_raw in message['data']:
                if trade_raw["symbol"] == instmt.get_instmt_code():
                    # Filter out the initial subscriptions
                    trade = self.api_socket.parse_trade(instmt, trade_raw)
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        elif message['table'] == 'orderBook10':
            for data in message['data']:
                if data["symbol"] == instmt.get_instmt_code():
                    # Snapshot current depth so the diff below is meaningful.
                    instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                    self.api_socket.parse_l2_depth(instmt, data)
                    if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                        instmt.incr_order_book_id()
                        self.insert_order_book(instmt)
        else:
            # Table we do not track — dump it for diagnostics.
            Logger.info(self.__class__.__name__, json.dumps(message, indent=2))
    else:
        Logger.error(self.__class__.__name__,
                     "Unrecognised message:\n" + json.dumps(message))
def connect(self, url, on_message_handler=None, on_open_handler=None, on_close_handler=None, on_error_handler=None, reconnect_interval=10):
    """
    Connect the websocket and register the given callbacks.

    :param url: Url link
    :param on_message_handler: Message handler which take the message as the first argument
    :param on_open_handler: Socket open handler which take the socket as the first argument
    :param on_close_handler: Socket close handler which take the socket as the first argument
    :param on_error_handler: Socket error handler which take the socket as the first argument
                             and the error as the second argument
    :param reconnect_interval: The time interval for reconnection
    :return: The worker thread driving the socket loop
    """
    Logger.info(self.__class__.__name__, "Connecting to socket <%s>..." % self.id)
    # Append each supplied callback to its registry.
    for registry, handler in [(self.on_message_handlers, on_message_handler),
                              (self.on_open_handlers, on_open_handler),
                              (self.on_close_handlers, on_close_handler),
                              (self.on_error_handlers, on_error_handler)]:
        if handler is not None:
            registry.append(handler)
    already_active = self._connecting or self._connected
    if not already_active:
        self._connecting = True
        self.ws = websocket.WebSocketApp(url,
                                         on_message=self.__on_message,
                                         on_close=self.__on_close,
                                         on_open=self.__on_open,
                                         on_error=self.__on_error)
        self.wst = threading.Thread(
            target=lambda: self.__start(reconnect_interval=reconnect_interval))
        self.wst.start()
    return self.wst
def on_message_handler(self, instmt, messages):
    """
    Incoming message handler.

    Variant in which ``parse_l2_depth`` returns a fresh depth object; every
    non-None result is inserted (the is_diff gate is disabled here).

    :param instmt: Instrument
    :param messages: List of messages
    """
    for message in messages:
        keys = message.keys()
        if 'channel' in keys:
            if 'data' in keys:
                if message['channel'] == instmt.get_order_book_channel_id():
                    data = message['data']
                    l2_depth = self.api_socket.parse_l2_depth(instmt, data)
                    if l2_depth is not None:
                        # NOTE: the is_diff comparison was deliberately
                        # disabled — every parsed book is recorded.
                        instmt.set_prev_l2_depth(instmt.get_l2_depth())
                        instmt.set_l2_depth(l2_depth)
                        instmt.incr_order_book_id()
                        self.insert_order_book(instmt)
                elif message['channel'] == instmt.get_trades_channel_id():
                    for trade_raw in message['data']:
                        trade = self.api_socket.parse_trade(instmt, trade_raw)
                        # Deduplicate against the last exchange trade id.
                        if trade.trade_id != instmt.get_exch_trade_id():
                            instmt.incr_trade_id()
                            instmt.set_exch_trade_id(trade.trade_id)
                            self.insert_trade(instmt, trade)
            elif 'success' in keys:
                # Subscription acknowledgement from the exchange.
                Logger.info(self.__class__.__name__,
                            "Subscription to channel %s is %s"
                            % (message['channel'], message['success']))
        else:
            # Unrecognised message — log its JSON for diagnostics.
            Logger.info(self.__class__.__name__, ' - ' + json.dumps(message))
def create(self, table, columns, types, is_ifnotexists=True):
    """
    Create table in the database (a csv file with a quoted header row).

    :param table: Table name
    :param columns: Column array (each entry may carry a type suffix)
    :param types: Type array
    :param is_ifnotexists: Create table if not exists keyword
    :return: False when columns/types lengths mismatch; True otherwise
    """
    file_path = os.path.join(self.file_directory, table + ".csv")
    # FIX: removed stray debug `print(file_path)` that polluted stdout.
    columns = [e.split(' ')[0] for e in columns]
    if len(columns) != len(types):
        return False

    self.lock.acquire()
    try:
        if os.path.isfile(file_path):
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\"" for e in columns]) + '\n')
    finally:
        # FIX: release in `finally` so a failed open/write cannot leave the
        # client deadlocked holding the lock.
        self.lock.release()
    return True
def create(self, table, columns, types, is_ifnotexists=True):
    """
    Create the csv "table", rotating daily: when the UTC day has changed
    since the last call, the previous day's file is gzip-compressed in the
    background and a new file is started.

    :param table: Table name (treated as a strftime pattern, so it may embed
                  date fields such as %Y%m%d)
    :param columns: Column array (each entry may carry a type suffix)
    :param types: Type array
    :param is_ifnotexists: Create table if not exists keyword
    :return: False when columns/types lengths mismatch; True otherwise
    """
    file_path = os.path.join(self.file_directory, self.date.strftime(table) + ".csv")
    columns = [e.split(' ')[0] for e in columns]
    if len(columns) != len(types):
        return False

    current_date = datetime.utcnow()
    if current_date.day != self.date.day:
        # Day rolled over: compress yesterday's file asynchronously.
        if os.path.exists(file_path):
            if platform.system() == "Windows":
                compression_command = "gzip.exe "
            else:
                compression_command = "gzip "
            command = compression_command + file_path
            subprocess.Popen(shlex.split(command))
        self.date = current_date
        file_path = os.path.join(self.file_directory, self.date.strftime(table) + ".csv")

    self.lock.acquire()
    try:
        if os.path.isfile(file_path):
            # FIX: the path was passed as a third argument and never
            # formatted into the message; use the file's 2-arg % convention.
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\"" for e in columns]) + '\n')
    finally:
        # FIX: release in `finally` so a failed open/write cannot leave the
        # client deadlocked holding the lock.
        self.lock.release()
    return True
def connect(self, **kwargs):
    """
    Open a connection to the Kdb+ database.

    :param host: Hostname
    :param port: Port
    :return: True when the connection is established
    """
    host = kwargs['host']
    port = kwargs['port']
    Logger.info(self.__class__.__name__,
                'Kdb+ database client is connecting to %s:%d' % (host, port))
    self.conn = qconnection.QConnection(host=host, port=port)
    self.conn.open()
    if self.conn.is_connected():
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is successful.' % (host, port))
    else:
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is failed.' % (host, port))
    return self.conn.is_connected()
def connect(self, **kwargs):
    """
    Open a connection to the Kdb+ database and log the outcome.

    :param host: Hostname
    :param port: Port
    :return: True when the connection is established
    """
    host, port = kwargs['host'], kwargs['port']
    Logger.info(self.__class__.__name__,
                'Kdb+ database client is connecting to %s:%d' % (host, port))
    self.conn = qconnection.QConnection(host=host, port=port)
    self.conn.open()
    established = self.conn.is_connected()
    if established:
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is successful.' % (host, port))
    else:
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is failed.' % (host, port))
    return self.conn.is_connected()
def __start(self, reconnect_interval=10):
    """
    Drive the websocket loop forever, reconnecting after each drop.

    :param reconnect_interval: Seconds to wait before each reconnection
    """
    while True:
        # run_forever returns when the connection drops; log and retry.
        self.ws.run_forever()
        Logger.info(self.__class__.__name__,
                    "Socket <%s> is going to reconnect..." % self.id)
        time.sleep(reconnect_interval)
def execute(self, sql):
    """
    Execute the sql command.

    Stub implementation: the statement is only logged, not run against any
    backend.

    :param sql: SQL command
    """
    Logger.info(self.__class__.__name__, "Execute command = %s" % sql)
def __on_error(self, ws, error):
    """
    Internal websocket error callback: log the error and fan out to every
    registered error handler.

    :param ws: Web socket
    :param error: The error raised by the socket layer
    """
    Logger.info(self.__class__.__name__,
                "Socket <%s> error:\n %s" % (self.id, error))
    for callback in self.on_error_handlers:
        callback(ws, error)
""" params = { 'address-id': address_id, 'amount': amount, "currency": currency, "fee": fee, "addr-tag": addr_tag } url = '/v1/dw/withdraw/api/create' return hbu.api_key_post(params, url) # 申请取消提现虚拟币 def cancel_withdraw(address_id): """ :param address_id: :return: { "status": "ok", "data": 700 } """ params = {} url = '/v1/dw/withdraw-virtual/{0}/cancel'.format(address_id) return hbu.api_key_post(params, url) if __name__ == '__main__': Logger.info(get_symbols()) #print get_symbols()
def main():
    """
    Entry point: parse CLI options, connect the selected storage backends,
    load the instrument subscriptions and start one gateway thread per
    subscribed exchange/instrument pair.
    """
    parser = argparse.ArgumentParser(description='Bitcoin exchange market data feed handler.')
    parser.add_argument('-instmts', action='store', help='Instrument subscription file.', default='subscriptions.ini')
    parser.add_argument('-exchtime', action='store_true', help='Use exchange timestamp.')
    parser.add_argument('-kdb', action='store_true', help='Use Kdb+ as database.')
    parser.add_argument('-csv', action='store_true', help='Use csv file as database.')
    parser.add_argument('-sqlite', action='store_true', help='Use SQLite database.')
    parser.add_argument('-mysql', action='store_true', help='Use MySQL.')
    parser.add_argument('-zmq', action='store_true', help='Use zmq publisher.')
    parser.add_argument('-mysqldest', action='store', dest='mysqldest',
                        help='MySQL destination. Formatted as <name:pwd@host:port>',
                        default='')
    parser.add_argument('-mysqlschema', action='store', dest='mysqlschema',
                        help='MySQL schema.', default='')
    parser.add_argument('-kdbdest', action='store', dest='kdbdest',
                        help='Kdb+ destination. Formatted as <host:port>',
                        default='')
    parser.add_argument('-zmqdest', action='store', dest='zmqdest',
                        help='Zmq destination. For example \"tcp://127.0.0.1:3306\"',
                        default='')
    parser.add_argument('-sqlitepath', action='store', dest='sqlitepath',
                        help='SQLite database path', default='')
    parser.add_argument('-csvpath', action='store', dest='csvpath',
                        help='Csv file path', default='')
    parser.add_argument('-output', action='store', dest='output',
                        help='Verbose output file path')
    args = parser.parse_args()
    Logger.init_log(args.output)

    # Connect every storage backend selected on the command line.
    db_clients = []
    is_database_defined = False
    if args.sqlite:
        db_client = SqliteClient()
        db_client.connect(path=args.sqlitepath)
        db_clients.append(db_client)
        is_database_defined = True
    if args.mysql:
        db_client = MysqlClient()
        mysqldest = args.mysqldest
        # Destination format: <user:password@host:port>
        logon_credential = mysqldest.split('@')[0]
        connection = mysqldest.split('@')[1]
        db_client.connect(host=connection.split(':')[0],
                          port=int(connection.split(':')[1]),
                          user=logon_credential.split(':')[0],
                          pwd=logon_credential.split(':')[1],
                          schema=args.mysqlschema)
        db_clients.append(db_client)
        is_database_defined = True
    if args.csv:
        if args.csvpath != '':
            db_client = FileClient(dir=args.csvpath)
        else:
            db_client = FileClient()
        db_clients.append(db_client)
        is_database_defined = True
    if args.kdb:
        db_client = KdbPlusClient()
        db_client.connect(host=args.kdbdest.split(':')[0],
                          port=int(args.kdbdest.split(':')[1]))
        db_clients.append(db_client)
        is_database_defined = True
    if args.zmq:
        db_client = ZmqClient()
        db_client.connect(addr=args.zmqdest)
        db_clients.append(db_client)
        is_database_defined = True
    if not is_database_defined:
        print('Error: Please define which database is used.')
        parser.print_help()
        sys.exit(1)

    # Subscription instruments
    if args.instmts is None or len(args.instmts) == 0:
        print('Error: Please define the instrument subscription list. You can refer to subscriptions.ini.')
        parser.print_help()
        sys.exit(1)

    # Use exchange timestamp rather than local timestamp
    if args.exchtime:
        ExchangeGateway.is_local_timestamp = False

    subscription_instmts = SubscriptionManager(args.instmts).get_subscriptions()
    ExchangeGateway.init_snapshot_table(db_clients)

    Logger.info('[main]', 'Subscription file = %s' % args.instmts)
    log_str = 'Exchange/Instrument/InstrumentCode:\n'
    for instmt in subscription_instmts:
        log_str += '%s/%s/%s\n' % (instmt.exchange_name, instmt.instmt_name, instmt.instmt_code)
    Logger.info('[main]', log_str)

    # One gateway object per supported exchange.
    exch_gws = []
    exch_gws.append(ExchGwBtccSpot(db_clients))
    exch_gws.append(ExchGwBtccFuture(db_clients))
    exch_gws.append(ExchGwBitmex(db_clients))
    exch_gws.append(ExchGwBitfinex(db_clients))
    exch_gws.append(ExchGwOkCoin(db_clients))
    exch_gws.append(ExchGwKraken(db_clients))
    exch_gws.append(ExchGwGdax(db_clients))
    exch_gws.append(ExchGwBitstamp(db_clients))
    exch_gws.append(ExchGwGatecoin(db_clients))
    exch_gws.append(ExchGwQuoine(db_clients))
    exch_gws.append(ExchGwPoloniex(db_clients))
    exch_gws.append(ExchGwBittrex(db_clients))
    threads = []
    # Start each subscribed instrument on its matching exchange gateway.
    for exch in exch_gws:
        for instmt in subscription_instmts:
            if instmt.get_exchange_name() == exch.get_exchange_name():
                Logger.info("[main]", "Starting instrument %s-%s..." %
                            (instmt.get_exchange_name(), instmt.get_instmt_name()))
                threads += exch.start(instmt)
def main():
    """Entry point of the market data feed handler.

    Parses command-line arguments, connects the selected database/publisher
    clients, loads the instrument subscription list and starts one gateway
    per subscribed instrument on every supported exchange.
    """
    parser = argparse.ArgumentParser(
        description='Bitcoin exchange market data feed handler.')
    parser.add_argument('-instmts', action='store',
                        help='Instrument subscription file.',
                        default='subscriptions.ini')
    parser.add_argument('-exchtime', action='store_true',
                        help='Use exchange timestamp.')
    parser.add_argument('-kdb', action='store_true',
                        help='Use Kdb+ as database.')
    parser.add_argument('-csv', action='store_true',
                        help='Use csv file as database.')
    parser.add_argument('-sqlite', action='store_true',
                        help='Use SQLite database.')
    parser.add_argument('-mysql', action='store_true', help='Use MySQL.')
    parser.add_argument('-zmq', action='store_true', help='Use zmq publisher.')
    parser.add_argument('-kafka', action='store_true',
                        help='Use kafka publisher.')
    parser.add_argument('-mysqldest', action='store', dest='mysqldest',
                        help='MySQL destination. Formatted as <name:pwd@host:port>',
                        default='')
    parser.add_argument('-mysqlschema', action='store', dest='mysqlschema',
                        help='MySQL schema.', default='')
    parser.add_argument('-kdbdest', action='store', dest='kdbdest',
                        help='Kdb+ destination. Formatted as <host:port>',
                        default='')
    parser.add_argument('-zmqdest', action='store', dest='zmqdest',
                        help='Zmq destination. For example \"tcp://127.0.0.1:3306\"',
                        default='')
    parser.add_argument('-kafkadest', action='store', dest='kafkadest',
                        help='Kafka destination. For example \"127.0.0.1:9092\"',
                        default='')
    parser.add_argument('-sqlitepath', action='store', dest='sqlitepath',
                        help='SQLite database path', default='')
    parser.add_argument('-csvpath', action='store', dest='csvpath',
                        help='Csv file path', default='')
    parser.add_argument('-output', action='store', dest='output',
                        help='Verbose output file path')
    args = parser.parse_args()
    Logger.init_log(args.output)

    # A non-empty client list doubles as the "database defined" flag; the
    # previous separate is_database_defined boolean was redundant.
    db_clients = _create_db_clients(args)
    if not db_clients:
        print('Error: Please define which database is used.')
        parser.print_help()
        sys.exit(1)

    # Subscription instruments
    if not args.instmts:
        print('Error: Please define the instrument subscription list. '
              'You can refer to subscriptions.ini.')
        parser.print_help()
        sys.exit(1)

    # Use exchange timestamp rather than local timestamp
    if args.exchtime:
        ExchangeGateway.is_local_timestamp = False

    # Initialize subscriptions
    subscription_instmts = SubscriptionManager(args.instmts).get_subscriptions()
    if not subscription_instmts:
        print('Error: No instrument is found in the subscription file. ' +
              'Please check the file path and the content of the subscription file.')
        parser.print_help()
        sys.exit(1)

    # Initialize snapshot destination
    ExchangeGateway.init_snapshot_table(db_clients)

    Logger.info('[main]', 'Subscription file = %s' % args.instmts)
    log_str = 'Exchange/Instrument/InstrumentCode:\n'
    for instmt in subscription_instmts:
        log_str += '%s/%s/%s\n' % (instmt.exchange_name,
                                   instmt.instmt_name,
                                   instmt.instmt_code)
    Logger.info('[main]', log_str)

    # One gateway object per supported exchange, all sharing the same
    # database/publisher clients.
    gateway_classes = [
        ExchGwBtccSpot, ExchGwBtccFuture, ExchGwBitmex, ExchGwBitfinex,
        ExchGwOkCoin, ExchGwKraken, ExchGwGdax, ExchGwBitstamp,
        ExchGwGatecoin, ExchGwQuoine, ExchGwPoloniex, ExchGwBittrex,
        ExchGwYunbi, ExchGwLiqui, ExchGwBinance, ExchGwCryptopia,
    ]
    exch_gws = [gw_cls(db_clients) for gw_cls in gateway_classes]

    threads = []
    for exch in exch_gws:
        for instmt in subscription_instmts:
            if instmt.get_exchange_name() == exch.get_exchange_name():
                Logger.info("[main]",
                            "Starting instrument %s-%s..." %
                            (instmt.get_exchange_name(),
                             instmt.get_instmt_name()))
                threads += exch.start(instmt)


def _create_db_clients(args):
    """Connect and return one database/publisher client per selected flag.

    :param args: Parsed command-line arguments.
    :return: List of connected clients; empty when no storage flag was given.
    """
    db_clients = []
    if args.sqlite:
        db_client = SqliteClient()
        db_client.connect(path=args.sqlitepath)
        db_clients.append(db_client)
    if args.mysql:
        # Destination format: <name:pwd@host:port>
        mysqldest = args.mysqldest
        logon_credential = mysqldest.split('@')[0]
        connection = mysqldest.split('@')[1]
        db_client = MysqlClient()
        db_client.connect(host=connection.split(':')[0],
                          port=int(connection.split(':')[1]),
                          user=logon_credential.split(':')[0],
                          pwd=logon_credential.split(':')[1],
                          schema=args.mysqlschema)
        db_clients.append(db_client)
    if args.csv:
        db_clients.append(FileClient(dir=args.csvpath)
                          if args.csvpath != '' else FileClient())
    if args.kdb:
        db_client = KdbPlusClient()
        db_client.connect(host=args.kdbdest.split(':')[0],
                          port=int(args.kdbdest.split(':')[1]))
        db_clients.append(db_client)
    if args.zmq:
        db_client = ZmqClient()
        db_client.connect(addr=args.zmqdest)
        db_clients.append(db_client)
    if args.kafka:
        db_client = KafkaClient()
        db_client.connect(addr=args.kafkadest)
        db_clients.append(db_client)
    return db_clients
:param table: Table name
        :param condition: Where condition
        """
        # The default condition '1==1' means "match everything", so the
        # where clause is omitted and the whole table is cleared.
        if condition == '1==1':
            statement = 'delete from `%s' % (table)
        else:
            # NOTE(review): the leading backtick turns the table name into a
            # q symbol reference; no closing backtick appears to be expected —
            # confirm against kdb+ q-sql syntax.
            statement = 'delete from `%s where %s' % (
                table, self.parse_condition(condition))
        self.conn.sync(statement)
        return True


if __name__ == '__main__':
    # Manual smoke test: requires a local kdb+ process listening on port 5000.
    Logger.init_log()
    db_client = KdbPlusClient()
    db_client.connect(host='localhost', port=5000)
    db_client.create('test', ['c1', 'c2', 'c3', 'c4'],
                     ['varchar(20)', 'int', 'decimal(8, 20)', 'int'], [0],
                     False)
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['abc', 1, 1.1, 5])
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['efg', 2, 2.2, 6])
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['hij', 3, 3.3, 7])
    # Logger.info('test', db_client.select('test', columns=['*']))
    Logger.info(
        'test',
        db_client.select('test',
                         columns=['c2', 'c3'],
                         condition='c1 >= "abc" and c2 > 1'))
    # Logger.info('test', db_client.select('test', columns=['*'], orderby='c1 desc', limit=1))
    db_client.delete('test', 'c1="abc"')
def create(self, table, columns, types, primary_key_index=(), is_ifnotexists=True):
    """
    Create table in the database.

    Caveat - Assign the first few column as the keys!!!

    :param table: Table name
    :param columns: Column name array
    :param types: Type array (SQL-style type names, mapped to q types here)
    :param primary_key_index: Indices into `columns` of the key columns
    :param is_ifnotexists: If True, skip creation when the table already exists
    :return: True
    :raises Exception: If `columns` and `types` differ in length
    """
    if len(columns) != len(types):
        raise Exception("Incorrect create statement. Number of columns and that of types are different.\n%s\n%s" % \
                        (columns, types))

    if is_ifnotexists:
        # "\v" lists the variables defined in the kdb+ session; an existing
        # table shows up there, in which case creation is skipped.
        ret = self.conn("\\v")
        if ret is not None:
            for t in ret:
                if table == self.decode_qtypes(t):
                    Logger.info(self.__class__.__name__, "Table %s has been created." % table)
                    return True
        Logger.info(self.__class__.__name__, "Table %s is going to be created." % table)

    # Turn each column into a q column declaration, e.g. "px:`float$()".
    c = columns[:]
    for i, sql_type in enumerate(types):
        t = self.convert_type(sql_type)
        if t is str:
            # String columns named like date_time become q timestamps;
            # every other string column is stored as a symbol.
            if columns[i].find('date_time') > -1:
                c[i] += ":`timestamp$()"
            else:
                c[i] += ":`symbol$()"
        elif t is float:
            c[i] += ":`float$()"
        elif t is int:
            c[i] += ":`long$()"

    # Split key columns from value columns; delete in reverse index order so
    # earlier deletions do not shift later indices.
    keys = [c[i] for i in primary_key_index]
    for i in sorted(primary_key_index, reverse=True):
        del c[i]

    if keys:
        command = '%s:([%s] %s)' % (table, '; '.join(keys), '; '.join(c))
    else:
        command = '%s:(%s)' % (table, '; '.join(c))

    # Hold the client lock while issuing the command (same locking pattern
    # as insert()); errors are logged rather than propagated.
    with self.lock:
        try:
            self.conn.sync(command)
        except Exception as e:
            # Typo fixed in the log message: "creat" -> "create".
            Logger.error(self.__class__.__name__, "Error in create statement(%s).\n%s" % (command, e))
    return True
def parse_l2_depth(cls, instmt, raw):
    """
    Parse raw data to L2 depth
    :param instmt: Instrument
    :param raw: Raw data in JSON
    """
    # No order book mapping from config. Need to decode here.
    # Side is sign-encoded in the feed: ask volumes arrive negative and are
    # negated before storage; bid volumes arrive positive.
    l2_depth = instmt.get_l2_depth()
    l2_depth.date_time = datetime.utcnow().strftime("%Y%m%d %H:%M:%S.%f")
    if isinstance(raw[0], list):
        # Start subscription
        # Full snapshot: expects 25 bid entries followed by 25 ask entries,
        # each formatted as [price, count, volume].
        # NOTE(review): assumes the instrument's bid/ask lists are
        # pre-allocated with at least 25 slots per side — confirm.
        for i in range(0, 25):
            bid = raw[i]
            ask = raw[25+i]
            l2_depth.bids[i] = L2Depth.Depth(price=bid[0], count=bid[1], volume=bid[2])
            # Negate so asks are stored with positive-magnitude... actually
            # stored negated relative to the feed value.
            l2_depth.asks[i] = L2Depth.Depth(price=ask[0], count=ask[1], volume=-ask[2])
    else:
        # Incremental update: raw[1]=price, raw[2]=count, raw[3]=volume.
        # (raw[0] is unused here — presumably a channel/order identifier;
        # confirm against the exchange API docs.)
        price = raw[1]
        count = raw[2]
        volume = raw[3]
        found = False
        if count == 0:
            # Deletion
            # A zero count removes the price level; the volume's sign picks
            # the side (positive = bid, otherwise ask).
            if volume > 0:
                for i in range(0, len(l2_depth.bids)):
                    if price == l2_depth.bids[i].price:
                        found = True
                        del l2_depth.bids[i]
                        break
            else:
                for i in range(0, len(l2_depth.asks)):
                    if price == l2_depth.asks[i].price:
                        found = True
                        del l2_depth.asks[i]
                        break
            if not found:
                # Price level to delete is not in the book: log the current
                # depth side-by-side (bids <---> asks) for diagnosis
                # instead of raising.
                depth_text = ""
                for i in range(0, l2_depth.depth):
                    if i < len(l2_depth.bids):
                        depth_text += "%.4f,%d,%.4f" % \
                            (l2_depth.bids[i].volume, \
                             l2_depth.bids[i].count, \
                             l2_depth.bids[i].price)
                    else:
                        depth_text += " "
                    depth_text += "<--->"
                    if i < len(l2_depth.asks):
                        depth_text += "%.4f,%d,%.4f" % \
                            (l2_depth.asks[i].volume, \
                             l2_depth.asks[i].count, \
                             l2_depth.asks[i].price)
                    else:
                        depth_text += " "
                    depth_text += "\n"
                Logger.info(cls.__name__,
                            "Cannot find the deletion of the message: %s\nDepth:\n%s\n" % \
                            (raw, depth_text))
        else:
            # Insertion/Update
            if volume > 0:
                # Update
                # Bid side: refresh the level in place if the price exists.
                for i in range(0, len(l2_depth.bids)):
                    if price == l2_depth.bids[i].price:
                        l2_depth.bids[i].count = count
                        l2_depth.bids[i].volume = volume
                        found = True
                        break
                if not found:
                    # Insertion
                    # New price level: append, re-sort, and trim the book
                    # once it grows beyond twice the display depth.
                    l2_depth.bids.append(L2Depth.Depth(price=price,
                                                       count=count,
                                                       volume=volume))
                    l2_depth.sort_bids()
                    if len(l2_depth.bids) > l2_depth.depth * 2:
                        del l2_depth.bids[l2_depth.depth:]
            else:
                for i in range(0, len(l2_depth.asks)):
                    # Update
                    # Ask side: volume is negated before storage.
                    if price == l2_depth.asks[i].price:
                        l2_depth.asks[i].count = count
                        l2_depth.asks[i].volume = -volume
                        found = True
                        break
                if not found:
                    # Insertion
                    l2_depth.asks.append(L2Depth.Depth(price=price,
                                                       count=count,
                                                       volume=-volume))
                    l2_depth.sort_asks()
                    if len(l2_depth.asks) > l2_depth.depth * 2:
                        del l2_depth.asks[l2_depth.depth:]
    return l2_depth
# NOTE(review): fragment — the enclosing select() method begins before this
# chunk; only its error-raising tail and return are visible here.
raise Exception("Unknown type (%s) in kdb client select statement.\n%s" % (type(select_ret), select_ret))
return ret


def delete(self, table, condition='1==1'):
    """
    Delete rows from the table
    :param table: Table name
    :param condition: Where condition
    """
    # The default condition '1==1' means "match everything", so the where
    # clause is omitted and the whole table is cleared.
    if condition == '1==1':
        statement = 'delete from `%s' % (table)
    else:
        # NOTE(review): the leading backtick turns the table name into a
        # q symbol reference; no closing backtick appears to be expected —
        # confirm against kdb+ q-sql syntax.
        statement = 'delete from `%s where %s' % (table, self.parse_condition(condition))
    self.conn.sync(statement)
    return True


if __name__ == '__main__':
    # Manual smoke test: requires a local kdb+ process listening on port 5000.
    Logger.init_log()
    db_client = KdbPlusClient()
    db_client.connect(host='localhost', port=5000)
    db_client.create('test', ['c1', 'c2', 'c3', 'c4'], ['varchar(20)', 'int', 'decimal(8, 20)', 'int'], [0], False)
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['abc', 1, 1.1, 5])
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['efg', 2, 2.2, 6])
    db_client.insert('test', ['c1', 'c2', 'c3', 'c4'], ['hij', 3, 3.3, 7])
    # Logger.info('test', db_client.select('test', columns=['*']))
    Logger.info('test', db_client.select('test', columns=['c2', 'c3'], condition='c1 >= "abc" and c2 > 1'))
    # Logger.info('test', db_client.select('test', columns=['*'], orderby='c1 desc', limit=1))
    db_client.delete('test', 'c1="abc"')