def create(self, table, columns, types, is_ifnotexists=True):
    """
    Create table in the database (backed by a csv file "<table>.csv"
    under self.file_directory).

    :param table: Table name
    :param columns: Column array; each entry may carry a type suffix
                    (e.g. "id int") — only the first token is kept
    :param types: Type array; must match columns in length
    :param is_ifnotexists: Create table if not exists keyword (an existing
                           file is never overwritten regardless)
    :return: True on success, False if columns/types lengths differ
    """
    file_path = os.path.join(self.file_directory, table + ".csv")
    columns = [e.split(' ')[0] for e in columns]
    if len(columns) != len(types):
        return False

    self.lock.acquire()
    try:
        if os.path.isfile(file_path):
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            # Write the quoted header row of the new csv table.
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\"" for e in columns]) + '\n')
    finally:
        # Release even if the file write fails, so other threads are not
        # blocked forever (original code leaked the lock on exception).
        self.lock.release()
    return True
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler

    Builds the OkCoin channel ids for the instrument (future instruments
    carry a 3-part code, spot instruments do not), then subscribes to the
    order book and trade channels.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return

    parts = instmt.get_instmt_code().split('_')
    if len(parts) == 3:
        # Future instruments
        instmt.set_order_book_channel_id(
            "ok_sub_%s_%s_depth_%s_20" % (parts[0], parts[1], parts[2]))
        instmt.set_trades_channel_id(
            "ok_sub_%s_%s_trade_%s" % (parts[0], parts[1], parts[2]))
    else:
        # Spot instruments
        code = instmt.get_instmt_code()
        instmt.set_order_book_channel_id("ok_sub_%s_depth_20" % code)
        instmt.set_trades_channel_id("ok_sub_%s_trades" % code)

    ws.send(self.api_socket.get_order_book_subscription_string(instmt))
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def insert(self, table, columns, types, values, primary_key_index=(),
           is_orreplace=False, is_commit=True):
    """
    Insert a row into the table.

    :param table: Table name
    :param columns: Column array
    :param types: Type array (unused here; kept for interface compatibility)
    :param values: Value array; must match columns in length
    :param primary_key_index: An array of indices of primary keys in columns,
           e.g. [0] means the first column is the primary key
    :param is_orreplace: Indicate if the query is "INSERT OR REPLACE"
    :param is_commit: Commit the transaction after executing the statement
    :return: True on success, False if columns/values lengths differ
    """
    if len(columns) != len(values):
        return False

    column_names = ','.join(columns)
    value_string = ','.join([SqlStorage.convert_str(e) for e in values])
    if is_orreplace:
        sql = "%s %s (%s) values (%s)" % (self.replace_keyword(), table,
                                          column_names, value_string)
    else:
        sql = "insert into %s (%s) values (%s)" % (table, column_names,
                                                   value_string)

    self.lock.acquire()
    try:
        self.execute(sql)
        if is_commit:
            self.commit()
    except Exception as e:
        # Best-effort: log and continue so one bad row does not stop the feed.
        Logger.info(self.__class__.__name__,
                    "SQL error: %s\nSQL: %s" % (e, sql))
    finally:
        # Always release the lock, even if logging itself raises
        # (original code leaked the lock in that case).
        self.lock.release()
    return True
def on_close_handler(self, instmt, ws):
    """
    Socket on close handler

    Marks the instrument as unsubscribed so a later on-open re-subscribes.

    :param instmt: Instrument
    :param ws: Web socket
    """
    msg = "Instrument %s is unsubscribed in channel %s" % \
          (instmt.get_instmt_code(), instmt.get_exchange_name())
    Logger.info(self.__class__.__name__, msg)
    instmt.set_subscribed(False)
def connect(self, **kwargs):
    """
    Bind the zmq publisher socket.

    :param addr: zmq endpoint to bind (keyword argument), e.g.
                 "tcp://127.0.0.1:3306"
    :return: True when the connection object exists
    """
    endpoint = kwargs['addr']
    Logger.info(self.__class__.__name__,
                'Zmq storage is connecting to %s' % endpoint)
    self.conn.bind(endpoint)
    return self.conn is not None
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler

    Subscribes the instrument's trade channel once per connection.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" %
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (Bitmex-style dict messages).

    Dispatches on the message's keys: 'info' (server banner), 'subscribe'
    (subscription ack), or 'table' (market data for 'trade' / 'orderBookL2').

    :param instmt: Instrument
    :param message: Message (dict decoded from the websocket)
    """
    keys = message.keys()
    if 'info' in keys:
        Logger.info(self.__class__.__name__, message['info'])
    elif 'subscribe' in keys:
        Logger.info(self.__class__.__name__, 'Subscription of %s is %s' % \
                    (message['request']['args'], \
                     'successful' if message['success'] else 'failed'))
    elif 'table' in keys:
        if message['table'] == 'trade':
            for trade_raw in message['data']:
                if trade_raw["symbol"] == instmt.get_instmt_code():
                    # Filter out the initial subscriptions
                    trade = self.api_socket.parse_trade(instmt, trade_raw)
                    # Only record trades not seen before (dedup by exchange id).
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        elif message['table'] == 'orderBookL2':
            l2_depth = self.api_socket.parse_l2_depth(instmt, message)
            # Insert only when the book actually changed vs the stored one.
            if l2_depth is not None and l2_depth.is_diff(instmt.get_l2_depth()):
                instmt.set_prev_l2_depth(instmt.get_l2_depth())
                instmt.set_l2_depth(l2_depth)
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        else:
            # Unknown table type: dump it for inspection.
            Logger.info(self.__class__.__name__, json.dumps(message,indent=2))
    else:
        Logger.error(self.__class__.__name__,
                     "Unrecognised message:\n" + json.dumps(message))
def on_message_handler(self, instmt, messages):
    """
    Incoming message handler (OkCoin-style: a list of channel messages).

    :param instmt: Instrument
    :param messages: List of messages; each is a dict with a 'channel' key
                     and either 'data' (market data) or 'success'
                     (subscription ack)
    """
    for message in messages:
        keys = message.keys()
        if 'channel' in keys:
            if 'data' in keys:
                if message['channel'] == instmt.get_order_book_channel_id():
                    data = message['data']
                    # Keep a copy of the previous book so we can diff below.
                    instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                    self.api_socket.parse_l2_depth(instmt, data)
                    # Insert only if the first 5 levels are different
                    if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                        instmt.incr_order_book_id()
                        self.insert_order_book(instmt)
                elif message['channel'] == instmt.get_trades_channel_id():
                    for trade_raw in message['data']:
                        trade = self.api_socket.parse_trade(instmt, trade_raw)
                        # Dedup by the exchange-side trade id.
                        if trade.trade_id != instmt.get_exch_trade_id():
                            instmt.incr_trade_id()
                            instmt.set_exch_trade_id(trade.trade_id)
                            self.insert_trade(instmt, trade)
            elif 'success' in keys:
                Logger.info(self.__class__.__name__,
                            "Subscription to channel %s is %s" \
                            % (message['channel'], message['success']))
        else:
            # Message without a channel: log it for inspection.
            Logger.info(self.__class__.__name__, ' - ' + json.dumps(message))
def connect(self, **kwargs):
    """
    Open a connection to the kdb+ database.

    :param host: kdb+ host (keyword argument)
    :param port: kdb+ port (keyword argument)
    :return: True when the connection is established
    """
    host, port = kwargs['host'], kwargs['port']
    Logger.info(self.__class__.__name__,
                'Kdb+ database storage is connecting to %s:%d' % (host, port))
    self.conn = qconnection.QConnection(host=host, port=port)
    self.conn.open()
    if self.conn.is_connected():
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is successful.' % (host, port))
    else:
        Logger.info(self.__class__.__name__,
                    'Connection to %s:%d is failed.' % (host, port))
    return self.conn.is_connected()
def main():
    """
    Entry point: parse command-line options, wire up the chosen storage
    backends, load the instrument subscription list, and start one gateway
    thread set per subscribed instrument.
    """
    parser = argparse.ArgumentParser(
        description='Bitcoin exchange market data feed handler.')
    parser.add_argument('-instmts', action='store',
                        help='Instrument subscription file.',
                        default='subscriptions.ini')
    parser.add_argument('-exchtime', action='store_true',
                        help='Use exchange timestamp.')
    parser.add_argument('-kdb', action='store_true',
                        help='Use Kdb+ as database.')
    parser.add_argument('-csv', action='store_true',
                        help='Use csv file as database.')
    parser.add_argument('-sqlite', action='store_true',
                        help='Use SQLite database.')
    parser.add_argument('-mysql', action='store_true', help='Use MySQL.')
    parser.add_argument('-zmq', action='store_true', help='Use zmq publisher.')
    parser.add_argument(
        '-mysqldest', action='store', dest='mysqldest',
        help='MySQL destination. Formatted as <name:pwd@host:port>',
        default='')
    parser.add_argument('-mysqlschema', action='store', dest='mysqlschema',
                        help='MySQL schema.', default='')
    parser.add_argument('-kdbdest', action='store', dest='kdbdest',
                        help='Kdb+ destination. Formatted as <host:port>',
                        default='')
    parser.add_argument(
        '-zmqdest', action='store', dest='zmqdest',
        help='Zmq destination. For example "tcp://127.0.0.1:3306"',
        default='')
    parser.add_argument('-sqlitepath', action='store', dest='sqlitepath',
                        help='SQLite database path', default='')
    parser.add_argument('-csvpath', action='store', dest='csvpath',
                        help='Csv file path', default='')
    parser.add_argument('-output', action='store', dest='output',
                        help='Verbose output file path')
    args = parser.parse_args()
    Logger.init_log(args.output)

    # Collect every storage backend the user enabled; at least one required.
    storages = []
    is_database_defined = False
    if args.sqlite:
        storage = SqliteStorage()
        storage.connect(path=args.sqlitepath)
        storages.append(storage)
        is_database_defined = True
    if args.mysql:
        storage = MysqlStorage()
        # mysqldest format: <name:pwd@host:port>
        mysqldest = args.mysqldest
        logon_credential = mysqldest.split('@')[0]
        connection = mysqldest.split('@')[1]
        storage.connect(host=connection.split(':')[0],
                        port=int(connection.split(':')[1]),
                        user=logon_credential.split(':')[0],
                        pwd=logon_credential.split(':')[1],
                        schema=args.mysqlschema)
        storages.append(storage)
        is_database_defined = True
    if args.csv:
        if args.csvpath != '':
            storage = FileStorage(dir=args.csvpath)
        else:
            storage = FileStorage()
        storages.append(storage)
        is_database_defined = True
    if args.kdb:
        # kdbdest format: <host:port>
        storage = KdbPlusStorage()
        storage.connect(host=args.kdbdest.split(':')[0],
                        port=int(args.kdbdest.split(':')[1]))
        storages.append(storage)
        is_database_defined = True
    if args.zmq:
        storage = ZmqStorage()
        storage.connect(addr=args.zmqdest)
        storages.append(storage)
        is_database_defined = True
    if not is_database_defined:
        print('Error: Please define which database is used.')
        parser.print_help()
        sys.exit(1)

    # Subscription instruments
    if args.instmts is None or len(args.instmts) == 0:
        print(
            'Error: Please define the instrument subscription list. You can refer to subscriptions.ini.'
        )
        parser.print_help()
        sys.exit(1)

    # Use exchange timestamp rather than local timestamp
    if args.exchtime:
        ExchangeGateway.is_local_timestamp = False

    # Initialize subscriptions
    subscription_instmts = SubscriptionManager(
        args.instmts).get_subscriptions()
    if len(subscription_instmts) == 0:
        print(
            'Error: No instrument is found in the subscription file. ' +
            'Please check the file path and the content of the subscription file.'
        )
        parser.print_help()
        sys.exit(1)

    # Initialize snapshot destination
    ExchangeGateway.init_snapshot_table(storages)

    Logger.info(__name__, 'Subscription file = %s' % args.instmts)
    log_str = 'Exchange/Instrument/InstrumentCode:\n'
    for instmt in subscription_instmts:
        log_str += '%s/%s/%s\n' % (instmt.exchange_name, instmt.instmt_name,
                                   instmt.instmt_code)
    Logger.info(__name__, log_str)

    # Only Bitstamp and Binance gateways are currently enabled; the rest are
    # kept commented out for easy re-activation.
    gateways = []
    # gateways.append(ExchGwBtccSpot(storages))
    # gateways.append(ExchGwBtccFuture(storages))
    # gateways.append(ExchGwBitmex(storages))
    # gateways.append(ExchGwBitfinex(storages))
    # gateways.append(ExchGwOkCoin(storages))
    # gateways.append(ExchGwKraken(storages))
    # gateways.append(ExchGwGdax(storages))
    gateways.append(BitstampGateway(storages))
    # gateways.append(ExchGwGatecoin(storages))
    # gateways.append(ExchGwQuoine(storages))
    # gateways.append(ExchGwPoloniex(storages))
    # gateways.append(ExchGwBittrex(storages))
    # gateways.append(ExchGwYunbi(storages))
    # gateways.append(ExchGwLiqui(storages))
    gateways.append(BinanceGateway(storages))
    # gateways.append(ExchGwCryptopia(storages))

    # Start each gateway for every instrument of its exchange.
    threads = []
    for item in gateways:
        for instmt in subscription_instmts:
            if instmt.get_exchange_name() == item.get_exchange_name():
                Logger.info(__name__, "Starting instrument %s-%s..." % \
                            (instmt.get_exchange_name(), instmt.get_instmt_name()))
                threads += item.start(instmt)
def parse_l2_depth(cls, instmt, raw):
    """
    Parse raw data to L2 depth (Bitfinex book channel).

    Mutates the instrument's L2Depth in place: the initial snapshot is a
    list-of-lists; subsequent updates are [chanId, price, count, volume]
    where count == 0 means delete and the sign of volume selects bid/ask.

    :param instmt: Instrument
    :param raw: Raw data in JSON
    :return: The (mutated) L2 depth of the instrument
    """
    # No order book mapping from config. Need to decode here.
    l2_depth = instmt.get_l2_depth()
    l2_depth.date_time = datetime.utcnow().strftime("%Y%m%d %H:%M:%S.%f")
    if isinstance(raw[0], list):
        # Start subscription: snapshot carries 25 bids then 25 asks.
        for i in range(0, 25):
            bid = raw[i]
            ask = raw[25 + i]
            l2_depth.bids[i] = L2Depth.Depth(price=bid[0],
                                             count=bid[1],
                                             volume=bid[2])
            # Ask volumes come in negative; store them positive.
            l2_depth.asks[i] = L2Depth.Depth(price=ask[0],
                                             count=ask[1],
                                             volume=-ask[2])
    else:
        price = raw[1]
        count = raw[2]
        volume = raw[3]
        found = False
        if count == 0:
            # Deletion: remove the level with the matching price.
            if volume > 0:
                for i in range(0, len(l2_depth.bids)):
                    if price == l2_depth.bids[i].price:
                        found = True
                        del l2_depth.bids[i]
                        break
            else:
                for i in range(0, len(l2_depth.asks)):
                    if price == l2_depth.asks[i].price:
                        found = True
                        del l2_depth.asks[i]
                        break
            if not found:
                # Deletion target missing: dump the current book for debugging.
                depth_text = ""
                for i in range(0, l2_depth.depth):
                    if i < len(l2_depth.bids):
                        depth_text += "%.4f,%d,%.4f" % \
                                      (l2_depth.bids[i].volume, \
                                       l2_depth.bids[i].count, \
                                       l2_depth.bids[i].price)
                    else:
                        depth_text += " "
                    depth_text += "<--->"
                    if i < len(l2_depth.asks):
                        depth_text += "%.4f,%d,%.4f" % \
                                      (l2_depth.asks[i].volume, \
                                       l2_depth.asks[i].count, \
                                       l2_depth.asks[i].price)
                    else:
                        depth_text += " "
                    depth_text += "\n"
                Logger.info(cls.__name__,
                            "Cannot find the deletion of the message: %s\nDepth:\n%s\n" % \
                            (raw, depth_text))
        else:
            # Insertion/Update
            if volume > 0:
                # Update
                for i in range(0, len(l2_depth.bids)):
                    if price == l2_depth.bids[i].price:
                        l2_depth.bids[i].count = count
                        l2_depth.bids[i].volume = volume
                        found = True
                        break
                if not found:
                    # Insertion; re-sort and trim to twice the display depth.
                    l2_depth.bids.append(
                        L2Depth.Depth(price=price, count=count, volume=volume))
                    l2_depth.sort_bids()
                    if len(l2_depth.bids) > l2_depth.depth * 2:
                        del l2_depth.bids[l2_depth.depth:]
            else:
                for i in range(0, len(l2_depth.asks)):
                    # Update
                    if price == l2_depth.asks[i].price:
                        l2_depth.asks[i].count = count
                        l2_depth.asks[i].volume = -volume
                        found = True
                        break
                if not found:
                    # Insertion; re-sort and trim to twice the display depth.
                    l2_depth.asks.append(
                        L2Depth.Depth(price=price, count=count, volume=-volume))
                    l2_depth.sort_asks()
                    if len(l2_depth.asks) > l2_depth.depth * 2:
                        del l2_depth.asks[l2_depth.depth:]
    return l2_depth
def on_message_handler(self, instmt, message):
    """
    Incoming message handler (Bitfinex).

    Dict messages are control events (version info, subscription acks that
    bind channel ids); list messages are channel data dispatched by the
    channel id in message[0].

    :param instmt: Instrument
    :param message: Message (dict for events, list for channel data)
    """
    if isinstance(message, dict):
        keys = message.keys()
        if 'event' in keys and message[
                'event'] == 'info' and 'version' in keys:
            Logger.info(self.__class__.__name__,
                        "Bitfinex version: %s" % message['version'])
        elif 'event' in keys and message['event'] == 'subscribed':
            if instmt.get_instmt_code() == message['pair']:
                # Remember which numeric channel id serves book vs trades.
                if message['channel'] == 'book':
                    instmt.set_order_book_channel_id(message['chanId'])
                elif message['channel'] == 'trades':
                    instmt.set_trades_channel_id(message['chanId'])
                else:
                    raise Exception("Unknown channel %s : <%s>" %
                                    (message['channel'], message))
                Logger.info(self.__class__.__name__,
                            'Subscription: %s, pair: %s, channel Id: %s' % \
                            (message['channel'], instmt.get_instmt_code(),
                             message['chanId']))
    elif isinstance(message, list):
        if message[0] == instmt.get_order_book_channel_id():
            if isinstance(message[1], list):
                # Initial snapshot payload.
                self.api_socket.parse_l2_depth(instmt, message[1])
            elif len(message) != 2:
                # Incremental update; keep previous book to diff against.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, message)
            else:
                # Heartbeat (e.g. [chanId, 'hb']): nothing to do.
                return
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        elif message[0] == instmt.get_trades_channel_id():
            # No recovery trade
            # if isinstance(message[1], list):
            #     raw_trades = message[1]
            #     raw_trades.sort(key=lambda x:x[0])
            #     for raw in raw_trades:
            #         trade = self.api_socket.parse_trade(instmt, raw)
            #         try:
            #             if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
            #                 instmt.incr_trade_id()
            #                 instmt.set_exch_trade_id(trade.trade_id)
            #                 self.insert_trade(instmt, trade)
            #         except Exception as e:
            #             Logger.info('test', "trade.trade_id(%s):%s" % (type(trade.trade_id), trade.trade_id))
            #             Logger.info('test', "instmt.get_exch_trade_id()(%s):%s" % (type(instmt.get_exch_trade_id()), instmt.get_exch_trade_id()))
            #             raise e
            # 'tu' = trade update; only record ids newer than the last seen.
            if message[1] == 'tu':
                trade = self.api_socket.parse_trade(instmt, message[3:])
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.incr_trade_id()
                    instmt.set_exch_trade_id(trade.trade_id)
                    self.insert_trade(instmt, trade)
def create(self, table, columns, types, primary_key_index=(), is_ifnotexists=True):
    """
    Create table in the kdb+ database.
    Caveat - Assign the first few column as the keys!!!

    :param table: Table name
    :param columns: Column array
    :param types: Type array; must match columns in length
    :param primary_key_index: Indices (into columns) of the key columns
    :param is_ifnotexists: Create table if not exists keyword
    :return: True (q-side creation errors are logged, not raised)
    :raises Exception: if columns and types differ in length
    """
    if len(columns) != len(types):
        raise Exception("Incorrect create statement. Number of columns and that of types are different.\n%s\n%s" % \
                        (columns, types))

    if is_ifnotexists:
        # "\v" lists the variables/tables in the q session; skip creation
        # when the table already exists.
        ret = self.conn("\\v")
        if ret is not None:
            for t in ret:
                if table == self.decode_qtypes(t):
                    Logger.info(self.__class__.__name__,
                                "Table %s has been created." % table)
                    return True
        Logger.info(self.__class__.__name__,
                    "Table %s is going to be created." % table)

    # Map each SQL-ish column type onto the corresponding q type declaration.
    c = columns[:]
    for i in range(0, len(types)):
        t = self.convert_type(types[i])
        if t is str:
            if columns[i].find('date_time') > -1:
                c[i] += ":`timestamp$()"
            else:
                c[i] += ":`symbol$()"
        elif t is float:
            c[i] += ":`float$()"
        elif t is int:
            c[i] += ":`long$()"

    # Move key columns out of the value list; delete from the back so
    # earlier indices stay valid.
    keys = [c[i] for i in primary_key_index]
    for i in sorted(primary_key_index, reverse=True):
        del c[i]

    if len(keys) > 0:
        command = '%s:([%s] %s)' % (table, '; '.join(keys), '; '.join(c))
    else:
        command = '%s:(%s)' % (table, '; '.join(c))

    self.lock.acquire()
    try:
        self.conn.sync(command)
    except Exception as e:
        # Fixed typo in the logged message: "creat" -> "create".
        Logger.error(self.__class__.__name__,
                     "Error in create statement(%s).\n%s" % (command, e))
    finally:
        self.lock.release()
    return True
        :param table: Table name
        :param condition: Where condition
        """
        # NOTE(review): the `def delete(self, table, condition)` header and the
        # start of its docstring lie outside this chunk of the file.
        if condition == '1==1':
            # Degenerate always-true condition: wipe the whole table.
            statement = 'delete from `%s' % (table)
        else:
            statement = 'delete from `%s where %s' % (
                table, self.parse_condition(condition))
        self.conn.sync(statement)
        return True


if __name__ == '__main__':
    # Manual smoke test: assumes a kdb+ instance is listening on
    # localhost:5000 — TODO confirm before running.
    Logger.init_log()
    db_storage = KdbPlusClient()
    db_storage.connect(host='localhost', port=5000)
    db_storage.create('test', ['c1', 'c2', 'c3', 'c4'],
                      ['varchar(20)', 'int', 'decimal(8, 20)', 'int'], [0],
                      False)
    db_storage.insert('test', ['c1', 'c2', 'c3', 'c4'], ['abc', 1, 1.1, 5])
    db_storage.insert('test', ['c1', 'c2', 'c3', 'c4'], ['efg', 2, 2.2, 6])
    db_storage.insert('test', ['c1', 'c2', 'c3', 'c4'], ['hij', 3, 3.3, 7])
    # Logger.info('test', db_storage.select('test', columns=['*']))
    Logger.info(
        'test',
        db_storage.select('test', columns=['c2', 'c3'],
                          condition='c1 >= "abc" and c2 > 1'))
    # Logger.info('test', db_storage.select('test', columns=['*'], orderby='c1 desc', limit=1))
    db_storage.delete('test', 'c1="abc"')