def register(self, instruments):
    """Append instrument definitions to the symbols CSV database.

    Accepts a list of instrument tuples or a dict of them (the dict's
    values are used); any other type is silently ignored. Existing rows
    are preserved and duplicates dropped (first occurrence wins).

    :Parameters:
        instruments : list or dict
            instrument tuples matching the symbols file's columns
    """
    if isinstance(instruments, dict):
        instruments = list(instruments.values())

    if not isinstance(instruments, list):
        return

    # read existing symbols db; blank out NaNs so duplicate rows compare equal
    db = pd.read_csv(self.args['symbols'], header=0).fillna("")

    instruments = pd.DataFrame(instruments)
    instruments.columns = db.columns

    # DataFrame.append was removed in pandas 2.0 -- use pd.concat instead
    db = pd.concat([db, instruments]).drop_duplicates(keep="first")

    db.to_csv(self.args['symbols'], header=True, index=False)
    tools.chmod(self.args['symbols'])
def log_trade(self, trade):
    """Persist a completed trade to MySQL and/or the daily CSV trade log.

    Trades with no entry time (i.e. an exit with no matching entry, seen
    on the first trade after restart) are ignored.

    :Parameters:
        trade : dict
            trade record (strategy, symbol, direction, quantity, prices,
            timestamps, pnl, ...)
    """
    # first trade is an exit? ignore it
    if trade['entry_time'] is None:
        return

    # MySQL connection established?
    if self.dbconn is not None and self.dbcurr is not None:
        sql = """INSERT INTO trades (
            `algo`, `symbol`, `direction`, `quantity`,
            `entry_time`, `exit_time`, `exit_reason`, `order_type`,
            `market_price`, `target`, `stop`, `entry_price`,
            `exit_price`, `realized_pnl`)
            VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
            ON DUPLICATE KEY UPDATE
            `algo`=%s, `symbol`=%s, `direction`=%s, `quantity`=%s,
            `entry_time`=%s, `exit_time`=%s, `exit_reason`=%s,
            `order_type`=%s, `market_price`=%s, `target`=%s, `stop`=%s,
            `entry_price`=%s, `exit_price`=%s, `realized_pnl`=%s
        """

    # stringify timestamps; non-datetime values (already strings) are left as-is
    try:
        trade['entry_time'] = trade['entry_time'].strftime(
            "%Y-%m-%d %H:%M:%S.%f")
    except AttributeError:
        pass
    try:
        trade['exit_time'] = trade['exit_time'].strftime(
            "%Y-%m-%d %H:%M:%S.%f")
    except AttributeError:
        pass

    # all values go to the db as strings
    for k, v in trade.items():
        if v is not None:
            trade[k] = str(v)

    if self.dbconn is not None and self.dbcurr is not None:
        # same 14 values are used for INSERT and the DUPLICATE KEY UPDATE
        params = (
            trade['strategy'], trade['symbol'], trade['direction'],
            trade['quantity'], trade['entry_time'], trade['exit_time'],
            trade['exit_reason'], trade['order_type'], trade['market_price'],
            trade['target'], trade['stop'], trade['entry_price'],
            trade['exit_price'], trade['realized_pnl'])
        self.dbcurr.execute(sql, params + params)

        # best-effort commit; the connection may have dropped
        try:
            self.dbconn.commit()
        except Exception:
            pass

    if self.trade_log_dir:
        # normalize trailing slash, build <dir>/<strategy>_<yyyymmdd>.csv
        self.trade_log_dir = (self.trade_log_dir + '/').replace('//', '/')
        trade_log_path = self.trade_log_dir + self.strategy.lower() + "_" + \
            datetime.now().strftime('%Y%m%d') + ".csv"

        # convert None to empty string for CSV output !!
        trade.update((k, '') for k, v in trade.items() if v is None)

        # create single-row df with a fixed column order
        trade_df = pd.DataFrame(index=[0], data=trade)[[
            'strategy', 'symbol', 'direction', 'quantity',
            'entry_time', 'exit_time', 'exit_reason', 'order_type',
            'market_price', 'target', 'stop', 'entry_price',
            'exit_price', 'realized_pnl'
        ]]

        if os.path.exists(trade_log_path):
            trades = pd.read_csv(trade_log_path, header=0)
            # DataFrame.append was removed in pandas 2.0 -- use pd.concat
            trades = pd.concat([trades, trade_df],
                               ignore_index=True, sort=True)
            # a re-logged trade (same entry/symbol/strategy) replaces the old row
            trades.drop_duplicates(['entry_time', 'symbol', 'strategy'],
                                   keep="last", inplace=True)
            trades.to_csv(trade_log_path, header=True, index=False)
            tools.chmod(trade_log_path)
        else:
            trade_df.to_csv(trade_log_path, header=True, index=False)
            tools.chmod(trade_log_path)
def run(self):
    """Starts the blotter

    Connects to the TWS/GW, processes and logs market data,
    and broadcast it over TCP via ZeroMQ (which algo subscribe to)
    """

    # abort if another blotter with the same name is already running
    self._check_unique_blotter()

    # connect to mysql
    self.mysql_connect()

    # NOTE(review): zmq.Context's first argument is io_threads; zmq.REP (== 4)
    # looks unintended here -- confirm whether zmq.Context() was meant
    self.context = zmq.Context(zmq.REP)
    self.socket = self.context.socket(zmq.PUB)
    self.socket.bind("tcp://*:" + str(self.args['zmqport']))

    db_modified = 0          # mtime of symbols file at last processed change
    contracts = []           # contracts currently requested
    prev_contracts = []      # contracts requested on the previous pass
    first_run = True

    logging.info("Connecting to Interactive Brokers...")
    self.ibConn = ezIBpy()
    self.ibConn.ibCallback = self.ibCallback

    # retry until TWS/GW accepts the connection; print a '*' per failed attempt
    while not self.ibConn.connected:
        self.ibConn.connect(clientId=int(self.args['ibclient']),
                            port=int(self.args['ibport']),
                            host=str(self.args['ibserver']))
        time.sleep(1)
        if not self.ibConn.connected:
            print('*', end="", flush=True)
    logging.info("Connection established...")

    try:
        while True:

            if not os.path.exists(self.args['symbols']):
                # bootstrap an empty symbols csv on first use
                pd.DataFrame(columns=['symbol', 'sec_type', 'exchange',
                                      'currency', 'expiry', 'strike',
                                      'opt_type']
                             ).to_csv(self.args['symbols'],
                                      header=True, index=False)
                tools.chmod(self.args['symbols'])
            else:
                time.sleep(0.1)

                # read db properties
                db_data = os.stat(self.args['symbols'])
                db_size = db_data.st_size
                db_last_modified = db_data.st_mtime

                # empty file: cancel all subscriptions and wait for content
                if db_size == 0:
                    if len(prev_contracts) > 0:
                        logging.info('Cancel market data...')
                        self.ibConn.cancelMarketData()
                        time.sleep(0.1)
                        prev_contracts = []
                    continue

                # modified? skip the pass if the file hasn't changed
                # NOTE(review): `&` works here because both operands are bool,
                # but `and` would be the idiomatic form
                if (first_run == False) & (db_last_modified == db_modified):
                    continue

                # continue...
                db_modified = db_last_modified

                # read contructs db
                df = pd.read_csv(self.args['symbols'], header=0)
                if len(df.index) == 0:
                    continue

                # removed expired
                # expiry may be YYYYMM (< 1000000), YYYYMMDD (>= 1000000)
                # or NaN for non-expiring instruments
                df = df[(
                    (df['expiry'] < 1000000) & (
                        df['expiry'] >= int(datetime.now().strftime('%Y%m')))
                ) | (
                    (df['expiry'] >= 1000000) & (
                        df['expiry'] >= int(datetime.now().strftime('%Y%m%d')))
                ) | isnan(df['expiry'])]

                # write the pruned db back (expired rows are dropped on disk)
                df.fillna("", inplace=True)
                df.to_csv(self.args['symbols'], header=True, index=False)
                tools.chmod(self.args['symbols'])

                # ignore commentee (rows whose symbol contains '#')
                df = df[df['symbol'].str.contains("#") == False]
                contracts = [tuple(x) for x in df.values]

                if first_run:
                    first_run = False
                else:
                    if contracts != prev_contracts:
                        # cancel market data for removed contracts
                        for contract in prev_contracts:
                            if contract not in contracts:
                                self.ibConn.cancelMarketData(
                                    self.ibConn.createContract(contract))
                                time.sleep(0.1)
                                contract_string = self.ibConn.contractString(
                                    contract).split('_')[0]
                                logging.info('Contract Removed [' +
                                             contract_string + ']')

                # request market data for newly added contracts
                for contract in contracts:
                    if contract not in prev_contracts:
                        self.ibConn.requestMarketData(
                            self.ibConn.createContract(contract))
                        time.sleep(0.1)
                        contract_string = self.ibConn.contractString(
                            contract).split('_')[0]
                        logging.info('Contract Added [' +
                                     contract_string + ']')

                # update latest contracts
                prev_contracts = contracts

            time.sleep(2)

    except (KeyboardInterrupt, SystemExit):
        print("\n\n>>> Interrupted with Ctrl-c...")
        sys.exit(1)
def _write_cached_args(self):
    """Pickle the blotter's resolved args to its cache file.

    Uses a context manager so the file handle is closed deterministically
    (the original `pickle.dump(..., open(...))` left closing to the GC).
    """
    with open(self.args_cache_file, "wb") as fh:
        pickle.dump(self.args, fh)
    tools.chmod(self.args_cache_file)