def test_ibgw_port(host):
    """Build the local IB gateway image, run it, and verify port 4002 serves the API.

    Builds ./Dockerfile, starts a detached container publishing port 4002,
    connects with ib_insync and requests 30 days of hourly EURUSD midpoint
    bars.

    BUG FIX: the original leaked the container and the IB connection whenever
    any step after ``docker run`` raised; cleanup now runs in a ``finally``.

    :param host: testinfra host fixture (unused directly, kept for pytest).
    """
    account = os.environ['IB_ACCOUNT']
    password = os.environ['IB_PASSWORD']
    trade_mode = os.environ['TRADE_MODE']
    # build local ./Dockerfile
    subprocess.check_call(['docker', 'build', '-t', IMAGE_NAME, '.'])
    # run a container; tail -f keeps it alive until we remove it
    docker_id = subprocess.check_output(
        ['docker', 'run',
         '--env', 'IB_ACCOUNT={}'.format(account),
         '--env', 'IB_PASSWORD={}'.format(password),
         '--env', 'TRADE_MODE={}'.format(trade_mode),
         '-p', '4002:4002',
         '-d', IMAGE_NAME, "tail", "-f", "/dev/null"]).decode().strip()
    ib = IB()
    try:
        # give the gateway time to finish its login sequence
        time.sleep(60)
        ib.connect('localhost', 4002, clientId=1)
        contract = Forex('EURUSD')
        bars = ib.reqHistoricalData(
            contract, endDateTime='', durationStr='30 D',
            barSizeSetting='1 hour', whatToShow='MIDPOINT', useRTH=True)
        # convert to pandas dataframe:
        df = util.df(bars)
        print(df)
    finally:
        if ib.isConnected():
            ib.disconnect()
        # always destroy the container, even when the test body failed
        subprocess.check_call(['docker', 'rm', '-f', docker_id])
def test_ibgw_restart(ib_docker):
    """Stop and restart the gateway container, then verify the API recovers.

    Polls for up to 60 seconds for a successful connection, then requests
    30 days of hourly EURUSD midpoint bars as a smoke check.

    BUG FIX: the original used a bare ``except:`` which also swallows
    ``KeyboardInterrupt``/``SystemExit``; narrowed to ``Exception``.  The
    connection is now closed at the end instead of being leaked.

    :param ib_docker: fixture yielding the running container id.
    """
    subprocess.check_output(
        ['docker', 'container', 'stop', ib_docker]).decode().strip()
    subprocess.check_output(
        ['docker', 'container', 'start', ib_docker]).decode().strip()
    ib = IB()
    wait = 60
    while not ib.isConnected():
        try:
            IB.sleep(1)
            ib.connect('localhost', 4002, clientId=999)
        except Exception:
            # gateway not accepting API connections yet; keep retrying
            pass
        wait -= 1
        if wait <= 0:
            break
    try:
        contract = Forex('EURUSD')
        bars = ib.reqHistoricalData(
            contract, endDateTime='', durationStr='30 D',
            barSizeSetting='1 hour', whatToShow='MIDPOINT', useRTH=True)
        # convert to pandas dataframe:
        df = util.df(bars)
        print(df)
    finally:
        if ib.isConnected():
            ib.disconnect()
def plot_contracts_bars(ib: IB,
                        contracts: List[Contract],
                        end: Optional[datetime.datetime] = None,
                        bars_settings: Optional[List[Tuple[str, str]]] = None,
                        bar_type: Optional[str] = "TRADES"):
    """Plot cumulative log-return curves for several contracts, one figure per bar setting.

    For each ``(durationStr, barSizeSetting)`` pair a new figure is created
    and every contract's close series is normalised to start at 1.0 so the
    instruments are directly comparable.

    BUG FIX: the original did ``ax.set_title = f"..."`` which *assigns a
    string over the method* and never sets a title; it is now called.

    :param ib: connected ib_insync.IB instance.
    :param contracts: contracts to plot together on each figure.
    :param end: end of the history window; defaults to "now" in UTC.
    :param bars_settings: list of (duration, bar size) pairs; a sensible
        default ladder is used when empty or None.
    :param bar_type: value passed to ``whatToShow`` (e.g. "TRADES").
    """
    if end is None:
        end = datetime.datetime.now().replace(tzinfo=datetime.timezone.utc)
    if bars_settings is None or len(bars_settings) < 1:
        bars_settings = [
            ("2 Y", "1 day"),
            ("6 M", "4 hours"),
            ("1 W", "10 mins"),
            ("1 D", "1 min"),
        ]
    for durationStr, barSizeSetting in bars_settings:
        fig, ax = plt.subplots(figsize=(24, 14))
        for contract in contracts:
            bars = util.df(
                ib.reqHistoricalData(
                    contract,
                    endDateTime=end,
                    durationStr=durationStr,
                    barSizeSetting=barSizeSetting,
                    whatToShow=bar_type,
                    useRTH=True,
                    formatDate=2  # UTC
                ))
            # normalised cumulative product of log-return diffs, seeded at 1.0
            ys = np.cumprod(np.insert(1 + np.diff(np.log(bars["close"])), 0, 1))
            xs = bars["date"].values
            ax.plot(xs, ys, label=contract.symbol)
        ax.set_title(f"{durationStr} - {barSizeSetting} bars")
        plt.xticks(rotation=45)
        plt.legend()
def spx_bar_history(update_bars=True):
    """Return the cached SPX 5-minute bar history, optionally refreshed from IB.

    Reads the feather cache, and when ``update_bars`` is true downloads the
    latest month of 5-minute SPX bars, merges them over the cache with
    ``combine_first`` and writes the result back.

    :param update_bars: when False, just return a copy of the cached frame.
    :return: pandas DataFrame indexed by bar date.
    """
    cache_path = UpdateSP500Data.DATA_BASE_PATH / 'sp500_5min_bars'
    df_hist = read_feather(cache_path)
    if not update_bars:
        return df_hist.copy()
    # Download latest month of bars and merge into the cache.
    ib = IB()
    ib.connect('127.0.0.1', port=4001, clientId=40)
    bars = ib.reqHistoricalData(
        Index('SPX', 'CBOE', 'USD'),
        endDateTime='',
        durationStr='1 M',
        barSizeSetting='5 mins',
        whatToShow='TRADES',
        useRTH=True,
        formatDate=1)
    ib.disconnect()
    latest = util.df(bars).set_index('date')
    full_hist = latest.combine_first(df_hist)
    write_feather(full_hist, cache_path)
    return full_hist
def test_request_historical_data(ib: IB, contract: Contract, barSizeSetting: str):
    """Subscribe to one day of live-updating historical bars and return the list.

    BUG FIX: the original silently ignored both of its arguments — it always
    requested ``Forex('EURUSD')`` with a hard-coded ``'10 secs'`` bar size.
    The passed ``contract`` and ``barSizeSetting`` are now honoured, matching
    the signature's contract.

    :param ib: connected ib_insync.IB instance.
    :param contract: instrument to request bars for.
    :param barSizeSetting: bar size string (see TWS "Valid Bar Sizes").
    :return: BarDataList whose updateEvent is wired to ``on_bar_update``.
    """
    bars = ib.reqHistoricalData(
        contract,
        endDateTime='',
        durationStr='1 D',
        barSizeSetting=barSizeSetting,
        # whatToShow='ADJUSTED_LAST',
        whatToShow='MIDPOINT',
        useRTH=True,
        formatDate=1,
        keepUpToDate=True,
    )
    bars.updateEvent += on_bar_update
    return bars
class Window(QWidget):
    """Small Qt widget that connects to IB and plots historical closes.

    A line edit takes a contract expression, a button toggles the IB
    connection, and a TickerGraph canvas displays the close series of
    30 days of 4-hour midpoint bars.
    """

    def __init__(self, host, port, clientId):
        # Build the widget tree: input line, connect button, graph canvas.
        QWidget.__init__(self)
        self.edit = QLineEdit('', self)
        self.edit.editingFinished.connect(self.add)
        self.connectButton = QPushButton('Connect')
        self.connectButton.clicked.connect(self.onConnectButtonClicked)
        layout = QVBoxLayout(self)
        layout.addWidget(self.edit)
        layout.addWidget(self.connectButton)
        self.graph = TickerGraph(self, width=5, height=4)
        self.graph.move(0, 0)
        layout.addWidget(self.graph)
        # Connection parameters are stored and used lazily on button click.
        self.connectInfo = (host, port, clientId)
        self.ib = IB()

    def add(self, text=''):
        """Evaluate *text* as a contract expression, fetch bars and plot the closes.

        NOTE(review): when triggered via editingFinished no argument is
        passed, so ``text`` stays '' and nothing is plotted; the disabled
        ``or self.edit.text()`` fallback suggests this path is unfinished.
        """
        text = text  # or self.edit.text()  -- fallback left disabled by the author
        if text:
            # SECURITY(review): eval() executes arbitrary user-typed code;
            # a restricted parser/whitelist would be safer here.
            contract = eval(text)
            if (contract and self.ib.qualifyContracts(contract)):
                data = self.ib.reqHistoricalData(
                    contract, endDateTime='', durationStr='30 D',
                    barSizeSetting='4 hours', whatToShow='MIDPOINT',
                    useRTH=True)
                df = util.df(data)
                print(df["close"])
                self.graph.plot(df["close"])
        self.edit.setText(text)

    def onConnectButtonClicked(self, _):
        """Toggle the IB connection and flip the button label accordingly."""
        if self.ib.isConnected():
            self.ib.disconnect()
            self.connectButton.setText('Connect')
        else:
            self.ib.connect(*self.connectInfo)
            self.connectButton.setText('Disconnect')
            # Wrap the edit contents into a Forex(...) expression and plot it.
            self.add(f"Forex('" + str(self.edit.text()) + "')")

    def closeEvent(self, ev):
        """Stop the asyncio event loop when the window closes."""
        asyncio.get_event_loop().stop()
def request_historical_data(ib: IB, contract: Contract, barSizeSetting: str):
    """Request one day of historical bars that keep updating in real time.

    :param ib: connected ib_insync.IB instance
    :param contract: instrument to request bars for
    :param barSizeSetting: bar size; see the "Valid Bar Sizes" table at
        https://interactivebrokers.github.io/tws-api/historical_bars.html
    :return: BarDataList
    """
    request_kwargs = dict(
        endDateTime='',
        durationStr='1 D',
        barSizeSetting=barSizeSetting,
        # whatToShow='ADJUSTED_LAST',
        whatToShow='MIDPOINT',
        useRTH=True,
        formatDate=1,
        keepUpToDate=True,
    )
    return ib.reqHistoricalData(contract, **request_kwargs)
def test_ibgw_interactive(ib_docker):
    """Poll the running gateway container until the API accepts a connection.

    Retries once per second for up to 120 seconds, then requests 30 days of
    hourly EURUSD midpoint bars as a smoke check.

    BUG FIX: the original used a bare ``except:`` which also swallows
    ``KeyboardInterrupt``/``SystemExit``; narrowed to ``Exception``.  The
    connection is now closed instead of being leaked.

    :param ib_docker: fixture yielding the running container id.
    """
    ib = IB()
    wait = 120
    while not ib.isConnected():
        try:
            IB.sleep(1)
            ib.connect('localhost', 4002, clientId=999)
        except Exception:
            # gateway not accepting API connections yet; keep retrying
            pass
        wait -= 1
        if wait <= 0:
            break
    try:
        contract = Forex('EURUSD')
        bars = ib.reqHistoricalData(
            contract, endDateTime='', durationStr='30 D',
            barSizeSetting='1 hour', whatToShow='MIDPOINT', useRTH=True)
        # convert to pandas dataframe:
        df = util.df(bars)
        print(df)
    finally:
        if ib.isConnected():
            ib.disconnect()
class GatherData:
    """Run IB market scanners on a schedule and persist results to a database.

    Connects to TWS/gateway on construction, then ``run_scanner`` loops
    during market hours: it requests every scanner in SCANNER_SCOPE each
    BAR_SIZE seconds, stores the ranked symbols, and after the session pulls
    per-symbol historical bars for the day's best matches.
    """

    def __init__(self):
        self.analyze = AnalyzeData()
        # access_type is hard-coded; the 'gateway' branch exists but is
        # currently unreachable.  NOTE(review): any other value would leave
        # __port unbound.
        access_type = 'tws'
        client_id = 0
        if access_type == 'gateway':
            __port = 4001
        elif access_type == 'tws':
            __port = 7497
        __host = '127.0.0.1'
        self.ib = IB()
        self.ib.connect(__host, __port, clientId=client_id)
        self.db = Database()
        self.conn = self.db.connect_()
        # max number of matches fetched per day by run_scanner
        self.limit = 50

    def load_scanner(self, scanner):
        """Request one NASDAQ stock scanner snapshot and return the ranked rows."""
        ss = ScannerSubscription()
        ss.instrument = "STK"  # STOCK.EU
        ss.locationCode = "STK.NASDAQ"  # US.MAJOR
        ss.scanCode = scanner
        scan = self.ib.reqScannerData(ss)
        # ss.cancelScannerSubscription(ss)
        # self.ib.cancelScannerSubscription(ss)
        # arr = np.array(scan)
        return scan

    @classmethod
    def fields_scanner(cls, unix_ts, dt_ts, scanner, data_):
        """Flatten scanner rows into [ts, datetime, scanner, symbol, rank, bar size] lists."""
        fields = []
        for line in data_:
            _symbol = line.contractDetails.contract.symbol
            _rank = line.rank
            fields.append([unix_ts, dt_ts, scanner, _symbol, _rank, BAR_SIZE])
        return fields

    @classmethod
    def insert_scanner(cls, unix_ts, dt_ts, scanner, data_, cursor):
        """Insert scanner rows; on an Exception payload insert a NaN marker row.

        NOTE(review): ``.format(insert_data)`` on a placeholder-free string is
        a no-op, and the else-branch builds SQL by string formatting rather
        than parameters — worth cleaning up.
        """
        # if isinstance(data_, np.ndarray) or isinstance(data_, np.ndarray):
        if not isinstance(data_, Exception):
            insert_data = cls.fields_scanner(unix_ts, dt_ts, scanner, data_)
            query = "insert into hist_scanner values(?,?,?,?,?,?);".format(
                insert_data)
            cursor.executemany(query, insert_data)
        else:
            cursor.execute(
                "insert into hist_scanner values({},{},{},{},{},{});".format(
                    unix_ts, dt_ts, scanner, np.nan, np.nan, BAR_SIZE))

    @classmethod
    def save_scanner(cls, unix_ts, dt_ts, scanner, data_):
        """Dump one scanner snapshot to a CSV file next to this module (Windows paths)."""
        path_ = str(os.path.dirname(
            os.path.realpath(__file__))) + '\\hist_files\\scanners\\'
        saving_data = cls.fields_scanner(unix_ts, dt_ts, scanner, data_)
        df = pd.DataFrame(saving_data)
        df.to_csv(path_ + str(unix_ts) + '_' + scanner + '.csv', ',',
                  index=False)

    def load_hist_data(self, dt_ts_str, symbol, duration_secs):
        """Request BAR_SIZE-second bars for *symbol* over *duration_secs* seconds.

        NOTE(review): ``''.format(dt_ts_str)`` always yields '' — the
        requested end time is silently ignored and "now" is used instead.
        """
        contract = Stock(symbol, 'SMART', 'USD')
        data_ = self.ib.reqHistoricalData(
            contract,
            endDateTime=''.format(dt_ts_str),
            durationStr='{} S'.format(duration_secs),
            barSizeSetting='{} secs'.format(BAR_SIZE),
            whatToShow='ADJUSTED_LAST',
            useRTH=True,
            formatDate=1)
        # ss.cancelScannerSubscription(ss)
        # self.ib.cancelScannerSubscription(ss)
        # arr = np.array(scan)
        return data_

    @classmethod
    def fields_hist_data(cls, data_, symbol):
        """Flatten bars into [unix ts, date, symbol, o, h, l, c, volume] lists."""
        fields = []
        for i in data_:
            unix_ts = time.mktime(i.date.timetuple())
            fields.append([
                int(unix_ts), i.date, symbol,
                float(i.open),
                float(i.high),
                float(i.low),
                float(i.close),
                float(i.volume)
            ])
        return fields

    @classmethod
    def insert_hist_data(cls, data_, symbol, cursor):
        """Bulk-insert OHLCV bars for one symbol (same no-op .format as insert_scanner)."""
        insert_data = cls.fields_hist_data(data_, symbol)
        query = "insert into hist_ohlcv values(?,?,?,?,?,?,?,?);".format(
            insert_data)
        cursor.executemany(query, insert_data)

    def run_scanner(self, on_this_tz, off_this_tz, save_mode='db'):
        """Main loop: scan every BAR_SIZE seconds while the market is open.

        :param on_this_tz: session start as a datetime.time (local tz).
        :param off_this_tz: session end as a datetime.time (local tz).
        :param save_mode: 'db' inserts rows, 'files' writes CSV snapshots.
        """
        done = False
        while True:
            now = datetime.datetime.now().time()
            # Inner loop runs only inside the trading window.
            while now > on_this_tz and now < off_this_tz:
                # while schedule_agent(on_this_tz, off_this_tz):
                previous_unix_ts = int(time.time())
                previous_dt_ts = datetime.datetime.now()
                with self.conn.cursor() as cursor:
                    try:
                        # if isinstance(self.conn, self.db.Connection):
                        for scanner in SCANNER_SCOPE:
                            data_ = self.load_scanner(scanner)
                            if save_mode == 'db':
                                self.insert_scanner(previous_unix_ts,
                                                    previous_dt_ts, scanner,
                                                    data_, cursor)
                                print(
                                    'requested and inserted {} scanners, bar size is {}'
                                    .format(str(len(SCANNER_SCOPE)),
                                            str(BAR_SIZE)))
                            elif save_mode == 'files':
                                self.save_scanner(previous_unix_ts,
                                                  previous_dt_ts, scanner,
                                                  data_)
                                print(
                                    'requested and saved {} scanners, bar size is {}'
                                    .format(str(len(SCANNER_SCOPE)),
                                            str(BAR_SIZE)))
                    except Exception as e:
                        # Record the failure as a marker row so the gap is visible.
                        print('error occured and inserted:', e)
                        self.insert_scanner(previous_unix_ts, previous_dt_ts,
                                            np.nan, e, cursor)
                current_unix_ts = int(time.time())
                print('waiting for next bar:', BAR_SIZE)
                # Busy-wait until the next BAR_SIZE boundary.
                # NOTE(review): this spins at 100% CPU; a sleep would be kinder.
                while current_unix_ts - previous_unix_ts < BAR_SIZE:
                    current_unix_ts = int(time.time())
                done = True
                now = datetime.datetime.now().time()
            else:
                # Outside the trading window: idle one second per outer pass.
                time.sleep(1)
            if done:
                # Session just ended: pull history for today's best matches.
                dt_ts_str, scanners = self.analyze.get_todays_insertions(
                    self.conn)
                matches = self.analyze.query_matches(dt_ts_str, scanners,
                                                     self.limit, self.conn)
                insertion_li = []
                # dt_ts_str = '20181026 09:40:00'
                for match in matches:
                    try:
                        hist_data = self.load_hist_data(
                            dt_ts_str, match, 10 * 60)
                        insertion_li.append(hist_data)
                        with self.conn.cursor() as cursor:
                            self.insert_hist_data(hist_data, match, cursor)
                    except IndexError:
                        print('stop')
                done = False
def currency_exchange(request, currencies='EURUSD'):
    """Django view: fetch one day of 1-minute bars for a currency pair and render them.

    The ss1..ss9 variables are debug breadcrumbs surfaced in the template so
    failures can be located without server access.

    NOTE(review): if the first try-block fails before ``ib_`` is assigned,
    the second try-block raises NameError on ``ib_`` (caught into ss7); the
    dangling connection is then never closed.

    :param request: Django HttpRequest.
    :param currencies: six-letter currency pair, e.g. 'EURUSD'.
    :return: rendered 'trades/currency_exchange.html' response.
    """
    # print(currencies)
    ss1 = 'ss1'
    ss2 = 'ss2'
    ss3 = 'ss3'
    ss4 = 'ss4'
    ss5 = 'ss5'
    ss6 = 'ss6'
    ss7 = 'ss7'
    ss8 = 'ss8'
    ss9 = 'ss9'
    df = None
    is_error = 0
    try:
        # A fresh event loop per request: ib_insync needs one on this thread.
        loop = asyncio.get_event_loop_policy().new_event_loop()
        asyncio.set_event_loop(loop)
        ib_server = 'twoprojectsib1_tws_1'
        ib_port = 4003
        if settings.DEBUG:
            # local development targets a gateway on localhost
            ib_server = '127.0.0.1'
            ib_port = 4002
        ib_ = IB()
        # random client id avoids collisions between concurrent requests
        ci = randint(0, 100000)
        ib_.connect(ib_server, ib_port, clientId=ci)
        # print('ib_')
        # print(ib_)
        # print('ib_')
        ss1 = str(ib_)
    except Exception as e:
        ss1 = "Error connecting to: " + ib_server + ":" + str(ib_port)
        ss2 = e
        is_error = 1
    try:
        c = Forex(currencies)
        bars = ib_.reqHistoricalData(c,
                                     endDateTime='',
                                     durationStr='1 D',
                                     barSizeSetting='1 min',
                                     whatToShow='MIDPOINT',
                                     useRTH=True)
        # print(bars)
        ss3 = 'good 3'
        df = util.df(bars)
        # print(df[['date', 'open', 'high', 'low', 'close']])
        ss4 = 'good 4'
        # newest bars first for display
        df = df.sort_values(by=['date'], ascending=False)
        ss5 = 'good 5'
        ib_.disconnect()
        del ib_
        ss6 = 'good 6'
    except Exception as e2:
        ss7 = e2
        is_error = 1
    context = {
        'df': df,
        'ss1': ss1,
        'ss2': ss2,
        'ss3': ss3,
        'ss4': ss4,
        'ss5': ss5,
        'ss6': ss6,
        'ss7': ss7,
        'ss8': ss8,
        'ss9': ss9,
        'is_error': is_error,
        'currencies': currencies,
        'title': 'Currency Exchange',
        # NOTE(review): list contains duplicates (e.g. GBPUSD, HKDJPY appear twice).
        'cur_list': [
            'GBPUSD', 'GBPZAR', 'HKDJPY', 'KRWAUD', 'KRWCAD', 'KRWCHF',
            'KRWEUR', 'KRWGBP', 'KRWHKD', 'KRWJPY', 'KRWUSD', 'MXNJPY',
            'NOKJPY', 'NOKSEK', 'NZDCAD', 'NZDCHF', 'NZDJPY', 'NZDUSD',
            'SEKJPY', 'SGDCNH', 'SGDJPY', 'TRYJPY', 'USDCAD', 'USDCHF',
            'USDCNH', 'USDCZK', 'USDDKK', 'USDHKD', 'USDHUF', 'USDILS',
            'USDJPY', 'USDKRW', 'USDMXN', 'USDNOK', 'USDPLN', 'USDRUB',
            'USDSEK', 'USDSGD', 'USDTRY', 'USDZAR', 'ZARJPY', 'EURPLN',
            'EURRUB', 'EURSEK', 'EURSGD', 'EURTRY', 'EURUSD', 'EURZAR',
            'GBPAUD', 'GBPCAD', 'GBPCHF', 'GBPCNH', 'GBPCZK', 'GBPDKK',
            'GBPHKD', 'GBPHUF', 'GBPJPY', 'GBPMXN', 'GBPNOK', 'GBPNZD',
            'GBPPLN', 'GBPSEK', 'GBPSGD', 'GBPTRY', 'GBPUSD', 'GBPZAR',
            'HKDJPY', 'KRWAUD', 'KRWCAD', 'KRWCHF',
            'KRWEUR', 'KRWGBP', 'KRWHKD', 'KRWJPY', 'KRWUSD', 'MXNJPY',
            'NOKJPY', 'NOKSEK', 'NZDCAD', 'NZDCHF', 'NZDJPY', 'NZDUSD',
            'SEKJPY', 'SGDCNH', 'SGDJPY', 'TRYJPY', 'USDCAD', 'USDCHF',
            'USDCNH', 'USDCZK', 'USDDKK', 'USDHKD', 'USDHUF', 'USDILS',
            'USDJPY', 'USDKRW', 'USDMXN', 'USDNOK', 'USDPLN', 'USDRUB',
            'USDSEK', 'USDSGD', 'USDTRY', 'USDZAR', 'ZARJPY'
        ]
    }
    return render(request, 'trades/currency_exchange.html', context)
class Live(IB):
    """Live trading session for one instrument via TWS.

    Loads per-instrument parameters from instruments.csv, connects to TWS,
    subscribes to live-updating bars and tracks open ('pool'), executed
    ('history') and pending bracket orders in DataFrames.  Fills are
    persisted to history_trades_<symbol>.csv and optionally announced on
    Telegram.

    NOTE(review): subclasses IB but never calls IB.__init__; it uses the
    separate ``self.ib`` connection instead.
    """

    def __init__(self, symbol, temp, client, verbose=False, notification=False):
        self.symbol = symbol
        # Per-instrument configuration row keyed by symbol.
        instruments = pd.read_csv('instruments.csv').set_index('symbol')
        params = instruments.loc[self.symbol]
        self.market = str(params.market)
        self.exchange = str(params.exchange)
        self.temp = temp  # bar size, e.g. '1 min'
        self.tick_size = float(params.tick_size)
        self.digits = int(params.digits)
        self.leverage = int(params.leverage)
        self.client = client  # TWS clientId, also used to match fills
        self.verbose = verbose
        self.notification = notification
        self.ib = IB()
        print(self.ib.connect('127.0.0.1', 7497, client))
        self.get_contract()
        self.data = self.download_data(tempo=self.temp, duration='1 D')
        self.current_date()
        # pool: currently-open trades; history: all fills; pending: bracket legs.
        self.pool = pd.DataFrame(columns=[
            'date', 'id', 'type', 'lots', 'price', 'S/L', 'T/P', 'commission',
            'comment', 'profit'
        ])
        self.history = pd.DataFrame(columns=[
            'date', 'id', 'type', 'lots', 'price', 'S/L', 'T/P', 'commission',
            'comment', 'profit'
        ])
        self.pending = pd.DataFrame(columns=[
            'date', 'id', 'type', 'lots', 'price', 'S/L', 'T/P', 'commission',
            'comment', 'profit'
        ])
        self.position = 0  # net signed lots
        self.number = 0  # monotonically increasing trade counter

    def get_contract(self):
        """Resolve self.contract from the configured market type."""
        if self.market == 'futures':
            # Use the front-month expiration reported by contract details.
            expiration = self.ib.reqContractDetails(
                Future(self.symbol,
                       self.exchange))[0].contract.lastTradeDateOrContractMonth
            self.contract = Future(symbol=self.symbol,
                                   exchange=self.exchange,
                                   lastTradeDateOrContractMonth=expiration)
        elif self.market == 'forex':
            self.contract = Forex(self.symbol)
        elif self.market == 'stocks':
            self.contract = Stock(symbol=self.symbol,
                                  exchange=self.exchange,
                                  currency='USD')

    def download_data(self, tempo, duration):
        """Request live-updating historical bars (TRADES, or MIDPOINT for forex)."""
        pr = (lambda mark: 'TRADES' if mark == 'futures' else
              ('TRADES' if mark == 'stocks' else 'MIDPOINT'))(self.market)
        historical = self.ib.reqHistoricalData(self.contract,
                                               endDateTime='',
                                               durationStr=duration,
                                               barSizeSetting=tempo,
                                               whatToShow=pr,
                                               useRTH=True,
                                               keepUpToDate=True)
        return historical

    def data_to_df(self, data):
        """Convert a BarDataList to an OHLCV DataFrame indexed by datetime."""
        df = util.df(data)[['date', 'open', 'high', 'low', 'close',
                            'volume']].set_index('date')
        df.index = pd.to_datetime(df.index)
        return df

    def send_telegram_message(self, msg):
        '''Sends a telegram message '''
        # SECURITY(review): bot token is hard-coded in source; move to config.
        requests.post(
            'https://api.telegram.org/bot804823606:AAFq-YMKr4hIjQ4N5M8GYCGa5w9JJ1kIunk/sendMessage',
            data={
                'chat_id': '@ranguito_channel',
                'text': msg
            })

    def current_date(self):
        """Refresh self.date / self.weekday / self.hour from the wall clock."""
        self.date = datetime.now().strftime('%Y-%m-%d')
        self.weekday = datetime.now().weekday()
        self.hour = datetime.now().strftime('%H:%M:%S')

    def pool_check(self):
        '''Check pool trades'''
        # When the net position is flat, the pool of open trades is cleared.
        if self.position == 0:
            self.pool = pd.DataFrame(columns=[
                'date', 'type', 'lots', 'price', 'S/L', 'T/P', 'commission',
                'comment', 'profit'
            ])

    def calculate_profit(self, type, price, lots):
        '''Calculates profit'''
        # Profit is only realised when this fill closes an opposite position;
        # opening/extending a position yields 0.
        if type == 'BUY':
            profit = (lambda pos: 0 if pos >= 0 else
                      (self.pool[self.pool.type == 'SELL'])['price'].iloc[0] -
                      price)(self.position)
        else:
            profit = (lambda pos: 0 if pos <= 0 else price -
                      (self.pool[self.pool.type == 'BUY'])['price'].iloc[0])(
                          self.position)
        return profit * self.leverage * lots

    def order_values(self, order_id):
        """Return (price, commission) of the fill matching order_id, else (0, 0).

        NOTE(review): ``('OrderId' and 'clientId') in ...`` evaluates to
        ``'clientId' in ...`` only — the 'OrderId' operand is discarded.
        """
        price = 0
        commission = 0
        if len(self.ib.fills()) > 0:
            for trade in util.tree(self.ib.fills()):
                if ('OrderId' and 'clientId') in trade[1]['Execution']:
                    if ((nested_lookup('orderId', trade)[0] == order_id) and
                        (nested_lookup('clientId', trade)[0] == self.client)):
                        commission = nested_lookup('commission', trade)[0]
                        price = nested_lookup('price', trade)[0]
        return (price, commission)

    def order_send(self, type, lots, sl=0, tp=0, comment=''):
        """Place a market order, block until filled, and record the trade.

        NOTE(review): the ``while price == 0`` poll loops forever if the
        order is never filled or rejected.
        """
        market_order = MarketOrder(type, lots)
        #initial_margin, maintenance_margin = self.get_margins(market_order)
        self.ib.placeOrder(self.contract, market_order)
        id = market_order.orderId
        self.number += 1
        price = 0
        while price == 0:
            self.ib.sleep(1)
            price, commission = self.order_values(id)
        profit = self.calculate_profit(type, price, lots)
        trade = {
            'date': str(self.date) + ' ' + str(self.hour),
            'id': id,
            'type': type,
            'lots': lots,
            'price': price,
            'S/L': sl,
            'T/P': tp,
            'commission': commission,
            'comment': comment,
            'profit': profit
        }
        self.save_trade(trade)
        self.pool = pd.concat(
            [self.pool, pd.DataFrame(trade, index=[self.number])], sort=False)
        self.history = pd.concat(
            [self.history, pd.DataFrame(trade, index=[self.number])],
            sort=False)
        # BUY adds lots to the net position, SELL subtracts.
        mult = (lambda dir: 1 if dir == 'BUY' else -1)(type)
        self.position += (mult * lots)
        self.pool_check()
        if self.verbose:
            print('%s %s | %sING %d units at %5.2f in %s' % (str(
                self.date), str(self.hour), type, lots, price, self.symbol))
        if self.notification:
            if self.position != 0:
                self.send_message_in(type, price, sl, tp, lots)
            else:
                self.send_message_out(type, price, lots, profit, commission,
                                      commission)

    def bracket_stop_order(self, type, lots, entry_price, sl=0, tp=0,
                           comment=''):
        """Place a bracket (entry + take-profit + stop-loss) and register all legs as pending.

        :return: tuple (entry order, take-profit order, stop-loss order).
        """
        bracket_order = self.ib.bracketStopOrder(type, lots, entry_price, tp,
                                                 sl)
        #initial_margin, maintenance_margin = self.get_margins(bracket_order[0])
        for order in bracket_order:
            self.ib.placeOrder(self.contract, order)
        id_entry = bracket_order[0].orderId
        id_tp = bracket_order[1].orderId
        id_sl = bracket_order[2].orderId
        # Entry leg.
        trade = {
            'date': str(self.date) + ' ' + str(self.hour),
            'id': id_entry,
            'type': bracket_order[0].action,
            'lots': lots,
            'price': entry_price,
            'S/L': sl,
            'T/P': tp,
            'commission': 0,
            'comment': comment,
            'profit': 0
        }
        self.pending = pd.concat(
            [self.pending, pd.DataFrame(trade, index=[id_entry])], sort=False)
        # Take-profit leg.
        trade = {
            'date': str(self.date) + ' ' + str(self.hour),
            'id': id_tp,
            'type': bracket_order[1].action,
            'lots': lots,
            'price': tp,
            'S/L': 0,
            'T/P': 0,
            'commission': 0,
            'comment': comment,
            'profit': 0
        }
        self.pending = pd.concat(
            [self.pending, pd.DataFrame(trade, index=[id_entry])], sort=False)
        # Stop-loss leg.
        trade = {
            'date': str(self.date) + ' ' + str(self.hour),
            'id': id_sl,
            'type': bracket_order[2].action,
            'lots': lots,
            'price': sl,
            'S/L': 0,
            'T/P': 0,
            'commission': 0,
            'comment': comment,
            'profit': 0
        }
        self.pending = pd.concat(
            [self.pending, pd.DataFrame(trade, index=[id_entry])], sort=False)
        return (bracket_order[0], bracket_order[1], bracket_order[2])

    def pending_check(self, order):
        """If *order* (a pending bracket leg) has filled, promote it to pool/history.

        :return: True when a fill was processed, False when still unfilled.
        """
        id = order.orderId
        if len(self.pending) > 0:
            price, commission = self.order_values(id)
            if price > 0:
                self.number += 1
                order_select = self.pending[self.pending.id == id]
                profit = self.calculate_profit(order_select.type.iloc[0],
                                               price,
                                               order_select.lots.iloc[0])
                trade = {
                    'date': str(self.date) + ' ' + str(self.hour),
                    'id': id,
                    'type': order_select.type.iloc[0],
                    'lots': order_select.lots.iloc[0],
                    'price': price,
                    'S/L': order_select['S/L'].iloc[0],
                    'T/P': order_select['T/P'].iloc[0],
                    'commission': commission,
                    'comment': '',
                    'profit': profit
                }
                self.save_trade(trade)
                self.pool = pd.concat(
                    [self.pool, pd.DataFrame(trade, index=[self.number])],
                    sort=False)
                self.history = pd.concat(
                    [self.history, pd.DataFrame(trade, index=[self.number])],
                    sort=False)
                mult = (lambda dir: 1 if dir == 'BUY' else -1)(
                    order_select.type.iloc[0])
                self.position += (mult * order_select.lots.iloc[0])
                self.pool_check()
                if self.verbose:
                    print('%s %s | %sING %d units at %5.2f in %s' %
                          (str(self.date), str(self.hour),
                           order_select.type.iloc[0],
                           order_select.lots.iloc[0], price, self.symbol))
                if self.notification:
                    if self.position != 0:
                        self.send_message_in(order_select.type.iloc[0], price,
                                             order_select['S/L'].iloc[0],
                                             order_select['T/P'].iloc[0],
                                             order_select.lots.iloc[0])
                    else:
                        self.send_message_out(order_select.type.iloc[0],
                                              price,
                                              order_select.lots.iloc[0],
                                              profit, commission, commission)
                return True
            else:
                return False

    def get_margins(self, order):
        """Return (initial margin change, maintenance margin change) via whatIfOrder."""
        init_margin = float(
            self.ib.whatIfOrder(self.contract, order).initMarginChange)
        maint_margin = float(
            self.ib.whatIfOrder(self.contract, order).maintMarginChange)
        return (init_margin, maint_margin)

    def send_message_in(self, type, price_in, sl, tp, lots):
        """Telegram notification for a newly opened position."""
        msg_in = '%s Opened in %s \nPrice: %5.2f \nS/L: %5.2f \nT/P: %5.2f \nLots: %d \nAt: %s' % (
            type, self.symbol, price_in, sl, tp, lots, self.hour)
        self.send_telegram_message(msg_in)

    def send_message_out(self, type, price_out, lots, profit, comm_in,
                         comm_out):
        """Telegram notification for a closed position, including total commissions."""
        msg_out = '%s Closed in %s \nPrice: %5.2f \nProfit(USD): %5.2f \nCommissions(USD): %5.2f \nAt: %s' % \
            (type, self.symbol, price_out, profit, (comm_in+comm_out), self.hour)
        self.send_telegram_message(msg_out)

    def save_trade(self, trade):
        """Append one trade to history_trades_<symbol>.csv and refresh running totals."""
        if not path.exists('history_trades_%s.csv' % self.symbol):
            # Create the CSV with headers on first use.
            initial = pd.DataFrame(columns=[
                'date', 'id', 'type', 'lots', 'price', 'S/L', 'T/P',
                'commission', 'comment', 'profit'
            ]).set_index('date')
            initial.to_csv('history_trades_%s.csv' % self.symbol)
        history = pd.read_csv('history_trades_%s.csv' % self.symbol)
        trade = pd.DataFrame(trade, index=[0])
        history = pd.concat([history, trade], sort=False)
        # Derived columns recomputed over the whole file on every save.
        history['net profit'] = history['profit'] - history['commission']
        history['accumulated profit'] = history['net profit'].cumsum()
        history['max profit'] = history['accumulated profit'].cummax()
        history.set_index('date').to_csv('history_trades_%s.csv' %
                                         self.symbol)
class Basem:
    '''Importer for minute-level (5-min) stock bar data from IB into the database.'''

    def __init__(self):
        self.log = log(__name__, 'logs/basem.log')
        self.db = Basedb()
        # symbols whose request returned no data, for retrying later
        self.empty = []
        self.total = 0
        self.i = 0
        self.ib = IB()
        self.ib.connect(Config.ib_host, Config.ib_port, Config.ib_client_id)

    def __del__(self):
        # Best-effort cleanup of the IB connection when the object is collected.
        self.ib.disconnect()

    def deal_data(self, future, symbol):
        '''Done-callback: persist one symbol's bar data returned by the async request.

        NOTE(review): SQL is built by string formatting, not parameters —
        safe only while symbol/date values come from trusted sources.
        '''
        self.i += 1
        print('(%d/%d) 正在导入 %s HK' % (self.i, self.total, symbol),
              flush=True)
        data = future.result()
        if not data:
            # Empty result: remember the symbol so it can be re-crawled.
            self.empty.append((symbol,))
            return
        open_sql = 'insert into `open_5m` (`code`, `code_type`, `date`, `value`) values '
        high_sql = 'insert into `high_5m` (`code`, `code_type`, `date`, `value`) values '
        low_sql = 'insert into `low_5m` (`code`, `code_type`, `date`, `value`) values '
        close_sql = 'insert into `close_5m` (`code`, `code_type`, `date`, `value`) values '
        volume_sql = 'insert into `volume_5m` (`code`, `code_type`, `date`, `value`) values '
        average_sql = 'insert into `average_5m` (`code`, `code_type`, `date`, `value`) values '
        for bar_data in data:
            date = bar_data.date
            open_price = bar_data.open
            high = bar_data.high
            low = bar_data.low
            close = bar_data.close
            average = bar_data.average
            # volume may be missing on the 16:00 closing bar (market close)
            try:
                volume = bar_data.volume
            except AttributeError:
                volume = 0
            open_sql += "('{code}', '{code_type}', '{date}', {value:.4f}),".format(
                code=symbol, code_type='hk', date=date, value=open_price)
            high_sql += "('{code}', '{code_type}', '{date}', {value:.4f}),".format(
                code=symbol, code_type='hk', date=date, value=high)
            low_sql += "('{code}', '{code_type}', '{date}', {value:.4f}),".format(
                code=symbol, code_type='hk', date=date, value=low)
            close_sql += "('{code}', '{code_type}', '{date}', {value:.4f}),".format(
                code=symbol, code_type='hk', date=date, value=close)
            volume_sql += "('{code}', '{code_type}', '{date}', {value}),".format(
                code=symbol, code_type='hk', date=date, value=volume)
            average_sql += "('{code}', '{code_type}', '{date}', {value:.4f}),".format(
                code=symbol, code_type='hk', date=date, value=average)
        # Strip the trailing comma from each VALUES list before executing.
        open_rows = self.db.query(open_sql.rstrip(','))
        high_rows = self.db.query(high_sql.rstrip(','))
        low_rows = self.db.query(low_sql.rstrip(','))
        close_rows = self.db.query(close_sql.rstrip(','))
        volume_rows = self.db.query(volume_sql.rstrip(','))
        average_rows = self.db.query(average_sql.rstrip(','))
        if open_rows.rowcount == 0:
            raise RuntimeError('open_sql 语句执行失败:%s' % open_sql)
        elif high_rows.rowcount == 0:
            raise RuntimeError('high_sql 语句执行失败:%s' % high_sql)
        elif low_rows.rowcount == 0:
            raise RuntimeError('low_sql 语句执行失败:%s' % low_sql)
        elif close_rows.rowcount == 0:
            raise RuntimeError('close_sql 语句执行失败:%s' % close_sql)
        elif volume_rows.rowcount == 0:
            raise RuntimeError('volume_sql 语句执行失败:%s' % volume_sql)
        elif average_rows.rowcount == 0:
            raise RuntimeError('average_sql 语句执行失败:%s' % average_sql)
        else:
            pass

    def crawl_data(self, codes):
        '''Fire off async historical-data requests for every code; returns the futures.'''
        futures = []
        i = 0
        for code in codes:
            i += 1
            symbol, _ = code
            stock = Stock(symbol, Config.hk_exchange, Config.hk_currency)
            future = self.ib.reqHistoricalDataAsync(stock,
                                                    endDateTime='',
                                                    durationStr='900 S',
                                                    barSizeSetting='5 mins',
                                                    whatToShow='TRADES',
                                                    useRTH=True)
            # small pause between requests to avoid hammering the gateway
            self.ib.sleep(0.02)
            future.add_done_callback(
                functools.partial(self.deal_data, symbol=symbol))
            futures.append(future)
        return futures

    def get_codes_data(self, codes=None):
        '''Crawl 5-minute bar data for all stocks and record the run's end time.

        When *codes* is None the full list is loaded from the database;
        otherwise the passed list is used — this supports re-crawling the
        symbols that previously came back empty (a timed-out connection can
        return an empty list even though the stock has data).
        '''
        t1 = time.time()
        if codes is None:
            codes = self.db.get_codes()
            if not codes.rowcount:
                raise RuntimeError('获取股票失败,stock 表返回空.')
            codes = list(codes)
        self.total = len(codes)
        self.i = 0
        futures = self.crawl_data(codes)
        # Drive the event loop until every request has completed.
        self.ib.run(*futures)
        # Record the endDateTime of this crawl for the next incremental run.
        end_date_time = '2017-12-31 23:59:59'
        res = self.db.set_record(end_date_time)
        if not res.rowcount:
            raise RuntimeError('记录一个月5分钟的end_date_time失败.')
        t2 = time.time()
        t3 = t2 - t1
        print('HK 股票交易信息全部导入完成,耗时:%.2fs' % t3)
        self.log.info('导入股票信息完成,数据为空的股票有:{}'.format(self.empty))

    def get_hsi_data(self):
        '''Fetch 5-minute bars for the HSI index and insert them into hsi_5m.'''
        symbol = 'HSI'
        exchange = 'HKFE'
        currency = 'HKD'
        index = Index(symbol, exchange, currency)
        data = self.ib.reqHistoricalData(index,
                                         endDateTime='20180119 15:00:00',
                                         durationStr='900 S',
                                         barSizeSetting='5 mins',
                                         whatToShow='TRADES',
                                         useRTH=True)
        if not data:
            raise RuntimeError('HSI 数据接口返回空.')
        sql = 'insert into `hsi_5m` (`date`, `open`, `high`, `low`, `close`) values '
        for bar_data in data:
            date = bar_data.date
            open_price = bar_data.open
            high = bar_data.high
            low = bar_data.low
            close = bar_data.close
            sql += "('{date}', {open:.4f}, {high:.4f}, {low:.4f}, {close:.4f}),".format(
                date=date, open=open_price, high=high, low=low, close=close)
        res = self.db.query(sql.rstrip(','))
        if res.rowcount == 0:
            raise RuntimeError('SQL 语句执行异常, 插入数据库失败:%s' % sql)
        else:
            print('HSI Index 1个月5分钟数据导入完成.', flush=True)
class AsyncIBDataProvider(GenericDataProvider):
    """Data provider that fetches daily and intraday bars from Interactive Brokers.

    Tickers use a Backtrader-style dash notation (see ``parse_contract``).
    The connection is opened lazily on first use and, unless ``keep_alive``
    is set, closed again when a batch finishes.
    """

    logger = logging.getLogger(__name__)

    def __init__(self, verbose: int, host: str, port: int, timeout: int,
                 chunk_size: int, id=0, tz='America/New_York', **kwargs):
        super(AsyncIBDataProvider, self).__init__(self.logger,
                                                  verbose,
                                                  tz,
                                                  chunk_size=chunk_size,
                                                  **kwargs)
        self.port = port
        self.host = host
        self.timeout = timeout
        # keep the TWS connection open across batches when requested
        self.keep_alive = False
        if 'keep_alive' in kwargs:
            self.keep_alive = kwargs['keep_alive']
        self.ib = IB()
        self.id = id

    def disconnect(self):
        """Close the TWS/gateway connection."""
        self.ib.disconnect()

    def connect(self):
        """Open a read-only TWS/gateway connection; id 0 means pick a random clientId."""
        if self.id == 0:
            id = int(random.uniform(1, 1000))
        else:
            id = self.id
        self.logger.info(
            f"IBAsync: {self.host}:{self.port}, timeout={self.timeout}, id={id}"
        )
        self.ib.connect(self.host,
                        self.port,
                        clientId=id,
                        timeout=self.timeout,
                        readonly=True)

    def _initialize(self):
        # Lazy connect: only dial out when a request actually needs it.
        if not self.ib.isConnected():
            self.connect()

    def _finish(self):
        if not self.keep_alive:
            self.disconnect()

    async def _get_data_internal_async(self, symbol_data: SymbolData,
                                       **kwargs) -> pd.DataFrame:
        # Async facade over the synchronous implementation.
        return self._get_data_internal(symbol_data)

    def _get_data_internal(self, symbol_data: SymbolData) -> pd.DataFrame:
        """Fetch bars for one symbol/timeframe and post-process into a DataFrame.

        Intraday durations are capped per timeframe (60min: 365 D, 5min: 30 D,
        15min: 60 D) and shortened when ``symbol_data.start`` is closer than
        the cap.  NOTE(review): the three intraday branches are copy-paste
        duplicates differing only in cap and bar size.
        """
        self.logger.info(f"Getting symbol data: {symbol_data}")
        if symbol_data.timeframe == 'day':
            symbol, bars = self._get_daily(symbol_data.start,
                                           symbol_data.symbol,
                                           symbol_data.end)
            symbol = symbol_data.symbol.split('-')[0]
            dataframe = self._to_dataframe(bars)
        elif symbol_data.timeframe == '60min':
            now = f"{(datetime.now()):%Y-%m-%d %H:%M}"
            duration = '365 D'
            if symbol_data.start:
                diff = datetime.strptime(
                    now, '%Y-%m-%d %H:%M') - datetime.strptime(
                        symbol_data.start, '%Y-%m-%d %H:%M')
                if diff.days < 365:
                    duration = f"{diff.days} D"
            symbol, bars = self._get_intraday(symbol_data.symbol, now,
                                              duration, '1 hour',
                                              symbol_data.rth_only)
            symbol = symbol_data.symbol.split('-')[0]
            dataframe = self._to_dataframe(bars, tz_fix=True)
        elif symbol_data.timeframe == '5min':
            now = f"{(datetime.now()):%Y-%m-%d %H:%M}"
            duration = '30 D'
            if symbol_data.start:
                diff = datetime.strptime(
                    now, '%Y-%m-%d %H:%M') - datetime.strptime(
                        symbol_data.start, '%Y-%m-%d %H:%M')
                if diff.days < 30:
                    duration = f"{diff.days} D"
            symbol, bars = self._get_intraday(symbol_data.symbol, now,
                                              duration, '5 mins',
                                              symbol_data.rth_only)
            symbol = symbol_data.symbol.split('-')[0]
            dataframe = self._to_dataframe(bars, tz_fix=True)
        elif symbol_data.timeframe == '15min':
            now = f"{(datetime.now()):%Y-%m-%d %H:%M}"
            duration = '60 D'
            if symbol_data.start:
                diff = datetime.strptime(
                    now, '%Y-%m-%d %H:%M') - datetime.strptime(
                        symbol_data.start, '%Y-%m-%d %H:%M')
                if diff.days < 60:
                    duration = f"{diff.days} D"
            symbol, bars = self._get_intraday(symbol_data.symbol, now,
                                              duration, '15 mins',
                                              symbol_data.rth_only)
            symbol = symbol_data.symbol.split('-')[0]
            dataframe = self._to_dataframe(bars, tz_fix=True)
        else:
            raise Exception(f"{symbol_data.timeframe} not implemented!")
        df = dataframe
        if dataframe.empty:
            self.logger.warning(f"Got empty df for {symbol_data}")
        else:
            df = self._post_process(dataframe, symbol, symbol_data.start,
                                    symbol_data.end, symbol_data.timeframe,
                                    symbol_data.transform)
        return df

    @staticmethod
    def exctract_symbol(ticker: str,
                        type: str = 'STK',
                        exchange: str = 'ARCA',
                        currency: str = 'USD',
                        expire='',
                        multiplier='') -> tuple:
        """Split a dash-notation ticker into its contract components.

        A numeric second field (YYYYMM) marks a future.  Note: the method
        name keeps its historical typo ('exctract') for API compatibility.

        :return: (type, symbol, exchange, currency, expire, multiplier).
        """
        if ticker.count('-') == 4:
            symbol, type, exchange, currency, multiplier = ticker.split('-')
            if type.isdigit():
                expire = type
                type = "FUT"
        elif ticker.count('-') == 3:
            symbol, type, exchange, currency = ticker.split('-')
            if type.isdigit():
                expire = type
                type = "FUT"
        elif ticker.count('-') == 2:
            if ticker.find('CASH') > -1:
                # e.g. EUR.USD-CASH-IDEALPRO -> forex pair EURUSD
                symbol, currency, exchange = ticker.split('-')
                symbol = symbol.replace('.', '')
                type = 'FX'
            else:
                a, b, c = ticker.split('-')
                if b.isdigit():
                    type = 'FUT'
                    symbol = a
                    exchange = c
                    expire = b
                else:
                    symbol = a
                    exchange = b
                    currency = c
        elif ticker.count('-') == 1:
            symbol, exchange = ticker.split('-')
        else:
            symbol = ticker
        return type, symbol, exchange, currency, expire, multiplier

    @staticmethod
    def parse_contract(ticker):
        """ Backtrader contract specification (https://www.backtrader.com/docu/live/ib/ib.html):

          TICKER  # Stock type and SMART exchange
          TICKER-STK  # Stock and SMART exchange
          TICKER-STK-EXCHANGE  # Stock
          TICKER-STK-EXCHANGE-CURRENCY  # Stock
          TICKER-CFD  # CFD and SMART exchange
          TICKER-CFD-EXCHANGE  # CFD
          TICKER-CDF-EXCHANGE-CURRENCY  # Stock
          TICKER-IND-EXCHANGE  # Index
          TICKER-IND-EXCHANGE-CURRENCY  # Index
          TICKER-YYYYMM-EXCHANGE  # Future
          TICKER-YYYYMM-EXCHANGE-CURRENCY  # Future
          TICKER-YYYYMM-EXCHANGE-CURRENCY-MULT  # Future
          TICKER-FUT-EXCHANGE-CURRENCY-YYYYMM-MULT # Future
          TICKER-YYYYMM-EXCHANGE-CURRENCY-STRIKE-RIGHT  # FOP
          TICKER-YYYYMM-EXCHANGE-CURRENCY-STRIKE-RIGHT-MULT  # FOP
          TICKER-FOP-EXCHANGE-CURRENCY-YYYYMM-STRIKE-RIGHT # FOP
          TICKER-FOP-EXCHANGE-CURRENCY-YYYYMM-STRIKE-RIGHT-MULT # FOP
          CUR1.CUR2-CASH-IDEALPRO  # Forex
          TICKER-YYYYMMDD-EXCHANGE-CURRENCY-STRIKE-RIGHT  # OPT
          TICKER-YYYYMMDD-EXCHANGE-CURRENCY-STRIKE-RIGHT-MULT  # OPT
          TICKER-OPT-EXCHANGE-CURRENCY-YYYYMMDD-STRIKE-RIGHT # OPT
          TICKER-OPT-EXCHANGE-CURRENCY-YYYYMMDD-STRIKE-RIGHT-MULT # OPT

        :return:
        """
        contract_type, symbol, exchange, currency, expire, multiplier = \
            AsyncIBDataProvider.exctract_symbol(ticker)
        if contract_type == 'FX':
            return Forex(pair=symbol)
        if contract_type == 'IND':
            return Index(symbol, exchange, currency)
        if contract_type == 'FUT':
            return Future(symbol,
                          expire,
                          exchange,
                          currency=currency,
                          multiplier=multiplier)
        else:
            return Stock(symbol, exchange, currency)

    def _get_intraday(self, ticker: str, to_date: str, duration: str,
                      barsize: str, rth_only: bool) -> (str, [BarData]):
        """Request intraday bars ending at *to_date*; MIDPOINT for FX/CFD/commodities."""
        to_dt = datetime.strptime(f"{to_date}", '%Y-%m-%d %H:%M')
        contract = AsyncIBDataProvider.parse_contract(ticker)
        whatToShow = 'MIDPOINT' if isinstance(
            contract, (Forex, CFD, Commodity)) else 'TRADES'
        bars = self.ib.reqHistoricalData(contract,
                                         endDateTime=to_dt,
                                         durationStr=duration,
                                         barSizeSetting=barsize,
                                         whatToShow=whatToShow,
                                         useRTH=rth_only,
                                         formatDate=2)
        return contract.symbol, bars

    def _get_daily(self, from_date: str, ticker: str,
                   to_date: str) -> (str, [BarData]):
        """Request daily bars between *from_date* and *to_date* (max 365 days).

        When the span exceeds a year, only ``from_date``'s calendar year is
        requested.  A future end date is replaced by None ("now").
        """
        #TODO: strip HH:MM from start/end dates?
        from_dt = datetime.strptime(from_date, "%Y-%m-%d")
        today = datetime.strptime(to_date, "%Y-%m-%d")
        to_dt = datetime(today.year, today.month, today.day, 23, 59, 59)
        days = (to_dt - from_dt).days
        if days > 365:
            self.logger.warning(f"Historical data is limited to 365 Days. "
                                f"Only requesting for year '{from_dt.year}'")
            days = 365
            to_dt = datetime(from_dt.year, 12, 31, 23, 59, 59)
        if to_dt > datetime.today():
            to_dt = None
        contract = AsyncIBDataProvider.parse_contract(ticker)
        whatToShow = 'MIDPOINT' if isinstance(
            contract, (Forex, CFD, Commodity)) else 'TRADES'
        # bars = self.ib.reqDailyBars(contract, 2016)
        bars = self.ib.reqHistoricalData(contract,
                                         endDateTime=to_dt,
                                         durationStr=F"{days} D",
                                         barSizeSetting='1 day',
                                         whatToShow=whatToShow,
                                         useRTH=True,
                                         formatDate=1)
        return contract.symbol, bars

    def _to_dataframe(self, bars, tz_fix=False):
        """Convert BarData list to an OHLCV DataFrame indexed by 'Date'.

        With ``tz_fix`` the UTC timestamps are shifted into the provider's
        timezone and made naive before indexing.
        """
        if tz_fix:
            data = [{
                'Date':
                pd.to_datetime(
                    b.date.astimezone(self.tz).replace(tzinfo=None)),
                'Open': b.open,
                'High': b.high,
                'Low': b.low,
                'Close': b.close,
                'Volume': b.volume
            } for b in bars]
        else:
            data = [{
                'Date': pd.to_datetime(b.date),
                'Open': b.open,
                'High': b.high,
                'Low': b.low,
                'Close': b.close,
                'Volume': b.volume
            } for b in bars]
        if len(data) > 0:
            return pd.DataFrame(data).set_index('Date')
        else:
            return pd.DataFrame()

    def add_quotes(self, data, ticker):
        # Quotes are not supported by this provider; pass data through untouched.
        return data
class IBDataService:
    """Thin synchronous wrapper around an ib_insync ``IB`` connection.

    Connects on construction using a random client id (so several
    instances can share one gateway) and exposes helpers for contract
    details, historical bars and tick-by-tick streaming.
    """

    # Gateway address; port 4002 is the paper-trading gateway,
    # 4001 for real trading.
    ip = "127.0.0.1"
    port = 4002  # 4001 for real trading

    def __init__(self):
        # Random client id so several instances can coexist on one gateway.
        self.uid = random.randint(1000, 10000)
        print(f"init - UID: {str(self.uid)}")
        self.ib = IB()
        self.connect()

    def connect(self, *args):
        """Open the gateway connection if not already connected."""
        print(f"connectToIB - UID: {str(self.uid)}")
        if not self.ib.isConnected():
            print("CONNECTING ...")
            # Fix: use the class-level ip/port instead of re-hardcoding the
            # address, so overriding `ip`/`port` (e.g. for live trading)
            # actually takes effect.
            self.ib.connect(self.ip, self.port, clientId=self.uid)
            print("CONNECTED")

    def disconnect(self, *args):
        """Drop the gateway connection if one is open."""
        # Fix: the original logged "connectToIB" here (copy-paste from
        # connect()), which made connect/disconnect indistinguishable in logs.
        print(f"disconnectFromIB - UID: {str(self.uid)}")
        if self.ib.isConnected():
            print("DISCONNECTING ...")
            self.ib.disconnect()
            print("DISCONNECTED ...")

    def getContractDetail(self, contract):
        """Return the first ContractDetails for `contract`, or None."""
        print(f"getContractDetail - UID: {str(self.uid)}")
        data = self.ib.reqContractDetails(contract)
        # Only the first match is returned; callers wanting the full chain
        # should use getFuturesContractDetail.
        return data[0] if data else None

    def getFuturesContractDetail(self, contract):
        """Return the full list of ContractDetails, or None if empty."""
        print(f"getFuturesContractDetail - UID: {str(self.uid)}")
        data = self.ib.reqContractDetails(contract)
        return data if data else None

    def getHistoricalData(self, contract, endDate="", duration="1 Y",
                          barSize="1 day", price="MIDPOINT"):
        """Fetch historical bars (blocking) for `contract`.

        Positional tail args map to ib_insync reqHistoricalData:
        useRTH=1, formatDate=1, keepUpToDate=False, chartOptions=[].
        """
        print(f"getHistoricalData - UID: {str(self.uid)}")
        data = self.ib.reqHistoricalData(contract, endDate, duration, barSize,
                                         price, 1, 1, False, [])
        return data

    async def startRealtimeData(self, contract, method):
        """Subscribe to market + tick-by-tick data; `method` receives updates."""
        print(f"startRealtimeData - UID: {str(self.uid)}")
        self.ib.reqMktData(contract, "233", False, False)
        ticker = self.ib.reqTickByTickData(contract, TickDataType.LAST.value)
        ticker.updateEvent += method
        print(f"ENDS - startRealtimeData - UID: {str(self.uid)}")

    def stopRealtimeData(self, contract):
        """Cancel the subscriptions opened by startRealtimeData."""
        print(f"stopRealtimeData - UID: {str(self.uid)}")
        self.ib.cancelMktData(contract)
        self.ib.cancelTickByTickData(contract, TickDataType.LAST.value)
        print(f"ENDS - stopRealtimeData - UID: {str(self.uid)}")
class trade_ES():
    """Event-driven ES futures-options strategy on ib_insync.

    Subscribes to streaming 30-second ES bars, computes TA indicators on
    each update, and flips a Hold/Buy/Sell state machine that buys weekly
    ES futures options (calls on breakout up, puts on breakout down) and
    unwinds them on pullbacks.
    """

    def __init__(self):
        self.ib = IB()
        # Random clientId so reconnects don't collide with a stale session.
        self.ib.connect('127.0.0.1', 7497,
                        clientId=np.random.randint(10, 1000))
        self.tickers_ret = {}
        self.endDateTime = ''
        self.No_days = '43200 S'       # 12 hours of history, in seconds
        self.interval = '30 secs'
        self.tickers_signal = "Hold"   # state machine: Hold / Buy / Sell
        self.ES = Future(symbol='ES',
                         lastTradeDateOrContractMonth='20200619',
                         exchange='GLOBEX', currency='USD')
        self.ib.qualifyContracts(self.ES)
        # keepUpToDate=True makes ES_df a live BarDataList that fires
        # updateEvent on every new/changed bar.
        self.ES_df = self.ib.reqHistoricalData(contract=self.ES,
                                               endDateTime=self.endDateTime,
                                               durationStr=self.No_days,
                                               barSizeSetting=self.interval,
                                               whatToShow='TRADES',
                                               useRTH=False,
                                               keepUpToDate=True)
        # NOTE(review): tickers_ret was initialised to {} above and is
        # immediately re-bound to a list here — the dict is dead.
        self.tickers_ret = []
        self.options_ret = []
        self.option = {'call': FuturesOption, 'put': FuturesOption}
        self.options_history = {}
        self.trade_options = {'call': [], 'put': []}
        self.price = 0
        self.i = -1                    # index of the bar being evaluated (last bar)
        self.ES_df.updateEvent += self.make_clean_df
        self.Buy = True
        self.Sell = False
        self.ib.positionEvent += self.order_verify
        self.waitTimeInSeconds = 220
        self.tradeTime = 0
        self.mySemaphore = asyncio.Semaphore(1)

    def run(self):
        """Kick the pipeline once with the already-fetched history."""
        self.make_clean_df(self.ES_df)

    def next_exp_weekday(self):
        """Map today's weekday to the weekday of the next weekly expiry.

        Keys are expiry weekdays (0=Mon, 2=Wed, 4=Fri); values are the
        weekdays that roll into that expiry.
        """
        weekdays = {2: [6, 0], 4: [0, 1, 2], 0: [3, 4]}
        today = datetime.date.today().weekday()
        for exp, day in weekdays.items():
            if today in day:
                return exp

    def next_weekday(self, d, weekday):
        """Return the next date after `d` falling on `weekday`, as YYYYMMDD."""
        days_ahead = weekday - d.weekday()
        if days_ahead <= 0:  # Target day already happened this week
            days_ahead += 7
        date_to_return = d + datetime.timedelta(
            days_ahead)  # 0 = Monday, 1 = Tuesday, 2 = Wednesday ...
        return date_to_return.strftime('%Y%m%d')

    def get_strikes_and_expiration(self):
        """Return (strikes, expiration) for the next weekly ES option chain.

        Strikes are filtered to multiples of 5 within +/-10 points of the
        current ES market price.
        """
        expiration = self.next_weekday(datetime.date.today(),
                                       self.next_exp_weekday())
        chains = self.ib.reqSecDefOptParams(underlyingSymbol='ES',
                                            futFopExchange='GLOBEX',
                                            underlyingSecType='FUT',
                                            underlyingConId=self.ES.conId)
        chain = util.df(chains)
        strikes = chain[chain['expirations'].astype(str).str.contains(
            expiration)].loc[:, 'strikes'].values[0]
        [ESValue] = self.ib.reqTickers(self.ES)
        ES_price = ESValue.marketPrice()
        strikes = [
            strike for strike in strikes
            if strike % 5 == 0 and ES_price - 10 < strike < ES_price + 10
        ]
        return strikes, expiration

    def get_contract(self, right, net_liquidation):
        """Pick the first candidate option whose premium fits the budget.

        right: 'C' or 'P'. Skips strikes whose last price * 50 (ES option
        multiplier) is at or above `net_liquidation`.
        """
        strikes, expiration = self.get_strikes_and_expiration()
        for strike in strikes:
            contract = FuturesOption(symbol='ES',
                                     lastTradeDateOrContractMonth=expiration,
                                     strike=strike,
                                     right=right,
                                     exchange='GLOBEX')
            self.ib.qualifyContracts(contract)
            self.price = self.ib.reqMktData(contract, "", False, False)
            # NOTE(review): price.last may be NaN right after subscription;
            # float(NaN) * 50 compares False-ish but never raises — verify.
            if float(self.price.last) * 50 >= net_liquidation:
                continue
            else:
                return contract

    def make_clean_df(self, ES_df, hashbar=None):
        """Rebuild the indicator DataFrame from the bar list and run the strategy.

        Called both directly (run) and as the BarDataList updateEvent
        handler (hence the unused `hashbar` slot for the hasNewBar arg).
        """
        ES_df = util.df(ES_df)
        ES_df['RSI'] = ta.RSI(ES_df['close'])
        ES_df['macd'], ES_df['macdsignal'], ES_df['macdhist'] = ta.MACD(
            ES_df['close'], fastperiod=12, slowperiod=26, signalperiod=9)
        ES_df['MA_9'] = ta.MA(ES_df['close'], timeperiod=9)
        ES_df['MA_21'] = ta.MA(ES_df['close'], timeperiod=21)
        ES_df['MA_200'] = ta.MA(ES_df['close'], timeperiod=200)
        ES_df['EMA_9'] = ta.EMA(ES_df['close'], timeperiod=9)
        ES_df['EMA_21'] = ta.EMA(ES_df['close'], timeperiod=21)
        ES_df['EMA_200'] = ta.EMA(ES_df['close'], timeperiod=200)
        ES_df['ATR'] = ta.ATR(ES_df['high'], ES_df['low'], ES_df['close'])
        # 20-bar breakout channels and volume high-water mark.
        ES_df['roll_max_cp'] = ES_df['high'].rolling(20).max()
        ES_df['roll_min_cp'] = ES_df['low'].rolling(20).min()
        ES_df['roll_max_vol'] = ES_df['volume'].rolling(20).max()
        ES_df.dropna(inplace=True)
        self.loop_function(ES_df)

    def placeOrder(self, contract, order):
        """Submit `order` and return [trade, contract, submission time]."""
        trade = self.ib.placeOrder(contract, order)
        tradeTime = datetime.datetime.now()
        return ([trade, contract, tradeTime])

    def sell(self, contract, position):
        """Flatten `position` with a market order and wait until it is zero."""
        self.ib.qualifyContracts(contract)
        if position.position > 0:
            order = 'Sell'
        else:
            order = 'Buy'
        marketorder = MarketOrder(order, abs(position.position))
        marketTrade, contract, tradeTime = self.placeOrder(
            contract, marketorder)
        # NOTE(review): IB has no `position` attribute — this looks like it
        # should poll self.ib.positions(); as written it raises
        # AttributeError on the first iteration. Confirm against ib_insync.
        while self.ib.position.position != 0:
            self.ib.sleep(1)
        self.mySemaphore.release()

    async def buy(self, contract):
        """Buy one contract at market, gated by the semaphore."""
        # NOTE(review): attribute is `mySemaphore` in __init__;
        # `self.semaphore` does not exist — AttributeError when awaited.
        await self.semaphore.acquire()
        self.ib.qualifyContracts(contract)
        marketorder = MarketOrder('Buy', 1)
        marketTrade = self.ib.placeOrder(contract, marketorder)

    def order_verify(self, order):
        """positionEvent handler: track whether we may buy or must sell."""
        if order.position == 0.0 or order.position < 0:
            self.Buy = True
            self.Sell = False
        elif order.position > 0:
            self.Buy = False
            self.Sell = True
        else:
            # Unreachable given the conditions above; kept for safety.
            self.Buy = False
            self.Sell = False
        print(f'Buy= {self.Buy}, sell = {self.Sell}')

    def loop_function(self, ES_df):
        """Evaluate the Hold/Buy/Sell state machine on the latest bar.

        self.i is -1, so all .iloc[self.i] reads refer to the most recent
        bar and .iloc[self.i - 1] to the one before it.
        """
        df = ES_df[[
            'high', 'low', 'volume', 'close', 'RSI', 'ATR', 'roll_max_cp',
            'roll_min_cp', 'roll_max_vol', 'EMA_9', 'EMA_21', 'macd',
            'macdsignal'
        ]]
        if self.tickers_signal == "Hold":
            print('Hold')
            # Breakout up on high volume with MACD confirmation -> arm Buy.
            if df["high"].iloc[self.i] >= df["roll_max_cp"].iloc[self.i] and \
                    df["volume"].iloc[self.i] > df["roll_max_vol"].iloc[self.i - 1] and df['RSI'].iloc[self.i] > 30 \
                    and df['macd'].iloc[self.i] > df['macdsignal'].iloc[self.i]:
                self.tickers_signal = "Buy"
                return
            # Breakdown on high volume with MACD confirmation -> arm Sell.
            elif df["low"].iloc[self.i] <= df["roll_min_cp"].iloc[self.i] and \
                    df["volume"].iloc[self.i] > df["roll_max_vol"].iloc[self.i - 1] and df['RSI'].iloc[self.i] < 70 \
                    and df['macd'].iloc[self.i] < df['macdsignal'].iloc[self.i]:
                self.tickers_signal = "Sell"
                return
            else:
                self.tickers_signal = "Hold"
                return
        elif self.tickers_signal == "Buy":
            print('BUY SIGNAL')
            # Exit calls once price has pulled back less than 0.75 ATR.
            if df["close"].iloc[self.i] > df["close"].iloc[self.i - 1] - (
                    0.75 * df["ATR"].iloc[self.i - 1]) and len(
                        self.ib.positions()) != 0:
                print(
                    f'{df["close"].iloc[self.i]} > {df["close"].iloc[self.i - 1] - (0.75 * df["ATR"].iloc[self.i - 1])}'
                )
                print('first buy condition')
                positions = self.ib.positions()
                for position in positions:
                    if position.contract.right == 'C':
                        self.sell(position.contract, position)
                self.tickers_signal = "Hold"
                return
            # Opposite breakdown while long calls -> flip to Sell.
            elif df["low"].iloc[self.i] <= df["roll_min_cp"].iloc[self.i] and \
                    df["volume"].iloc[self.i] > df["roll_max_vol"].iloc[self.i - 1] and df['RSI'].iloc[self.i] < 70 \
                    and df['macd'].iloc[self.i] < df['macdsignal'].iloc[self.i] and len(self.ib.positions())!=0:
                self.tickers_signal = "Sell"
                print('sell')
                positions = self.ib.positions()
                for position in positions:
                    if position.contract.right == 'C':
                        self.sell(position.contract, position)
                # NOTE(review): `==` is a comparison, not an assignment —
                # this statement has no effect (signal already set above).
                self.tickers_signal == "Sell"
                return
            else:
                if len(self.ib.positions()) == 0:
                    self.option['call'] = self.get_contract(
                        right="C", net_liquidation=2000)
                    # NOTE(review): buy() is a coroutine; calling it without
                    # awaiting/scheduling means the order is never placed.
                    self.buy(self.option['call'])
                    self.tickers_signal = "Hold"
                else:
                    self.tickers_signal = "Hold"
        elif self.tickers_signal == "Sell":
            print('SELL SIGNAL')
            # Exit puts once price has bounced less than 0.75 ATR.
            if df["close"].iloc[self.i] < df["close"].iloc[self.i - 1] + (
                    0.75 * df["ATR"].iloc[self.i - 1]) and len(
                        self.ib.positions()) != 0:
                print('first sell condition')
                print(
                    f'{df["close"].iloc[self.i]} < {df["close"].iloc[self.i - 1] - (0.75 * df["ATR"].iloc[self.i - 1])}'
                )
                print('sell')
                positions = self.ib.positions()
                for position in positions:
                    if position.contract.right == 'P':
                        self.sell(position.contract, position)
                self.tickers_signal = "Hold"
                return
            # Opposite breakout while long puts -> flip to Buy.
            elif df["high"].iloc[self.i] >= df["roll_max_cp"].iloc[self.i] and \
                    df["volume"].iloc[self.i] > df["roll_max_vol"].iloc[self.i - 1] and df['RSI'].iloc[self.i] > 30 \
                    and df['macd'].iloc[self.i] > df['macdsignal'].iloc[self.i] and len(self.ib.positions())!=0:
                self.tickers_signal = "Buy"
                print('sell')
                positions = self.ib.positions()
                for position in positions:
                    if position.contract.right == 'P':
                        self.sell(position.contract, position)
                # NOTE(review): `==` comparison with no effect, as above.
                self.tickers_signal == "Buy"
                return
            else:
                if len(self.ib.positions()) == 0:
                    self.option['put'] = self.get_contract(
                        right="P", net_liquidation=2000)
                    # NOTE(review): un-awaited coroutine, as above.
                    self.buy(self.option['put'])
                    self.tickers_signal = "Hold"
                else:
                    self.tickers_signal = "Hold"

    def checkError(self, errCode, errString):
        """IB error handler: on 2104 (market data farm OK) force a reconnect."""
        print('Error Callback', errCode, errString)
        if errCode == 2104:
            print('re-connect after 5 secs')
            self.ib.sleep(5)
            self.ib.disconnect()
            self.ib.connect('127.0.0.1', 7497,
                            clientId=np.random.randint(10, 1000))
            # NOTE(review): make_clean_df expects a bar list, but self.ES is
            # the Future contract — verify this call's argument.
            self.make_clean_df(self.ES)
class _DataLoader:
    """Private ETL helper.

    Loads candlestick data from CSV files and fetches candles from the
    Binance, Questrade and IBKR APIs, optionally backed by a PostgreSQL
    candlestick table.

    Attributes
    -----------
    binance: Binance API Python wrapper (Client)
    sql:     SqlMapper for the candlesticks database
    ib:      ib_insync IB connection (connected only when ib=True)
    qtrade:  Questrade wrapper (present only when qtrade=True)

    Methods
    ------------
    _load_csv
    _get_range
    _get_binance_futures_candles
    _timeframe_setter

    Please look at each method for descriptions.
    """

    # Multiplier to convert epoch seconds to the milliseconds the
    # Binance API expects.
    SECOND_TO_MILLISECOND = 1000

    def __init__(self, db=False, qtrade=False, ib=False):
        self.sql = SqlMapper()
        self.ib = IB()
        self.binance = Client()
        if qtrade:
            # NOTE(review): hard-coded absolute Windows path to the token —
            # consider making this configurable.
            self.qtrade = Questrade(
                token_yaml=
                'C:/Users/haseab/Desktop/Python/PycharmProjects/FAB/local/Workers/access_token.yml',
                save_yaml=True)
            print('Connected to Questrade API')
        if db:
            self.conn = self.sql.connect_psql()
        if ib:
            # 7496 is the TWS live port; clientId fixed at 104.
            print(self.ib.connect('127.0.0.1', 7496, 104))

    def load_db_data(self, symbol, conns, chunks=3):
        """Load all 1-minute candles for `symbol` from psql in parallel chunks.

        Uses EXPLAIN to estimate the row count, splits it into `chunks`
        LIMIT/OFFSET windows, and runs one SELECT per connection on a
        thread pool. Returns the list of Future objects.
        """
        results = []
        resp_df = self.sql.SELECT(
            f"explain select * from candlesticks where symbol = '{symbol}'",
            show_select=False,
            cursor=conns[0].cursor())
        # Parse the estimated row count out of the EXPLAIN plan text.
        table_row_count = int(
            resp_df.loc[0, 'QUERY PLAN'].split(' ')[-2].strip("rows="))
        limit = table_row_count // chunks
        with ThreadPoolExecutor(max_workers=3) as executor:
            for chunk, conn in zip(range(0, table_row_count, limit), conns):
                print('start')
                cursor = conn.cursor()
                results.append(
                    executor.submit(
                        self.sql.SELECT,
                        f"* FROM candlesticks WHERE SYMBOL = '{symbol}' AND TF = '1' LIMIT {limit} OFFSET {chunk}",
                        cursor))
                print('done')
            # Redundant inside the `with` block, which already waits.
            executor.shutdown(wait=True)
        return results

    def _randomly_delete_rows(self, df, percentage_of_data=0.10):
        """Drop roughly `percentage_of_data` of df's rows at random."""
        index_list = []
        # NOTE(review): `len(df) // (1 / percentage_of_data)` is a float, and
        # range() rejects floats — this raises TypeError as written. The
        # intent appears to be int(len(df) * percentage_of_data); confirm.
        for _ in range(len(df) // (1 / percentage_of_data)):
            index = random.choice(df.index)
            if index not in index_list:
                index_list.append(index)
        return df.drop(index_list)

    def _clean_1m_data(self, df):
        """Fill gaps in 1-minute data onto a complete 60-second grid.

        Missing candles get volume 0.001 and forward-filled prices.
        """
        start_date = df['timestamp'].iloc[0]
        end_date = df['timestamp'].iloc[-1]
        # NOTE: loop variable `time` shadows the `time` module inside this
        # comprehension only.
        full_timestamps = pd.DataFrame(
            [time for time in range(start_date, end_date + 60, 60)],
            columns=['timestamp'])
        full_df = full_timestamps.merge(df.reset_index(),
                                        on='timestamp',
                                        how='left')
        full_df['volume'] = full_df['volume'].fillna(0.001)
        # NOTE(review): fillna(method='ffill') is deprecated in newer pandas
        # in favour of .ffill().
        filled_df = full_df.fillna(method='ffill')
        filled_df['date'] = [
            datetime.fromtimestamp(timestamp)
            for timestamp in filled_df['timestamp'].values
        ]
        return filled_df

    def _load_csv_v2(self, csv_url):
        """Load a CSV whose filename encodes symbol and timeframe.

        Expects a name like "Binance SYMBOL TFm ..." — the 2nd token is the
        symbol and the 3rd (minus its trailing unit letter) the timeframe.
        Returns a frame indexed by (symbol, tf, timestamp).
        """
        tf = csv_url.split(' ')[2][:-1]
        symbol = csv_url.split(' ')[1]
        data = pd.read_csv(csv_url)
        # Timestamps in the file are in milliseconds; convert to seconds.
        data['timestamp'] = [
            int(timestamp / 1000) for timestamp in data['timestamp']
        ]
        data['date'] = [
            datetime.fromtimestamp(timestamp)
            for timestamp in data['timestamp'].values
        ]
        data['tf'] = [tf] * len(data)
        data['symbol'] = [symbol] * len(data)
        data[["open", "high", "low", "close",
              "volume"]] = data[["open", "high", "low", "close",
                                 "volume"]].astype(float)
        data = data[[
            'symbol', 'tf', 'timestamp', 'date', 'open', 'high', 'low',
            'close', 'volume'
        ]]
        return data.set_index(["symbol", "tf", "timestamp"])

    def _load_csv(self, csv_url: str) -> pd.DataFrame:
        """Load 1-minute historical candlestick data from a CSV url.

        The important columns are the ones that create the candlestick
        (open, high, low, close). Returns a frame indexed by timestamp
        with a readable `date` column added.
        """
        # Reading CSV File containing 1 min candlestick data
        data = pd.read_csv(csv_url, index_col='timestamp')
        # Converting Timestamp numbers into a new column of readable dates
        data['date'] = [
            datetime.fromtimestamp(timestamp) for timestamp in data.index
        ]
        data[["open", "high", "low", "close",
              "volume"]] = data[["open", "high", "low", "close",
                                 "volume"]].astype(float)
        data = data[['date', 'open', 'high', 'low', 'close', 'volume']]
        return data

    def _get_binance_futures_candles(self,
                                     symbol: str,
                                     tf: int,
                                     start_candles_ago: int,
                                     end_candles_ago: int = 0,
                                     now: float = None) -> pd.DataFrame:
        """Get a window of futures candles counted backwards from `now`.

        Ex. _get_binance_futures_candles("BTCUSDT", 5, 3) = candles from
        5 candles ago to 3 candles ago.

        Parameters:
        -----------
        symbol: str           Ex. "BTCUSDT", "ETHUSDT"
        tf: int               timeframe in minutes (must be a key of map_tf)
        start_candles_ago: int
        end_candles_ago: int
        now: float            epoch seconds; defaults to the current time

        :return pd.DataFrame of candlestick data.
        """
        if now == None:
            now = time.time()
        # Defining params to put in exchange API call
        map_tf = {
            1: "1m",
            3: "3m",
            5: "5m",
            15: "15m",
            30: "30m",
            60: "1h",
            120: "2h",
            240: "4h",
            360: "6h",
            480: "8h"
        }
        start_minutes_ago = start_candles_ago * tf
        end_minutes_ago = end_candles_ago * tf
        start_time = Helper().minutes_ago_to_timestamp(
            start_minutes_ago, now, adjust=self.SECOND_TO_MILLISECOND)
        end_time = Helper().minutes_ago_to_timestamp(
            end_minutes_ago, now, adjust=self.SECOND_TO_MILLISECOND)
        num_candles = abs(start_candles_ago - end_candles_ago)
        data = self.binance.futures_klines(symbol=symbol,
                                           interval=map_tf[tf],
                                           startTime=start_time,
                                           endTime=end_time,
                                           limit=num_candles)
        return Helper.into_dataframe(data, symbol=symbol, tf=tf, index=False)

    # NOTE(review): missing `self` (or a @staticmethod decorator) — calling
    # this on an instance raises TypeError. Verify intended usage.
    def load_finviz_data():
        """Scrape NYSE + NASDAQ ownership tables from finviz into a CSV."""
        import pandas as pd
        from finviz.screener import Screener
        filters_nyse = ['exch_nyse'
                        ]  # Shows companies in NASDAQ which are in the S&P500
        stock_list_nyse = Screener(
            filters=filters_nyse, table='Ownership', order='Market Cap'
        )  # Get the performance table and sort it by price ascending
        filters_nasdaq = ['exch_nasd']
        stock_list_nasd = Screener(
            filters=filters_nasdaq, table='Ownership', order='Market Cap'
        )  # Get the performance table and sort it by price ascending
        nasdaq_df = pd.DataFrame(stock_list_nasd.data).drop('No.', axis=1)
        nyse_df = pd.DataFrame(stock_list_nyse.data).drop('No.', axis=1)
        # NOTE(review): DataFrame.append is removed in pandas 2.x;
        # pd.concat is the replacement.
        df = nyse_df.append(nasdaq_df).reset_index(drop=True)
        df.to_csv('finviz_stocks.csv', index=False)
        return df

    def _get_ibkr_stocks_candles(self, symbol: str, tf: int, start_time,
                                 end_time):
        """Fetch IBKR stock candles between two parseable date strings."""
        tf_map = {
            1: "1 min",
            5: "5 mins",
            15: "15 mins",
            30: "30 mins",
            60: "1 hour",
            240: "4 hours",
            1440: "1 day"
        }
        parsed_start, parsed_end = dateparser.parse(
            start_time), dateparser.parse(end_time)
        # +1 so a partial day still yields at least "1 D" of history.
        duration = (parsed_end - parsed_start).days + 1
        bars = self.ib.reqHistoricalData(Stock(str(symbol), 'SMART', 'USD'),
                                         endDateTime=parsed_end,
                                         durationStr=f'{duration} D',
                                         barSizeSetting=tf_map[tf],
                                         whatToShow='TRADES',
                                         useRTH=False,
                                         formatDate=1)
        return Helper.into_dataframe(bars, symbol=symbol, tf=tf)

    def _get_range(self,
                   dataframe: pd.DataFrame,
                   start_date: str = None,
                   end_date: str = None) -> pd.DataFrame:
        """Return the 1-min rows within [start_date, end_date] of the dataset.

        Parameters
        ----------
        dataframe: pd.DataFrame object with a Timestamp as its index
        start_date: date in the format of YYYY-MM-DD format
        end_date: date in the format of YYYY-MM-DD format

        :return dataframe
        """
        if start_date == None or end_date == None:
            raise Exception("No Start date given")
        start_date = Helper.string_to_timestamp(start_date)
        end_date = Helper.string_to_timestamp(end_date)
        # Converting from timestamp index to numbered index, then adding
        # numbered index as column
        dataframe_temp = dataframe.reset_index().reset_index().set_index(
            'timestamp')
        start_index = dataframe_temp.loc[start_date, 'index']
        try:
            end_index = dataframe_temp.loc[end_date, 'index']
        except KeyError:
            # End date past the data: take everything up to the last row.
            end_index = dataframe_temp['index'].iloc[-1]
        return dataframe[start_index:end_index + 1]

    def _timeframe_setter(self,
                          dataframe: pd.DataFrame,
                          skip: int,
                          shift: int = 0,
                          keep_last_row=False) -> pd.DataFrame:
        """Vertical way of abstracting data.

        Converts minute candlestick data into the timeframe (tf) of choice,
        where `skip` is the number of base candles merged into one.

        Per-column rules:
          Close  - take every `skip`-th close (last close of the window).
          Open   - same rule as Close, offset to the window start.
          High   - maximum of the window.
          Low    - minimum of the window.
          Volume - sum of the window.

        Ex. df['close'] = pd.Series([4.50, 4.60, 4.65, 4.44, 4.21, 4.54, 4.10])
            _timeframe_setter(df['close'], 2) -> [4.50, 4.65, 4.21, 4.10]
            _timeframe_setter(df['high'], 2)  -> [4.60, 4.65, 4.44, 4.54]

        If the range of tf is not even (such as having a tf=2 but only 5
        elements), the last (incomplete) candle is dropped unless
        keep_last_row is True.

        :return dataframe
        """
        if skip == 1:
            return dataframe
        base_tf = int(dataframe['tf'].iloc[0])
        # NOTE(review): with the default shift=0 this branch never fires;
        # callers must pass shift=None explicitly to get auto-alignment.
        if shift == None:
            # This is making sure that it shifts so that the last tf candle
            # includes the last 1-minute candle
            shift = skip - len(dataframe) % skip - 1
        dataframe[["open", "high", "low", "close", "volume"
                   ]] = dataframe[["open", "high", "low", "close",
                                   "volume"]].astype(float)
        # Creating a new dataframe so that the size of the rows of the new
        # dataframe will be the same as the new columns
        df = dataframe.iloc[shift::skip].copy()
        rolled_df = dataframe.rolling(skip)
        high = rolled_df['high'].max()
        low = rolled_df['low'].min()
        volume = rolled_df['volume'].sum()
        close = dataframe.copy()['close']
        # Abstracting based on the highest, lowest and sum respectively.
        df['high'] = np.append(high.iloc[shift + skip::skip].values,
                               high.iloc[-1])
        df['low'] = np.append(low.iloc[shift + skip::skip].values,
                              low.iloc[-1])
        df['volume'] = np.append(volume.iloc[shift + skip::skip].values,
                                 volume.iloc[-1])
        # Selecting every nth value in the list, where n is the timeframe
        try:
            df['close'] = close.iloc[shift + skip - 1::skip].values
        except ValueError as e:
            # Length mismatch: pad with the final close.
            df['close'] = np.append(close.iloc[shift + skip - 1::skip].values,
                                    close.iloc[-1])
        tf = base_tf * skip
        df['tf'] = [tf] * len(df['volume'])
        # Dropping the last value, this gets rid of the candle that isn't
        # complete until the end of the tf
        if not keep_last_row:
            df.drop(df.tail(1).index, inplace=True)
        return df.reset_index().set_index(['symbol', 'tf', 'timestamp'])

    def _get_fast_questrade_data(self, symbol, start_datetime, end_datetime,
                                 tf_str, tf):
        """Fetch Questrade candles and normalize them into a DataFrame."""
        data = self.qtrade.get_historical_data(symbol, start_datetime,
                                               end_datetime, tf_str)
        return Helper.into_dataframe(data, symbol=symbol, tf=tf, qtrade=True)

    def _get_fast_ibkr_data(self, symbol, duration, end_datetime, tf_str, tf):
        """Fetch IBKR candles (blocking) and normalize into a DataFrame."""
        data = self.ib.reqHistoricalData(Stock(str(symbol), 'SMART', 'USD'),
                                         endDateTime=end_datetime,
                                         durationStr=f'{duration} D',
                                         barSizeSetting=tf_str,
                                         whatToShow='TRADES',
                                         useRTH=False,
                                         formatDate=1)
        return Helper.into_dataframe(data, symbol=symbol, tf=tf)

    ################################################# ASYNC FUNCTIONS ############################################################

    async def _async_get_fast_questrade_data(self, symbol, start_datetime,
                                             end_datetime, tf_str, tf):
        """Async wrapper of the Questrade fetch (the call itself is sync)."""
        data = self.qtrade.get_historical_data(symbol, start_datetime,
                                               end_datetime, tf_str)
        return Helper.into_dataframe(data, symbol=symbol, tf=tf, qtrade=True)

    async def _async_get_fast_ibkr_data(self, symbol, duration, end_datetime,
                                        tf_str, tf):
        """Truly async IBKR fetch via reqHistoricalDataAsync."""
        data = await self.ib.reqHistoricalDataAsync(
            Stock(str(symbol), 'SMART', 'USD'),
            endDateTime=end_datetime,
            durationStr=f'{duration} D',
            barSizeSetting=tf_str,
            whatToShow='TRADES',
            useRTH=False,
            formatDate=1)
        print(symbol, tf)
        # return data
        return Helper.into_dataframe(data, symbol=symbol, tf=tf)

    ##################################################################################################################################

    def get_ibkr_stock_candles(self, symbol, tf, start_time, end_time):
        """Fetch IBKR stock candles and return a raw util.df DataFrame."""
        tf_map = {
            1: "1 min",
            5: "5 mins",
            15: "15 mins",
            30: "30 mins",
            60: "1 hour",
            240: "4 hours",
            1440: "1 day"
        }
        start_datetime, end_datetime = dateparser.parse(
            start_time), dateparser.parse(end_time)
        duration = (end_datetime - start_datetime).days + 1
        data = self.ib.reqHistoricalData(Stock(str(symbol), 'SMART', 'USD'),
                                         endDateTime=end_datetime,
                                         durationStr=f'{duration} D',
                                         barSizeSetting=tf_map[tf],
                                         whatToShow='TRADES',
                                         useRTH=False,
                                         formatDate=1)
        return util.df(data)

    def get_questrade_stock_candles(self, symbol: str, tf: int, start_time,
                                    end_time):
        """Fetch Questrade candles between two parseable date strings."""
        tf_map = {
            1: "OneMinute",
            5: "FiveMinutes",
            15: "FifteenMinutes",
            30: "HalfHour",
            60: "OneHour",
            240: "FourHours",
            1440: "OneDay"
        }
        parsed_start, parsed_end = dateparser.parse(
            start_time), dateparser.parse(end_time)
        parsed_start, parsed_end = parsed_start.strftime(
            '%Y-%m-%d %H:%M:%S.%f'), parsed_end.strftime(
                '%Y-%m-%d %H:%M:%S.%f')
        print('finished converting the times', parsed_start, parsed_end)
        data = self.qtrade.get_historical_data(symbol, parsed_start,
                                               parsed_end, tf_map[tf])
        print('got data', len(data))
        return Helper.into_dataframe(data, symbol=symbol, tf=tf, qtrade=True)

    def get_binance_candles(self, symbol, tf, start_date, end_date=None):
        """Fetch spot klines from `start_date` onward (end_date unused)."""
        map_tf = {
            1: "1m",
            3: "3m",
            5: "5m",
            15: "15m",
            30: "30m",
            60: "1h",
            120: "2h",
            240: "4h",
            360: "6h",
            480: "8h"
        }
        lst = self.binance.get_historical_klines(symbol=symbol,
                                                 interval=map_tf[tf],
                                                 start_str=start_date)
        return Helper.into_dataframe(lst, symbol=symbol, tf=tf, index=False)

    def get_all_binance_data(self, symbol, tf, start_date, end_date=None):
        """Download the full kline history for `symbol` and shape it like
        the CSV loaders: indexed by (symbol, tf, timestamp)."""
        map_tf = {
            1: "1m",
            3: "3m",
            5: "5m",
            15: "15m",
            30: "30m",
            60: "1h",
            120: "2h",
            240: "4h",
            360: "6h",
            480: "8h"
        }
        list_symbol = self.binance.get_historical_klines(symbol=symbol,
                                                         interval=map_tf[tf],
                                                         start_str=start_date)
        df_symbol = pd.DataFrame(list_symbol)
        df_symbol.columns = [
            "timestamp", "open", "high", "low", "close", "volume",
            "timestamp_end", "", "", "", "", ""
        ]
        ##Fixing Columns
        # Binance timestamps are in milliseconds; convert to seconds.
        df_symbol['timestamp'] = [
            int(timestamp / 1000) for timestamp in df_symbol['timestamp']
        ]
        df_symbol['date'] = [
            datetime.fromtimestamp(timestamp)
            for timestamp in df_symbol['timestamp'].values
        ]
        # NOTE(review): tf is used as an int key in map_tf above, but sliced
        # like a string here — tf[:-1] on an int raises TypeError. Confirm
        # the expected type of `tf`.
        df_symbol['tf'] = [tf[:-1]] * len(df_symbol)
        df_symbol['symbol'] = [symbol] * len(df_symbol)
        df_symbol[["open", "high", "low", "close", "volume"
                   ]] = df_symbol[["open", "high", "low", "close",
                                   "volume"]].astype(float)
        df_symbol = df_symbol[[
            'symbol', 'tf', 'timestamp', 'date', 'open', 'high', 'low',
            'close', 'volume'
        ]]
        df_symbol = df_symbol.set_index(["symbol", "tf", "timestamp"])
        start_date = str(df_symbol.iloc[0, 0])[:10]
        string = f"Binance {symbol} {tf}m data from {start_date} to {str(datetime.now())[:10]}.csv"
        print(string)
        # df_symbol.to_csv(string)
        return df_symbol
class Window(qt.QWidget): def __init__(self, host, port, clientId): qt.QWidget.__init__(self) self.setWindowTitle("Giulio's App") self.canvas = MplCanvas() # self.edit = qt.QLineEdit('', self) # self.edit.editingFinished.connect(self.add) self.table = HistoricalTable() self.MAList = [] self.MADict = {} self.connectButton = qt.QPushButton('Connect') self.connectButton.setStyleSheet( "border: 1px solid black; background: white") self.connectButton.resize(100, 32) self.connectButton.setGeometry(200, 150, 100, 40) self.connectButton.clicked.connect(self.onConnectButtonClicked) self.displayButton = qt.QPushButton('Display values') self.displayButton.setStyleSheet( "border: 1px solid black; background: white") self.displayButton.resize(100, 32) self.displayButton.clicked.connect(self.onDisplayButtonClicked) self.cancelAllButton = qt.QPushButton('CancelAll') self.cancelAllButton.setStyleSheet( "border: 1px solid black; background: white") self.cancelAllButton.resize(100, 32) self.cancelAllButton.setGeometry(200, 150, 100, 40) self.cancelAllButton.clicked.connect(self.onCancelAllButtonClicked) layout = qt.QVBoxLayout(self) # layout.addWidget(self.edit) layout.addWidget(self.table) #layout.addWidget(self.canvas) layout.addWidget(self.connectButton) layout.addWidget(self.cancelAllButton) # layout.addStretch(1) # self.fig = plt.figure() # self.ax = self.fig.add_subplot(1, 1, 1) self.xs = [] self.ys = [] # layout.addWidget(self.fig) self.connectInfo = (host, port, clientId) self.ib = IB() self.headers = [ 'symbol', 'bidSize', 'bid', 'ask', 'askSize', 'last', 'lastSize', 'close' ] self.id = 1 self.firstSignal = True self.isConnectionBroken = False self.firstma50 = 0 self.firstma200 = 0 self.availableCash = 0 self.ib.orderStatusEvent += self.order_status_cb self.ib.execDetailsEvent += self.exec_details_cb self.ib.errorEvent += self.error_cb self.ib.accountSummaryEvent += self.accountSummary self.ib.pendingTickersEvent += self.onPendingTickers # self.ib.pendingTickersEvent += 
self.table.onPendingTickers async def accountSummaryAsync(self, account: str = '') -> \ List[AccountValue]: if not self.wrapper.acctSummary: # loaded on demand since it takes ca. 250 ms await self.reqAccountSummaryAsync() if account: return [ v for v in self.wrapper.acctSummary.values() if v.account == account ] else: return list(self.wrapper.acctSummary.values()) def accountSummary(self, account: str = '') -> List[AccountValue]: if (account.tag == 'BuyingPower'): logging.info('account buying power - ' + account.value) accVal: float = 0.0 accVal = account.value self.availableCash = float(accVal) self.availableCash = round(self.availableCash, 2) logging.info('available cash - ' + str(self.availableCash)) logging.info("account summary:: " + str(account.account) + " " + account.tag + " " + account.value) return [] #self._run(self.accountSummaryAsync(account)) def error_cb(self, reqId, errorCode, errorString, contract): logging.error("error: " + str(reqId) + " , " + str(errorCode) + " , " + str(errorString)) logging.error("string - " + str(errorString)) """if(errorCode == 1100): logging.error("Connectivity between IB and TWS has been lost") self.isConnectionBroken = True if (errorCode == 1300): logging.error("socket connection dropped") self.isConnectionBroken = True if(errorCode == 2105): logging.error("HMDS data farm connection is broken") if ((errorCode == 2104 or errorCode == 2106) and self.isConnectionBroken == True): logging.info("HMDS data farm connection has been restored") self.reqData()""" def reqGlobalCancel(self): """ Cancel all active trades including those placed by other clients or TWS/IB gateway. 
""" self.ib.reqGlobalCancel() logging.info('reqGlobalCancel') def order_status_cb(self, trade): logging.info("order status for " + str(trade.order.orderId)) logging.info("Status filled and remaining - " + trade.orderStatus.status + " " + str(trade.orderStatus.filled) + " " + str(trade.orderStatus.remaining)) def exec_details_cb(self, trade, fill): logging.info("exec details for " + fill.contract.symbol + " with orderid " + str(fill.execution.orderId)) if (fill.execution.side == "Sell"): self.availableCash += fill.execution.price def onPendingTickers(self, tickers): for ticker in tickers: logging.info("ticker - " + str(ticker.contract.conId) + " " + str(ticker.contract.secType) + " " + ticker.contract.symbol + " " + ticker.contract.currency) for col, header in enumerate(self.headers): if col == 0: continue val = getattr(ticker, header) symbol = ticker.contract.symbol + (ticker.contract.currency if ticker.contract.secType == 'CASH' else '') ma = self.MADict[symbol] logging.info("Values - " + str(ticker.contract.secType) + " " + str(ticker.contract.conId) + " " + symbol + " " + str(header) + " " + str(col) + " val- " + str(val)) if (str(header) == 'bid'): ma.bid = val if (str(header) == 'ask'): ma.ask = val def onBarUpdate(self, bars, hasNewBar): self.xs.append(dt.datetime.now().strftime('%H:%M:%S.%f')) # logging.debug("bar update " + str(hasNewBar) + " for " + str(bars.reqId)) logging.info(bars[-1]) symbol = bars.contract.symbol + ( bars.contract.currency if bars.contract.secType == 'CASH' else '') ma = self.MADict[symbol] logging.info("update for " + ma.symbol) df = util.df(bars) # logging.debug(df) ma.setMAs(df) ma50 = ta.MA(df['close'], 50) ma200 = ta.MA(df['close'], 200) self.ys.append(ma50) self.xs = self.xs[-50:] self.ys = self.ys[-50:] # self.ax.clear() # self.ax.plot(self.xs, self.ys) plt.xticks(rotation=45, ha='right') plt.subplots_adjust(bottom=0.30) plt.title('50MA') plt.ylabel('MA') """logging.debug("ma50") logging.debug(ma50) logging.debug("ma200") 
# NOTE(review): this region is a verbatim, collapsed paste — multiple source
# lines were joined onto single physical lines, so it is annotated in place
# rather than reformatted.  The first line below is the TAIL of a bar-update
# handler whose `def` line sits above this excerpt.
#
# Tail of the crossover handler: walks the order list returned by
# ma.checkGCDC(); LMT orders get a hard-coded totalQuantity of 1000 (the
# cash-based sizing is commented out) and buys reduce self.availableCash;
# TRAIL orders reuse the LMT quantity; each order is sent via
# self.ib.placeOrder(bars.contract, order).
# NOTE(review): the combined "Placing ..." log appears to read
# trade.order.orderId BEFORE `trade = self.ib.placeOrder(...)` executes —
# confirm intended statement order.  A large triple-quoted (disabled)
# first-signal block starts at `"""if`.
logging.debug(ma200) logging.debug("last items") logging.debug(ma50.tail(1).item()) logging.debug(ma200.tail(1).item())""" orderList = ma.checkGCDC() if (orderList is not None): orderQuantity = 0 for order in orderList: if (order.orderType == "LMT"): if (order.action == "Buy"): order.totalQuantity = 1000 #(self.availableCash/ma.bid) * .01 self.availableCash -= (order.totalQuantity * order.trailStopPrice) logging.info("Placing buy order for " + ma.symbol + " " + str(ma.bid) + " with orderId " + str(order.orderId)) else: order.totalQuantity = 1000 #(self.availableCash/ma.ask) * .01 logging.info("Placing sell order for " + ma.symbol + " at " + str(ma.ask) + " with orderId " + str(order.orderId)) orderQuantity = order.totalQuantity else: if (order.orderType == "TRAIL"): order.totalQuantity = orderQuantity if (order.action == "Buy"): #order.totalQuantity = (self.availableCash / ma.bid) * .01 self.availableCash -= (order.totalQuantity * order.trailStopPrice) logging.info("Placing buy order for " + ma.symbol + " " + str(ma.bid) + " with orderId " + str(order.orderId)) else: #order.totalQuantity = (self.availableCash / ma.ask) * .01 logging.info("Placing sell order for " + ma.symbol + " at " + str(ma.ask) + " with orderId " + str(order.orderId)) logging.info("Placing " + order.action + " order for " + ma.symbol + " at " + str(order.trailStopPrice) + " " + str(ma.ask) + " with orderId " + str(order.orderId) + " " + str(trade.order.orderId)) trade = self.ib.placeOrder(bars.contract, order) if (ma.isOrderActive == False and ma.GCCheck == True): logging.info("order is not active and gccheck is true") self.MADict[symbol] = ma """if (ma.firstSignal == True): ma.firstma50 = round(ma50.tail(1).item(), 6) ma.firstma200 = round(ma200.tail(1).item(), 6) ma.firstSignal = False if (ma.firstma50 < ma.firstma200): logging.info("checking golden cross for " + ma.symbol + " : mas - " + str(ma.firstma50) + " " + str(ma.firstma200)) else: logging.info("checking death cross for " + ma.symbol + 
# Continuation of the disabled golden-/death-cross experiment: compares
# previous vs current MA50/MA200 and would place TrailOrder("Buy"/"Sell",
# 1000, ...).  Live code resumes at ma.setMa50(...): the rounded latest MA
# values are stored back into self.MADict and a summary is logged.
" : mas - " + str(ma.firstma50) + " " + str(ma.firstma200)) ma.GCCheck = False self.MADict[symbol] = ma else: prevma50 = ma.getMa50() prevma200 = ma.getMa200() currma50 = round(ma50.tail(1).item(), 6) currma200 = round(ma200.tail(1).item(), 6) if(ma.isOrderActive == False): if(ma.GCCheck == True): logging.info("golden cross check for " + ma.symbol) logging.info("prev mas - " + str(prevma50) + " " + str(prevma200)) logging.info("curr mas - " + str(currma50) + " " + str(currma200)) logging.info("curr bid and ask vals - " + str(ma.bid) + " " + str(ma.ask)) if((prevma50 <= prevma200) and (currma50 > currma200)): logging.info(("golden cross occured for " + ma.symbol)) ma.GCCheck = False if(ma.isOrderActive == False): ma.isOrderActive = True order = TrailOrder("Buy", 1000, ma.ask, 20) trade = self.ib.placeOrder(bars.contract, order) logging.info("Placing buy order for " + ma.symbol + " at " + str(order.trailStopPrice) + " " + str(ma.ask) + " with orderId " + str(order.orderId) + " " + str(trade.order.orderId)) self.MADict[symbol] = ma else: logging.info("death cross check for " + ma.symbol) logging.info("prev mas - " + str(prevma50) + " " + str(prevma200)) logging.info("curr mas - " + str(currma50) + " " + str(currma200)) if ((prevma50 >= prevma200) and (currma50 < currma200)): logging.info(("death cross occured for " + ma.symbol)) ma.GCCheck = True if (ma.isOrderActive == False): ma.isOrderActive = True order = TrailOrder("Sell", 1000, ma.bid, 20) trade = self.ib.placeOrder(bars.contract, order) logging.info("Placing sell order for " + ma.symbol + " at " + str(ma.bid) + " with orderId " + str(trade.order.orderId)) self.MADict[symbol] = ma """ ma.setMa50(round(ma50.tail(1).item(), 6)) ma.setMa200(round(ma200.tail(1).item(), 6)) self.MADict[symbol] = ma logging.debug("MAs for " + str(bars.contract.secType) + " " + str(bars.contract.symbol) + " " + bars.contract.currency + " , reqid: " + str(bars.reqId) + " " + str(ma50.values[-1]) + " " + str(ma200.values[-1]) + " : " + 
# Finishes the MA debug log and refreshes the UI row via
# self.table.updateData.  Then `add_historical(text)` begins: it eval()s the
# text from the edit box into a contract object —
# SECURITY(review): eval of UI-supplied text; restrict or parse instead —
# requests keepUpToDate 10-sec MIDPOINT bars plus streaming market data,
# derives a symbol key (currency appended for CASH/forex contracts) and
# registers a MovingAverages helper in self.MAList / self.MADict.
str(ma50.tail(1).item()) + " " + str(ma200.tail(1).item())) self.table.updateData(bars.reqId, round(ma50.tail(1).item(), 6), round(ma200.tail(1).item(), 6)) # logging.debug(ma50.values[-1]) # plt.close() # plot = util.barplot(bars) # clear_output(wait=True) # display(plot) def add_historical(self, text=''): logging.debug("text - " + text) logger.debug("logging") text = text or self.edit.text() if text: logging.debug('eval text ') # + eval(text)) contract = eval(text) logging.debug("requesting historical and mkt data for " + text) bars = self.ib.reqHistoricalData(contract, endDateTime='', durationStr='2000 S', barSizeSetting='10 secs', whatToShow='MIDPOINT', useRTH=True, formatDate=1, keepUpToDate=True) ticker = self.ib.reqMktData(contract, '', False, False, None) logging.info(bars[-1]) logging.debug("sectype " + str(bars.reqId) + " " + str(bars.contract.conId) + " " + bars.contract.secType + " " + bars.contract.symbol + " " + bars.contract.currency) self.table.addHistoricalData(bars.reqId, contract) df = util.df(bars) # with pd.option_context('display.max_rows', None, 'display.max_columns', # None): # more options can be specified also # logging.debug(df) close = pd.DataFrame(df, columns=['close']) logging.debug("close ") logging.debug(close) # df['pandas_SMA_3'] = df.iloc[:, 1].rolling(window=3).mean() # df.head() #ma50 = ta.MA(df['close'], 50) #ma200 = ta.MA(df['close'], 200) symbol = bars.contract.symbol + (bars.contract.currency if bars.contract.secType == 'CASH' else '') logging.info("symbol - " + symbol) ma = MovingAverages( self.ib, symbol, bars.reqId ) #, round(ma50.tail(1).item(), 6), round(ma200.tail(1).item(), 6)) ma.setMAs(df) self.MAList.append(ma) self.MADict[symbol] = ma """logging.debug("ma50") logging.debug(ma50) logging.debug("ma200") logging.debug(ma200) logging.debug("initial ma vals for " + symbol) logging.debug(ma50.tail(1).item()) logging.debug(ma200.tail(1).item())""" self.table.updateData(bars.reqId, round(ma.ma50.tail(1).item(), 6), 
# add_historical ends by pushing the initial MA values into the table and
# wiring bars.updateEvent to self.onBarUpdate.  `onDisplayButtonClicked`
# dumps the MA state of every tracked symbol to the debug log.
# `onConnectButtonClicked` toggles the TWS session on 127.0.0.1:7497
# (clientId=2): disconnect clears the table; connect requests the account
# summary and kicks off reqData.  `onCancelAllButtonClicked` issues a global
# cancel.  `reqData` begins with a disabled forex-pair loop.
round(ma.ma200.tail(1).item(), 6)) # sma = pd.SMA(df['close'].values, timeperiod=4) """portfolio = self.ib.portfolio()#.wrapper.portfolio.cash = 10000 logging.debug("portfolio") logging.debug(portfolio) positions = self.ib.positions() logging.debug("positions") for x in range(len(positions)): logging.debug(positions[x].contract.symbol) logging.debug(positions[x].position)""" # logging.debug(positions) bars.updateEvent += self.onBarUpdate logging.debug("reqid is " + str(bars.reqId) + " for " + bars.contract.symbol + " " + bars.contract.currency + " , sectype - " + bars.contract.secType) def onDisplayButtonClicked(self, _): logging.debug("MA values") for ma in self.MAList: logging.debug("symbol - " + " " + ma.symbol) logging.debug( str(ma.firstma50) + " " + str(ma.firstma200) + " " + str(ma.firstSignal) + " " + str(ma.ma50) + " " + str(ma.ma200)) for x in self.MADict: logging.debug(x) for x in self.MADict.values(): logging.debug("dict values - " + str(x.firstSignal) + " " + x.symbol + " " + str(x.firstma50) + " " + str(x.firstma200) + " " + str(x.ma50) + " " + str(x.ma200)) def onConnectButtonClicked(self, _): logging.debug("isconnected: " + str(self.ib.isConnected())) if self.ib.isConnected(): self.ib.disconnect() logging.debug("clearing data") self.table.clearData() self.connectButton.setText('Connect') logging.debug("done") else: logging.debug("trying to connect") # ib = IB() # ib.connect('127.0.0.1', 7497, clientId=3) self.ib.connect('127.0.0.1', 7497, clientId=2) # *self.connectInfo) logging.debug("connected - ") # + self.ib.isConnected()) # self.ib.reqMarketDataType(2) self.connectButton.setText('Disconnect') self.ib.reqAccountSummary() self.reqData() def onCancelAllButtonClicked(self): logging.info("Cancelling all open orders") #self.ib.connect('127.0.0.1', 7497, clientId=2) # *self.connectInfo) self.reqGlobalCancel() def reqData(self): #self.reqGlobalCancel() """for symbol in ('EURUSD', 'USDJPY', 'EURGBP', 'USDCAD', 'EURCHF', 'AUDUSD', 'NZDUSD'): 
# reqData currently requests only "Stock('FB', 'SMART', 'USD')" (the other
# tickers are commented out).  `closeEvent` stops the asyncio event loop
# when the window closes.
logging.debug("requesting for " + symbol) self.add_historical(f"Forex('{symbol}')")""" #self.add_historical("Stock('TSLA', 'SMART', 'USD')") #self.add_historical("Stock('IBM', 'SMART', 'USD')") #self.add_historical("Stock('MSFT', 'SMART', 'USD')") self.add_historical("Stock('FB', 'SMART', 'USD')") def closeEvent(self, ev): logging.debug("closing") asyncio.get_event_loop().stop()
from ib_insync import IB, util, Forex


def main() -> None:
    """Connect to a local IB Gateway and print 30 days of hourly EURUSD bars.

    Requires a gateway/TWS instance listening on localhost:4001.
    """
    client = IB()
    client.connect('localhost', 4001, clientId=1)
    eurusd = Forex('EURUSD')
    history = client.reqHistoricalData(
        eurusd,
        endDateTime='',
        durationStr='30 D',
        barSizeSetting='1 hour',
        whatToShow='MIDPOINT',
        useRTH=True)
    # ib_insync helper: BarDataList -> pandas DataFrame
    print(util.df(history))


if __name__ == "__main__":
    main()
class request(IB):
    """Live-session helper wrapping an ib_insync ``IB`` connection.

    Reads per-instrument settings from ``instruments.csv`` (indexed by
    ``symbol``), connects to a local TWS/Gateway on ``127.0.0.1:7497``,
    resolves the tradable contract for the instrument's market and
    subscribes to 5-second MIDPOINT real-time bars.

    Parameters
    ----------
    symbol : str
        Instrument symbol; must be a row in ``instruments.csv``.
    temp : str
        Bar-size label used when downloading historical data.
    client : int
        IB API clientId for this connection.
    """

    def __init__(self, symbol, temp, client):
        self.symbol = symbol
        self.temp = temp
        # Static per-instrument parameters (market, exchange, tick size, ...).
        instruments = pd.read_csv('instruments.csv').set_index('symbol')
        self.params = instruments.loc[self.symbol]
        self.market = str(self.params.market)
        self.exchange = str(self.params.exchange)
        self.tick_size = float(self.params.tick_size)
        self.digits = int(self.params.digits)
        self.leverage = int(self.params.leverage)
        self.client = client
        self.current_date()
        self._sundays_activation()
        self.ib = IB()
        print(self.ib.connect('127.0.0.1', 7497, self.client))
        self.connected = self.ib.isConnected()
        self.get_contract()
        self.interrumption = False
        self.ticker = self.ib.ticker(self.contract)
        self.bars = self.ib.reqRealTimeBars(self.contract, 5, 'MIDPOINT', False)
        self.operable = True

    def operable_schedule(self):
        """Set ``self.operable`` False late Friday and all Saturday."""
        if self.weekday == 4 and pd.to_datetime(self.hour).time() > pd.to_datetime('18:00:00').time():
            print('%s %s | Today is Friday and Market has Closed!' % (self.date, self.hour))
            self.operable = False
        elif self.weekday == 5:
            print('%s %s | Today is Saturday and market is not Opened' % (self.date, self.hour))
            self.operable = False
        else:
            self.operable = True

    def current_date(self):
        """Refresh self.date / self.weekday / self.hour from the local clock.

        FIX(review): sample ``datetime.now()`` once so the three fields can
        never straddle a second/midnight boundary.
        """
        now = datetime.now()
        self.date = now.strftime('%Y-%m-%d')
        self.weekday = now.weekday()
        self.hour = now.strftime('%H:%M:%S')

    def _sundays_activation(self):
        """On Sundays, block until the 18:00:05 market open before trading."""
        hour = '18:00:05'
        if self.weekday == 6:
            if pd.to_datetime(self.hour).time() < pd.to_datetime(hour).time():
                print('Today is Sunday. Bot activation is at 18:00:00')
                import time  # local import: only needed on the Sunday path
                while True:
                    self.current_date()
                    if pd.to_datetime(self.hour).time() >= pd.to_datetime(hour).time():
                        print('Activation Done')
                        self.send_telegram_message('%s %s | Bot Activation Done' % (self.date, self.hour))
                        break
                    # FIX(review): original loop busy-waited at 100% CPU.
                    time.sleep(1)

    def continuous_check_message(self, message):
        """Forward *message* to the info channel once per hour (at hh:00:00)."""
        if datetime.now().minute == 0 and datetime.now().second == 0:
            self.send_telegram_message(message, type='info')

    def reconnection(self):
        """Disconnect just before the daily IB reset windows and reconnect
        afterwards, retrying every 10 seconds until the session is back."""
        if self.hour == '23:44:30' or self.hour == '16:59:30':
            self.interrumption = True
            self.ib.disconnect()
            self.connected = self.ib.isConnected()
            print('%s %s | Ib disconnection' % (self.date, self.hour))
            print('Connected: %s' % self.connected)
        if self.hour == '23:46:00' or self.hour == '18:00:05':
            self.interrumption = False
            print('%s %s | Reconnecting...' % (self.date, self.hour))
            while not self.connected:
                try:
                    self.ib.connect('127.0.0.1', 7497, self.client)
                    self.connected = self.ib.isConnected()
                    if self.connected:
                        print('%s %s | Connection reestablished!' % (self.date, self.hour))
                        print('Requesting Market Data...')
                        self.bars = self.ib.reqRealTimeBars(self.contract, 5, 'MIDPOINT', False)
                        print('Last Close of %s: %.2f' % (self.symbol, self.bars[-1].close))
                        print('%s Data has been Updated!' % self.symbol)
                except Exception:
                    # FIX(review): was a bare ``except:`` that also swallowed
                    # KeyboardInterrupt/SystemExit.
                    print('%s %s | Connection Failed! Trying to reconnect in 10 seconds...' % (self.date, self.hour))
                    self.ib.sleep(10)
            print('%s %s | %s Data has been Updated!' % (self.date, self.hour, self.symbol))

    def _local_symbol_selection(self):
        '''Selects local symbol according to symbol and current date'''
        current_date = datetime.now().date()
        base = 'D:/Archivos/futuro/Algorithmics/Codes/My_Bots/Hermes/contract_dates'
        # csv file selection according to symbol
        if self.symbol in ['ES', 'RTY', 'NQ', 'MES', 'MNQ', 'M2K']:
            fname = 'indexes_globex.txt'
        elif self.symbol in ['YM', 'MYM', 'DAX']:
            fname = 'indexes_ecbot_dtb.txt'
        elif self.symbol in ['QO', 'MGC']:
            fname = 'QO_MGC.txt'
        elif self.symbol in ['CL', 'QM']:
            fname = 'CL_QM.txt'
        else:
            # FIX(review): original used "'%s.txt' % symbol" where ``symbol``
            # is undefined in this scope (NameError); must be self.symbol.
            fname = '%s.txt' % self.symbol
        contract_dates = pd.read_csv('%s/%s' % (base, fname), parse_dates=True)

        # Current contract selection according to current date
        current_contract = None
        for i in range(len(contract_dates)):
            row = contract_dates.iloc[i]
            initial_date = pd.to_datetime(row.initial_date).date()
            final_date = pd.to_datetime(row.final_date).date()
            if initial_date <= current_date <= final_date:
                current_contract = row.contract
                break
        if current_contract is None:
            # FIX(review): original fell through to a NameError when no row
            # covered today's date; fail with a meaningful message instead.
            raise LookupError('No contract period in %s covers %s' % (fname, current_date))

        # local symbol formatting per product family
        if self.symbol in ['ES', 'RTY', 'NQ', 'MES', 'MNQ', 'M2K',
                           'QO', 'CL', 'MGC', 'QM']:
            return '%s%s' % (self.symbol, current_contract)
        if self.symbol in ['YM', 'ZS', 'MYM']:  # merged: MYM used the same format
            return '%s %s' % (self.symbol, current_contract)
        if self.symbol == 'DAX':
            return 'FDAX %s' % current_contract
        return current_contract

    def get_contract(self):
        """Resolve ``self.contract`` according to the instrument's market."""
        if self.market == 'futures':
            local = self._local_symbol_selection()
            self.contract = Future(symbol=self.symbol, exchange=self.exchange,
                                   localSymbol=local)
            print(self.ib.reqContractDetails(self.contract)[0].contract.lastTradeDateOrContractMonth)
        elif self.market == 'forex':
            self.contract = Forex(self.symbol)
        elif self.market == 'stocks':
            self.contract = Stock(symbol=self.symbol, exchange=self.exchange,
                                  currency='USD')

    def download_data(self, tempo, duration):
        """Historical bars for self.contract: MIDPOINT for forex, TRADES otherwise."""
        what = 'MIDPOINT' if self.market == 'forex' else 'TRADES'
        return self.ib.reqHistoricalData(self.contract, endDateTime='',
                                         durationStr=duration,
                                         barSizeSetting=tempo,
                                         whatToShow=what,
                                         useRTH=False, keepUpToDate=False)

    def send_telegram_message(self, message, type='trades'):
        """Send *message* to the trades or info Telegram channel.

        ``type`` keeps its original name for caller compatibility even though
        it shadows the builtin.
        """
        # SECURITY(review): hard-coded bot token/chat ids should move to
        # configuration or environment variables.
        bot_token = '1204313430:AAGonra1LaFhyI1gCVOHsz8yAohJUeFgplo'
        bot_chatID = '-499850995' if type == 'trades' else '-252750334'
        # FIX(review): let requests URL-encode the payload; the original
        # string interpolation broke on messages containing '&', '#' or '+'.
        requests.get('https://api.telegram.org/bot%s/sendMessage' % bot_token,
                     params={'chat_id': bot_chatID, 'text': message})
# NOTE(review): collapsed paste — statements were joined onto single physical
# lines, so this block is annotated in place rather than reformatted.  It is a
# backtrader-style store that wraps an ib_insync IB() connection behind the
# classic IBStore interface.  The class continues past the end of this excerpt.
#
# Class header + docstring: singleton store (MetaSingleton metaclass);
# documents the host/port/clientId/notifyall/_debug/reconnect/timeout/
# timeoffset parameters shared with IBData/IBBroker.
class IBStore(with_metaclass(MetaSingleton, object)): '''Singleton class wrapping an ibpy ibConnection instance. The parameters can also be specified in the classes which use this store, like ``IBData`` and ``IBBroker`` Params: - ``host`` (default:``127.0.0.1``): where IB TWS or IB Gateway are actually running. And although this will usually be the localhost, it must not be - ``port`` (default: ``7496``): port to connect to. The demo system uses ``7497`` - ``clientId`` (default: ``None``): which clientId to use to connect to TWS. ``None``: generates a random id between 1 and 65535 An ``integer``: will be passed as the value to use. - ``notifyall`` (default: ``False``) If ``False`` only ``error`` messages will be sent to the ``notify_store`` methods of ``Cerebro`` and ``Strategy``. If ``True``, each and every message received from TWS will be notified - ``_debug`` (default: ``False``) Print all messages received from TWS to standard output - ``reconnect`` (default: ``3``) Number of attempts to try to reconnect after the 1st connection attempt fails Set it to a ``-1`` value to keep on reconnecting forever - ``timeout`` (default: ``3.0``) Time in seconds between reconnection attemps - ``timeoffset`` (default: ``True``) If True, the time obtained from ``reqCurrentTime`` (IB Server time) will be used to calculate the offset to localtime and this offset will be used for the price notifications (tickPrice events, for example for CASH markets) to modify the locally calculated timestamp. The time offset will propagate to other parts of the ``backtrader`` ecosystem like the **resampling** to align resampling timestamps using the calculated offset. 
# Docstring ends (timerefresh, indcash); REQIDBASE offsets data-request ids
# away from order ids; BrokerCls/DataCls are filled in by auto-registration;
# `params` holds the defaults; getdata/getbroker are classmethod factories;
# __init__ starts by initialising bookkeeping attributes.
- ``timerefresh`` (default: ``60.0``) Time in seconds: how often the time offset has to be refreshed - ``indcash`` (default: ``True``) Manage IND codes as if they were cash for price retrieval ''' # Set a base for the data requests (historical/realtime) to distinguish the # id in the error notifications from orders, where the basis (usually # starting at 1) is set by TWS REQIDBASE = 0x01000000 BrokerCls = None #getattr(sys.modules["cerebro.strategies." +classname.split('.')[0]], classname.split('.')[1])IBBroker #None # broker class will autoregister DataCls = None # data class will auto register params = ( ('host', '127.0.0.1'), ('port', 7496), ('clientId', None), # None generates a random clientid 1 -> 2^16 ('notifyall', False), # NOT IMPLEMENTED ('_debug', False), ('reconnect', 3), # -1 forever, 0 No, > 0 number of retries ('timeout', 3.0), # timeout between reconnections ('timeoffset', True), # Use offset to server for timestamps if needed ('timerefresh', 60.0), # How often to refresh the timeoffset ('indcash', True), # Treat IND codes as CASH elements ('readonly', False), # Set to True when IB API is in read-only mode ('account', ''), # Main account to receive updates for ) @classmethod def getdata(cls, *args, **kwargs): '''Returns ``DataCls`` with args, kwargs''' return cls.DataCls(*args, **kwargs) @classmethod def getbroker(cls, *args, **kwargs): '''Returns broker with *args, **kwargs from registered ``BrokerCls``''' return cls.BrokerCls(*args, **kwargs) def __init__(self): super(IBStore, self).__init__() self._env = None # reference to cerebro for general notifications self.broker = None # broker instance self.datas = list() # datas that have registered over start # self.ccount = 0 # requests to start (from cerebro or datas) # self._lock_tmoffset = threading.Lock() # self.tmoffset = timedelta() # to control time difference with server # # Structures to hold datas requests # self.qs = collections.OrderedDict() # key: tickerId -> queues # self.ts = 
# __init__ continues: per-account caches (acc_cash/acc_value/acc_upds),
# positions, notification queue and order maps; clientId defaults to a random
# 1..2^16-1; timeout/readonly/account fall back to defaults when the params
# are None; with _debug the ib_insync logging/asyncio patches are enabled;
# then the blocking ib.connect(...).  keyfn/key2fn turn "n unit" barsize and
# duration strings into sortable (timeframe, compression) keys; revdur
# (barsize -> durations) is built next.
collections.OrderedDict() # key: queue -> tickerId self.iscash = dict() # tickerIds from cash products (for ex: EUR.JPY) self.acc_cash = AutoDict() # current total cash per account self.acc_value = AutoDict() # current total value per account self.acc_upds = AutoDict() # current account valueinfos per account self.positions = collections.defaultdict(Position) # actual positions self.orderid = None # next possible orderid (will be itertools.count) self.managed_accounts = list() # received via managedAccounts self.notifs = queue.Queue() # store notifications for cerebro self.orders = collections.OrderedDict() # orders by order ided self.opending = collections.defaultdict(list) # pending transmission self.brackets = dict() # confirmed brackets self.last_tick = None # Use the provided clientId or a random one if self.p.clientId is None: self.clientId = random.randint(1, pow(2, 16) - 1) else: self.clientId = self.p.clientId if self.p.timeout is None: self.timeout = 2 else: self.timeout = self.p.timeout if self.p.readonly is None: self.readonly = False else: self.readonly = self.p.readonly if self.p.account is None: self.account = "" else: self.account = self.p.account if self.p._debug: util.logToConsole(level=logging.DEBUG) util.patchAsyncio() util.startLoop() self.ib = IB() self.ib.connect( host=self.p.host, port=self.p.port, clientId=self.clientId, timeout=self.timeout, readonly=self.readonly, account=self.account, ) # This utility key function transforms a barsize into a: # (Timeframe, Compression) tuple which can be sorted def keyfn(x): n, t = x.split() tf, comp = self._sizes[t] return (tf, int(n) * comp) # This utility key function transforms a duration into a: # (Timeframe, Compression) tuple which can be sorted def key2fn(x): n, d = x.split() tf = self._dur2tf[d] return (tf, int(n)) # Generate a table of reverse durations self.revdur = collections.defaultdict(list) # The table (dict) is a ONE to MANY relation of # duration -> barsizes # Here it is reversed to get 
# revdur is populated from _durations and each duration list sorted by real
# length.  start() registers a data or broker (returning a kick-started
# queue for datas); stop() disconnects, tolerating a never-connected
# instance; get_notifications() drains the queue up to a None marker;
# managedAccounts() caches the account list then requests server time;
# currentTime() begins.
a ONE to MANY relation of # barsize -> durations for duration, barsizes in self._durations.items(): for barsize in barsizes: self.revdur[keyfn(barsize)].append(duration) # Once managed, sort the durations according to real duration and not # to the text form using the utility key above for barsize in self.revdur: self.revdur[barsize].sort(key=key2fn) def start(self, data=None, broker=None): #self.reconnect(fromstart=True) # reconnect should be an invariant # Datas require some processing to kickstart data reception if data is not None: self._env = data._env # For datas simulate a queue with None to kickstart co self.datas.append(data) # if connection fails, get a fakeation that will force the # datas to try to reconnect or else bail out return self.getTickerQueue(start=True) elif broker is not None: self.broker = broker def stop(self): try: self.ib.disconnect() # disconnect should be an invariant except AttributeError: pass # conn may have never been connected and lack "disconnect" def get_notifications(self): '''Return the pending "store" notifications''' # The background thread could keep on adding notifications. The None # mark allows to identify which is the last notification to deliver self.notifs.put(None) # put a mark notifs = list() while True: notif = self.notifs.get() if notif is None: # mark is reached break notifs.append(notif) return notifs def managedAccounts(self): # 1st message in the stream self.managed_accounts = self.ib.managedAccounts() # Request time to avoid synchronization issues self.reqCurrentTime() def currentTime(self,msg): if not self.p.timeoffset: # only if requested ... 
# currentTime() stores the server-local clock offset under _lock_tmoffset
# and re-arms a threading.Timer every p.timerefresh seconds; timeoffset(),
# reqCurrentTime(), nextOrderId() (fresh reqId from the client) and
# getTickerQueue() (optionally primed with None) are small helpers;
# getContractDetails() notifies and returns None on none/ambiguous answers;
# reqHistoricalDataEx() begins: copies kwargs for error reporting, rejects
# sub-second timeframes, and derives duration/barsize when begindate is None.
apply timeoffset return curtime = datetime.fromtimestamp(float(msg.time)) with self._lock_tmoffset: self.tmoffset = curtime - datetime.now() threading.Timer(self.p.timerefresh, self.reqCurrentTime).start() def timeoffset(self): with self._lock_tmoffset: return self.tmoffset def reqCurrentTime(self): self.ib.reqCurrentTime() def nextOrderId(self): # Get the next ticker using a new request value from TWS self.orderid = self.ib.client.getReqId() return self.orderid def getTickerQueue(self, start=False): '''Creates ticker/Queue for data delivery to a data feed''' q = queue.Queue() if start: q.put(None) return q return q def getContractDetails(self, contract, maxcount=None): #cds = list() cds = self.ib.reqContractDetails(contract) #cds.append(cd) if not cds or (maxcount and len(cds) > maxcount): err = 'Ambiguous contract: none/multiple answers received' self.notifs.put((err, cds, {})) return None return cds def reqHistoricalDataEx(self, contract, enddate, begindate, timeframe, compression, what=None, useRTH=False, tz='', sessionend=None, #tickerId=None ): ''' Extension of the raw reqHistoricalData proxy, which takes two dates rather than a duration, barsize and date It uses the IB published valid duration/barsizes to make a mapping and spread a historical request over several historical requests if needed ''' # Keep a copy for error reporting purposes kwargs = locals().copy() kwargs.pop('self', None) # remove self, no need to report it if timeframe < TimeFrame.Seconds: # Ticks are not supported return self.getTickerQueue(start=True) if enddate is None: enddate = datetime.now() if begindate is None: duration = self.getmaxduration(timeframe, compression) if duration is None: err = ('No duration for historical data request for ' 'timeframe/compresison') self.notifs.put((err, (), kwargs)) return self.getTickerQueue(start=True) barsize = self.tfcomp_to_size(timeframe, compression) if barsize is None: err = ('No supported barsize for historical data request for ' 
# reqHistoricalDataEx() continues: with no begindate it delegates to
# reqHistoricalData; otherwise it picks the largest valid duration (the
# multi-request splitting loop is commented out), defaults `what` to BID for
# CASH/CFD, then performs one blocking ib.reqHistoricalData and copies the
# bars into the returned queue.  reqHistoricalData() (the single-request
# proxy) begins at the end of this line.
'timeframe/compresison') self.notifs.put((err, (), kwargs)) return self.getTickerQueue(start=True) return self.reqHistoricalData(contract=contract, enddate=enddate, duration=duration, barsize=barsize, what=what, useRTH=useRTH, tz=tz, sessionend=sessionend) # Check if the requested timeframe/compression is supported by IB durations = self.getdurations(timeframe, compression) # if not durations: # return a queue and put a None in it # return self.getTickerQueue(start=True) # Get or reuse a queue # if tickerId is None: # tickerId, q = self.getTickerQueue() # else: # tickerId, q = self.reuseQueue(tickerId) # reuse q for old tickerId # Get the best possible duration to reduce number of requests duration = None # for dur in durations: # intdate = self.dt_plus_duration(begindate, dur) # if intdate >= enddate: # intdate = enddate # duration = dur # begin -> end fits in single request # break intdate = begindate if duration is None: # no duration large enough to fit the request duration = durations[-1] # Store the calculated data # self.histexreq[tickerId] = dict( # contract=contract, enddate=enddate, begindate=intdate, # timeframe=timeframe, compression=compression, # what=what, useRTH=useRTH, tz=tz, sessionend=sessionend) barsize = self.tfcomp_to_size(timeframe, compression) if contract.secType in ['CASH', 'CFD']: #self.iscash[tickerId] = 1 # msg.field code if not what: what = 'BID' # default for cash unless otherwise specified elif contract.secType in ['IND'] and self.p.indcash: #self.iscash[tickerId] = 4 # msg.field code pass what = what or 'TRADES' q = self.getTickerQueue() histdata = self.ib.reqHistoricalData( contract, intdate.strftime('%Y%m%d %H:%M:%S') + ' GMT', duration, barsize, what, useRTH, 2) # dateformat 1 for string, 2 for unix time in seconds for msg in histdata: q.put(msg) return q def reqHistoricalData(self, contract, enddate, duration, barsize, what=None, useRTH=False, tz='', sessionend=None): '''Proxy to reqHistorical Data''' # get a ticker/queue for 
# reqHistoricalData(): queue-based proxy for one (enddate, duration,
# barsize) request; BID for CASH/CFD, TRADES otherwise.  reqRealTimeBars():
# subscribes 5-second MIDPOINT bars, sleeps `duration`, then drains them —
# NOTE(review): this delivers one batch rather than a live stream.
# reqMktData() begins: tick type 233 (RTVOLUME) except for CASH/CFD.
identification/data delivery q = self.getTickerQueue() if contract.secType in ['CASH', 'CFD']: #self.iscash[tickerId] = True if not what: what = 'BID' # TRADES doesn't work elif what == 'ASK': #self.iscash[tickerId] = 2 pass else: what = what or 'TRADES' # split barsize "x time", look in sizes for (tf, comp) get tf #tframe = self._sizes[barsize.split()[1]][0] # self.histfmt[tickerId] = tframe >= TimeFrame.Days # self.histsend[tickerId] = sessionend # self.histtz[tickerId] = tz histdata = self.ib.reqHistoricalData( contract, enddate.strftime('%Y%m%d %H:%M:%S') + ' GMT', duration, barsize, what, useRTH, 2) # dateformat 1 for string, 2 for unix time in seconds for msg in histdata: q.put(msg) return q def reqRealTimeBars(self, contract, useRTH=False, duration=5): '''Creates a request for (5 seconds) Real Time Bars Params: - contract: a ib.ext.Contract.Contract intance - useRTH: (default: False) passed to TWS - duration: (default: 5) passed to TWS Returns: - a Queue the client can wait on to receive a RTVolume instance ''' # get a ticker/queue for identification/data delivery q = self.getTickerQueue() rtb = self.ib.reqRealTimeBars(contract, duration, 'MIDPOINT', useRTH=useRTH) self.ib.sleep(duration) for bar in rtb: q.put(bar) return q def reqMktData(self, contract, what=None): '''Creates a MarketData subscription Params: - contract: a ib.ext.Contract.Contract intance Returns: - a Queue the client can wait on to receive a RTVolume instance ''' # get a ticker/queue for identification/data delivery q = self.getTickerQueue() ticks = '233' # request RTVOLUME tick delivered over tickString if contract.secType in ['CASH', 'CFD']: #self.iscash[tickerId] = True ticks = '' # cash markets do not get RTVOLUME if what == 'ASK': #self.iscash[tickerId] = 2 pass # q.put(None) # to kickstart backfilling # Can request 233 also for cash ... 
# reqMktData() pumps ticks through a MktData helper coroutine (util.run)
# and forwards de-duplicated ticks into the queue.  The _durations table
# starts here: IB's published duration -> valid-barsize mapping used for
# backfilling (entries 60 S through 7200 S on this line).
nothing will arrive md = MktData() q_ticks = queue.Queue() util.run(md.update_ticks(self.ib, contract, ticks, q_ticks)) while not q_ticks.empty(): ticker = q_ticks.get() for tick in ticker.ticks: # https://interactivebrokers.github.io/tws-api/tick_types.html if tick != self.last_tick: #last price #print(str(tick.time) +" >> " + str(tick.price)) self.last_tick = tick q.put(tick) return q # The _durations are meant to calculate the needed historical data to # perform backfilling at the start of a connetion or a connection is lost. # Using a timedelta as a key allows to quickly find out which bar size # bar size (values in the tuples int the dict) can be used. _durations = dict([ # 60 seconds - 1 min ('60 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min')), # 120 seconds - 2 mins ('120 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins')), # 180 seconds - 3 mins ('180 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins')), # 300 seconds - 5 mins ('300 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins')), # 600 seconds - 10 mins ('600 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins')), # 900 seconds - 15 mins ('900 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins')), # 1200 seconds - 20 mins ('1200 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins')), # 1800 seconds - 30 mins ('1800 S', ('1 secs', '5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins')), # 3600 seconds - 1 hour ('3600 S', ('5 secs', '10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour')), # 7200 seconds - 2 hours ('7200 S', ('5 secs', '10 secs', 
# _durations continues (2-hour through 1-year entries) and the dict closes;
# the _sizes map (barsize unit -> (TimeFrame, compression)) begins.
'15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours')), # 10800 seconds - 3 hours ('10800 S', ('10 secs', '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours')), # 14400 seconds - 4 hours ('14400 S', ('15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours')), # 28800 seconds - 8 hours ('28800 S', ('30 secs', '1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours')), # 1 days ('1 D', ('1 min', '2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours', '1 day')), # 2 days ('2 D', ('2 mins', '3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours', '1 day')), # 1 weeks ('1 W', ('3 mins', '5 mins', '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours', '1 day', '1 W')), # 2 weeks ('2 W', ('15 mins', '20 mins', '30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours', '1 day', '1 W')), # 1 months ('1 M', ('30 mins', '1 hour', '2 hours', '3 hours', '4 hours', '8 hours', '1 day', '1 W', '1 M')), # 2+ months ('2 M', ('1 day', '1 W', '1 M')), ('3 M', ('1 day', '1 W', '1 M')), ('4 M', ('1 day', '1 W', '1 M')), ('5 M', ('1 day', '1 W', '1 M')), ('6 M', ('1 day', '1 W', '1 M')), ('7 M', ('1 day', '1 W', '1 M')), ('8 M', ('1 day', '1 W', '1 M')), ('9 M', ('1 day', '1 W', '1 M')), ('10 M', ('1 day', '1 W', '1 M')), ('11 M', ('1 day', '1 W', '1 M')), # 1+ years ('1 Y', ('1 day', '1 W', '1 M')), ]) # Sizes allow for quick translation from bar sizes above to actual # timeframes to make a comparison with the actual data _sizes = { 'secs': (TimeFrame.Seconds, 1), 'min': 
# _sizes/_dur2tf finish; getdurations()/getmaxduration() read the reversed
# table; tfcomp_to_size() renders a (timeframe, compression) back into an IB
# barsize string; dt_plus_duration() adds an IB duration to a datetime
# (month arithmetic done 0-based then re-adjusted).
(TimeFrame.Minutes, 1), 'mins': (TimeFrame.Minutes, 1), 'hour': (TimeFrame.Minutes, 60), 'hours': (TimeFrame.Minutes, 60), 'day': (TimeFrame.Days, 1), 'W': (TimeFrame.Weeks, 1), 'M': (TimeFrame.Months, 1), } _dur2tf = { 'S': TimeFrame.Seconds, 'D': TimeFrame.Days, 'W': TimeFrame.Weeks, 'M': TimeFrame.Months, 'Y': TimeFrame.Years, } def getdurations(self, timeframe, compression): key = (timeframe, compression) if key not in self.revdur: return [] return self.revdur[key] def getmaxduration(self, timeframe, compression): key = (timeframe, compression) try: return self.revdur[key][-1] except (KeyError, IndexError): pass return None def tfcomp_to_size(self, timeframe, compression): if timeframe == TimeFrame.Months: return '{} M'.format(compression) if timeframe == TimeFrame.Weeks: return '{} W'.format(compression) if timeframe == TimeFrame.Days: if not compression % 7: return '{} W'.format(compression // 7) return '{} day'.format(compression) if timeframe == TimeFrame.Minutes: if not compression % 60: hours = compression // 60 return ('{} hour'.format(hours)) + ('s' * (hours > 1)) return ('{} min'.format(compression)) + ('s' * (compression > 1)) if timeframe == TimeFrame.Seconds: return '{} secs'.format(compression) # Microseconds or ticks return None def dt_plus_duration(self, dt, duration): size, dim = duration.split() size = int(size) if dim == 'S': return dt + timedelta(seconds=size) if dim == 'D': return dt + timedelta(days=size) if dim == 'W': return dt + timedelta(days=size * 7) if dim == 'M': month = dt.month - 1 + size # -1 to make it 0 based, readd below years, month = divmod(month, 12) return dt.replace(year=dt.year + years, month=month + 1) if dim == 'Y': return dt.replace(year=dt.year + size) return dt # could do nothing with it ... 
# Large commented-out histduration() helper (smallest valid IB duration
# covering two dates); makecontract() begins building a raw ib Contract
# from explicit fields.
return it intact # def histduration(self, dt1, dt2): # # Given two dates calculates the smallest possible duration according # # to the table from the Historical Data API limitations provided by IB # # # # Seconds: 'x S' (x: [60, 120, 180, 300, 600, 900, 1200, 1800, 3600, # # 7200, 10800, 14400, 28800]) # # Days: 'x D' (x: [1, 2] # # Weeks: 'x W' (x: [1, 2]) # # Months: 'x M' (x: [1, 11]) # # Years: 'x Y' (x: [1]) # td = dt2 - dt1 # get a timedelta for calculations # # First: array of secs # tsecs = td.total_seconds() # secs = [60, 120, 180, 300, 600, 900, 1200, 1800, 3600, 7200, 10800, # 14400, 28800] # idxsec = bisect.bisect_left(secs, tsecs) # if idxsec < len(secs): # return '{} S'.format(secs[idxsec]) # tdextra = bool(td.seconds or td.microseconds) # over days/weeks # # Next: 1 or 2 days # days = td.days + tdextra # if td.days <= 2: # return '{} D'.format(days) # # Next: 1 or 2 weeks # weeks, d = divmod(td.days, 7) # weeks += bool(d or tdextra) # if weeks <= 2: # return '{} W'.format(weeks) # # Get references to dt components # y2, m2, d2 = dt2.year, dt2.month, dt2.day # y1, m1, d1 = dt1.year, dt1.month, dt2.day # H2, M2, S2, US2 = dt2.hour, dt2.minute, dt2.second, dt2.microsecond # H1, M1, S1, US1 = dt1.hour, dt1.minute, dt1.second, dt1.microsecond # # Next: 1 -> 11 months (11 incl) # months = (y2 * 12 + m2) - (y1 * 12 + m1) + ( # (d2, H2, M2, S2, US2) > (d1, H1, M1, S1, US1)) # if months <= 1: # months <= 11 # return '1 M' # return '{} M'.format(months) # elif months <= 11: # return '2 M' # cap at 2 months to keep the table clean # # Next: years # # y = y2 - y1 + (m2, d2, H2, M2, S2, US2) > (m1, d1, H1, M1, S1, US1) # # return '{} Y'.format(y) # return '1 Y' # to keep the table clean def makecontract(self, symbol, sectype, exch, curr, expiry='', strike=0.0, right='', mult=1): '''returns a contract from the parameters without check''' contract = Contract() contract.symbol = symbol contract.secType = sectype contract.exchange = exch if curr: contract.currency = 
# makecontract() fills expiry/strike/right/multiplier for FUT/OPT/FOP;
# cancelOrder()/placeOrder() proxy to ib (placeOrder blocks via
# waitOnUpdate until the trade is done — NOTE(review): its `orderid`
# parameter is accepted but never used); reqTrades()/reqPositions();
# getposition() reads the positions dict (optionally returning a copy);
# reqAccountUpdates() resolves the default account then triggers
# updateAccountValue(), which begins at the end of this line.
curr if sectype in ['FUT', 'OPT', 'FOP']: contract.lastTradeDateOrContractMonth = expiry if sectype in ['OPT', 'FOP']: contract.strike = strike contract.right = right if mult: contract.multiplier = mult return contract def cancelOrder(self, orderid): '''Proxy to cancelOrder''' self.ib.cancelOrder(orderid) def placeOrder(self, orderid, contract, order): '''Proxy to placeOrder''' trade = self.ib.placeOrder(contract, order) while not trade.isDone(): self.ib.waitOnUpdate() return trade def reqTrades(self): '''Proxy to Trades''' return self.ib.trades() def reqPositions(self): '''Proxy to reqPositions''' return self.ib.reqPositions() def getposition(self, contract, clone=False): # Lock access to the position dicts. This is called from main thread # and updates could be happening in the background #with self._lock_pos: position = self.positions[contract.conId] if clone: return copy(position) return position def reqAccountUpdates(self, subscribe=True, account=None): '''Proxy to reqAccountUpdates If ``account`` is ``None``, wait for the ``managedAccounts`` message to set the account codes ''' if account is None: #self._event_managed_accounts.wait() self.managedAccounts() account = self.managed_accounts[0] #self.ib.reqAccountUpdates(subscribe, bytes(account)) self.updateAccountValue() def updateAccountValue(self): # Lock access to the dicts where values are updated. 
# updateAccountValue(): parses ib.accountValues() into acc_upds and keeps
# NetLiquidation / TotalCashBalance(BASE) per account (values coerced to
# float when possible).  get_acc_values(): returns the cached value-info
# dict (whole dict when no or multiple accounts are managed).
# get_acc_value() begins.
This happens in a # sub-thread and could kick it at anytime #with self._lock_accupd: #if self.connected(): ret = self.ib.accountValues() for msg in ret: try: value = float(msg.value) except ValueError: value = msg.value self.acc_upds[msg.account][msg.tag][msg.currency] = value if msg.tag == 'NetLiquidation': # NetLiquidationByCurrency and currency == 'BASE' is the same self.acc_value[msg.account] = value elif msg.tag == 'TotalCashBalance' and msg.currency == 'BASE': self.acc_cash[msg.account] = value def get_acc_values(self, account=None): '''Returns all account value infos sent by TWS during regular updates Waits for at least 1 successful download If ``account`` is ``None`` then a dictionary with accounts as keys will be returned containing all accounts If account is specified or the system has only 1 account the dictionary corresponding to that account is returned ''' # Wait for at least 1 account update download to have been finished # before the account infos can be returned to the calling client # if self.connected(): # self._event_accdownload.wait() # Lock access to acc_cash to avoid an event intefering #with self._updacclock: if account is None: # wait for the managedAccount Messages # if self.connected(): # self._event_managed_accounts.wait() if not self.managed_accounts: return self.acc_upds.copy() elif len(self.managed_accounts) > 1: return self.acc_upds.copy() # Only 1 account, fall through to return only 1 account = self.managed_accounts[0] try: return self.acc_upds[account].copy() except KeyError: pass return self.acc_upds.copy() def get_acc_value(self, account=None): '''Returns the net liquidation value sent by TWS during regular updates Waits for at least 1 successful download If ``account`` is ``None`` then a dictionary with accounts as keys will be returned containing all accounts If account is specified or the system has only 1 account the dictionary corresponding to that account is returned ''' # Wait for at least 1 account update download to 
# get_acc_value(): net liquidation (summed across accounts when several
# are managed).  get_acc_cash(): same pattern for total cash —
# NOTE(review): this method is cut off by the end of this excerpt before
# its final fallback return; the class continues below this chunk.
have been finished # before the value can be returned to the calling client # if self.connected(): # self._event_accdownload.wait() # Lock access to acc_cash to avoid an event intefering #with self._lock_accupd: if account is None: # wait for the managedAccount Messages # if self.connected(): # self._event_managed_accounts.wait() if not self.managed_accounts: return float() elif len(self.managed_accounts) > 1: return sum(self.acc_value.values()) # Only 1 account, fall through to return only 1 account = self.managed_accounts[0] try: return self.acc_value[account] except KeyError: pass return float() def get_acc_cash(self, account=None): '''Returns the total cash value sent by TWS during regular updates Waits for at least 1 successful download If ``account`` is ``None`` then a dictionary with accounts as keys will be returned containing all accounts If account is specified or the system has only 1 account the dictionary corresponding to that account is returned ''' # Wait for at least 1 account update download to have been finished # before the cash can be returned to the calling client' # if self.connected(): # self._event_accdownload.wait() # result = [v for v in self.ib.accountValues() \ # if v.tag == 'TotalCashBalance' and v.currency == 'BASE'] # Lock access to acc_cash to avoid an event intefering #with self._lock_accupd: if account is None: #wait for the managedAccount Messages # if self.connected(): # self._event_managed_accounts.wait() if not self.managed_accounts: return float() elif len(self.managed_accounts) > 1: return sum(self.acc_cash.values()) # Only 1 account, fall through to return only 1 account = self.managed_accounts[0] try: return self.acc_cash[account] except KeyError: pass
class SpectreDB:
    '''Helper around a MySQL market-data table.

    Wraps a SQLAlchemy engine (credentials read from a JSON secrets file) and,
    optionally, an ib_insync connection for pulling historical bars from
    Interactive Brokers and uploading them to the database.
    '''

    def __init__(self, json_path=None, key_name=None, table_name=None,
                 use_IB=False):
        '''
        :param json_path: path to the JSON secrets file with MySQL credentials
        :param key_name: key inside the secrets file holding this DB's entry
        :param table_name: default table used by upload/query helpers
        :param use_IB: when True, connect to a locally running IB Gateway/TWS
        '''
        self.json_path = json_path
        self.key_name = key_name
        if table_name:
            self.table_name = table_name
        SpectreDB.instantiate_engine(self)
        if use_IB:
            from ib_insync import IB, Forex, util
            # Need to have IB Gateway open to execute
            # util.startLoop()  # uncomment this line when in a notebook
            self.ib = IB()
            # NOTE(review): host/port/clientId are hard-coded (TWS paper port)
            self.ib.connect('127.0.0.1', 7497, clientId=1)

    def instantiate_engine(self):
        '''Create the SQLAlchemy engine and connection for the database.

        Reads the MySQL credentials from ``self.json_path`` under
        ``self.key_name`` and stores ``self.engine`` / ``self.conn``.
        '''
        # Connecting to mysql by providing a sqlalchemy engine
        with open(self.json_path) as secrets_file:
            secrets = json.load(secrets_file)
        # fix: removed redundant close() -- the with-block already closes it
        key_dict = secrets[self.key_name]
        engine_string = ('mysql+pymysql://{MYSQL_USER}:{MYSQL_PASSWORD}@'
                         '{MYSQL_HOST_IP}:{MYSQL_PORT}/{MYSQL_DATABASE}')
        self.engine = sa.create_engine(engine_string.format(
            MYSQL_USER=key_dict['MYSQL_USER'],
            MYSQL_PASSWORD=key_dict['MYSQL_PASSWORD'],
            MYSQL_HOST_IP=key_dict['MYSQL_HOST_IP'],
            MYSQL_PORT=key_dict['MYSQL_PORT'],
            MYSQL_DATABASE=key_dict['MYSQL_DATABASE']),
            echo=False)
        self.conn = self.engine.connect()

    @staticmethod
    def parse_df(df, num_rows=10000):
        '''Split a dataframe into chunks before upload.

        The SQL connection times out when uploading more than ~25k rows, so
        the frame is split into slices of at most ``num_rows`` rows.

        fix: decorated as @staticmethod (the original had no ``self`` and only
        worked when called as ``SpectreDB.parse_df``); also avoids emitting a
        spurious empty trailing chunk when ``len(df)`` is an exact multiple.

        :param df: pandas dataframe
        :param num_rows: maximum rows per chunk
        :return: list of dataframes, each of size <= num_rows (at least one)
        '''
        num_split = max(1, -(-len(df) // num_rows))  # ceil division
        return [df[i * num_rows:(i + 1) * num_rows] for i in range(num_split)]

    def filter_duplicates(self, upload_df, table_name):
        '''Drop rows whose ``date`` already exists in ``table_name``.

        :param upload_df: pandas dataframe with column 'date'
        :param table_name: string
        :return: upload_df without rows already present in the table
        '''
        # Query dates in table of that time frame and filter the upload df of
        # overlapping timestamps
        start = str(upload_df['date'].min())
        end = str(upload_df['date'].max())
        date_df = self.query_by_date(start, end, table_name, fields=['date'])
        # fix: DataFrame.append was removed in pandas 2.0 -- use pd.concat
        out_df = pd.concat([upload_df, date_df], ignore_index=True, sort=False)
        # keep=False removes BOTH copies of any duplicated date, leaving only
        # rows that were not already in the table
        out_df.drop_duplicates(['date'], keep=False, inplace=True)
        return out_df

    def upload_df_to_db(self, df, table_name=None, date_encode=False):
        '''Parse and upload a pandas dataframe to the database.

        :param df: pandas dataframe with a 'date' column
        :param table_name: name of table on database (defaults to
            ``self.table_name``)
        :param date_encode: when True, add calendar columns via date_encoder
        :return: 1 on completion (errors are printed, not raised)
        '''
        if not table_name:
            table_name = self.table_name
        if date_encode:
            df = self.date_encoder(df)
        df.drop_duplicates(['date'], inplace=True)
        # Only filter against the table if it already exists
        if table_name in sa.inspect(self.engine).get_table_names():
            df = self.filter_duplicates(df, table_name)
        df_list = SpectreDB.parse_df(df)
        for upload_df in df_list:
            try:
                upload_df.to_sql(table_name, con=self.engine, index=False,
                                 if_exists='append')
            except Exception as e:
                # best-effort upload: report the failed chunk and continue
                print(e)
        return 1

    def date_encoder(self, df):
        '''Add calendar component columns derived from the 'date' column.

        :param df: pandas dataframe with column 'date' holding timezone-aware
            datetimes, e.g. '2019-08-28 21:15:00+00:00'
        :return: dataframe with additional columns ['weekday', 'day_of_year',
            'year', 'month', 'day_of_month', 'hour', 'minute']
        '''
        df['date'] = pd.to_datetime(df['date'])
        # vectorized .dt accessors replace the per-row .apply calls
        df['weekday'] = df['date'].dt.weekday
        df['day_of_year'] = df['date'].dt.dayofyear
        df['year'] = df['date'].dt.year
        df['month'] = df['date'].dt.month
        df['day_of_month'] = df['date'].dt.day
        df['hour'] = df['date'].dt.hour
        df['minute'] = df['date'].dt.minute
        return df

    def drop_table(self, table_name=None):
        '''Drop ``table_name`` (defaults to ``self.table_name``).

        NOTE(review): the table name is interpolated directly into the SQL;
        only call with trusted, internally-generated names.
        '''
        if not table_name:
            table_name = self.table_name
        self.conn.execute("DROP TABLE {}".format(table_name))

    def query_by_date(self, start, end, table_name=None, fields=None):
        '''Return a dataframe of the requested columns between two dates.

        :param start: string in datetime format, eg. '2019-07-18 17:15:00'
        :param end: same format as start
        :param fields: list of column names; 'date' is always included
            (defaults to ['date', 'close'])
        :return: pandas dataframe
        '''
        if not table_name:
            table_name = self.table_name
        # fix: mutable default argument ['date', 'close'] was mutated on every
        # call (fields.append), growing the shared default -- use None sentinel
        if fields is None:
            fields = ['date', 'close']
        # always include 'date' and de-duplicate without mutating the caller's
        # list
        fields = list(set(fields) | {'date'})
        stmt = 'SELECT ' + ','.join(map(str, fields))
        stmt += ''' FROM `{}` '''.format(table_name)
        # NOTE(review): values are interpolated directly into the SQL; inputs
        # are internal here, but parameterize if ever exposed to untrusted
        # callers
        stmt += '''WHERE `date` BETWEEN '{}' AND '{}' '''.format(start, end)
        return pd.read_sql(stmt, self.conn)

    def parse_datetime(self, start, end, freq):
        '''Split a [start, end] window into IB-request-sized pieces.

        IB limits the duration of a historical-data request depending on the
        bar size, so the window is cut into chunks no longer than that limit.

        :param start: string in datetime format, eg. '2019-07-18 17:15:00'
            (interpreted as UTC)
        :param end: same format as start
        :param freq: string, format '# timeUnit' e.g. '1 hour'
        :return: list of tuples (end[datetime], duration[str]) suitable for
            passing to reqHistoricalData
        :raises RuntimeError: when the frequency unit is not recognised
        '''
        # Map the frequency's unit to the maximum duration IB accepts per
        # request ('mins' vs 'months' disambiguated by the second letter)
        freq_unit = freq.split()[1][0]
        if freq_unit == 's':
            max_dur = dt.timedelta(days=1)
        elif freq_unit == 'm':
            if (freq.split()[1][1]) == 'i':
                max_dur = dt.timedelta(weeks=1)       # minutes
            else:
                max_dur = dt.timedelta(weeks=104)     # months
        elif freq_unit == 'h':
            max_dur = dt.timedelta(weeks=4)
        elif freq_unit == 'd':
            max_dur = dt.timedelta(weeks=24)
        elif freq_unit == 'w':
            max_dur = dt.timedelta(weeks=104)
        else:
            raise (RuntimeError('''
            Incorrect input for 'freq' argument'''))

        out_list = []
        start = dt.datetime.strptime(start + ' +0000',
                                     '%Y-%m-%d %H:%M:%S %z')
        end = dt.datetime.strptime(end + ' +0000',
                                   '%Y-%m-%d %H:%M:%S %z')
        t_diff = end - start
        window_count = t_diff // max_dur
        max_dur_str = '{} D'.format(max(1, max_dur.days))
        # Walk backwards from `end`, emitting one full-size window at a time
        while window_count > 0:
            out_list.append((end, max_dur_str))
            end -= max_dur
            window_count -= 1
        # Final (possibly partial) window covering the remainder
        leftover_time = end - start
        out_list.append((end, '{} D'.format(max(1, 1 + leftover_time.days))))
        return out_list

    def pull_from_IB(self, start, end, market='EURUSD', freq='1 hour'):
        '''Download historical midpoint bars from IB for a forex pair.

        Note, all times should be UTC (aka Zulu, Z, GMT).

        :param start: string in datetime format, eg. '2019-07-18 17:15:00'
        :param end: same format as start
        :param market: forex pair symbol, e.g. 'EURUSD'
        :param freq: Must be one of: '1 secs', '5 secs', '10 secs',
            '15 secs', '30 secs', '1 min', '2 mins', '3 mins', '5 mins',
            '10 mins', '15 mins', '20 mins', '30 mins', '1 hour', '2 hours',
            '3 hours', '4 hours', '8 hours', '1 day', '1 week', '1 month'
            (see IB Docs)
        :return: list of dataframes with columns
            ['date', 'open', 'high', 'low', 'close']
        '''
        # Parse the requested window so API calls are not as lengthy
        time_list = self.parse_datetime(start, end, freq)
        df_list = []
        contract = Forex(market)
        for time_pair in time_list:
            # useRTH=True: only data from within Regular Trading Hours.
            # NOTE(review): formatDate is not set here; setting it to 2 would
            # return timezone-aware UTC datetimes instead of TWS local time.
            bars = self.ib.reqHistoricalData(contract,
                                             endDateTime=time_pair[0],
                                             durationStr=time_pair[1],
                                             barSizeSetting=freq,
                                             whatToShow='MIDPOINT',
                                             useRTH=True)
            # convert to pandas dataframe, dropping ['barCount', 'average']
            df = util.df(bars)
            df_list.append(df[['date', 'open', 'high', 'low',
                               'close']].copy())
        return df_list

    def update_from_IB(self, start, end, market='EURUSD', freq='1 hour'):
        '''Pull bars from IB for the window and upload them to the database.

        :return: 1 on completion
        '''
        df_list = self.pull_from_IB(start, end, market, freq)
        for df in df_list:
            self.upload_df_to_db(df, date_encode=True)
        return 1