def test_continuous_ticks(self):
    filter_provider = TicksInPeriodProvider(
        ticker=['AAPL', 'GOOG'],
        bgn_prd=datetime.datetime.now() - datetime.timedelta(days=1),
        ascend=True,
        delta=relativedelta(minutes=15))

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, num_connections=2) as listener:
        events_count = {'ticks': 0, 'batches': 0, 'minibatches': 0}
        e1 = threading.Event()

        def process_batch_listener(event):
            if event['type'] == 'level_1_tick_batch':
                try:
                    self.assertTrue(len(event['data']) > 0)
                    self.assertEqual(len(event['data'].shape), 2)
                    self.assertEqual(event['data'].shape[1], 14)
                finally:
                    events_count['batches'] += 1
                    if events_count['batches'] >= 2:
                        e1.set()

        listeners += process_batch_listener

        listener.start()

        e1.wait()
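
# The event plumbing used throughout this suite: pyevents listener containers
# are composed with '+=' and fire by being called with an event dict. The
# sketch below is a minimal stand-in for illustration only -- it shows just the
# calling convention, not pyevents' actual async dispatch or filtering:


class _ListenersSketch(list):
    """Illustrative only: collect callables and fan each event out to all of them."""

    def __iadd__(self, fn):
        self.append(fn)
        return self

    def __call__(self, event):
        for fn in self:
            fn(event)


# usage mirrors the tests:
#   ls = _ListenersSketch()
#   ls += lambda e: print(e['type'])
#   ls({'type': 'level_1_tick_batch', 'data': None})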
def test_multiple_ticks(self):
    filter_provider = DefaultFilterProvider()
    filter_provider += TicksFilter(ticker=["IBM", "AAPL"], max_ticks=100)

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, num_connections=2) as listener, \
            listener.batch_provider() as provider:
        e1 = threading.Event()

        def process_batch_listener_column(event):
            if event['type'] == 'level_1_tick_batch':
                try:
                    batch = event['data']
                    self.assertEqual(batch.shape[1], 14)
                finally:
                    e1.set()

        listeners += process_batch_listener_column

        listener.start()

        e1.wait()

        for i, d in enumerate(provider):
            self.assertEqual(d.shape[1], 14)

            if i == 1:
                break
def test_daily_2(self):
    filter_provider = DefaultFilterProvider()
    filter_provider += BarsDailyFilter(ticker=["IBM", "AAPL"], num_days=20)

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, num_connections=2) as listener, \
            listener.batch_provider() as provider:
        e1 = threading.Event()

        def process_batch_listener_column(event):
            if event['type'] == 'daily_batch':
                batch = event['data']
                # 2 tickers x 20 days = 40 rows
                self.assertEqual(batch.shape, (40, 8))
                e1.set()

        listeners += process_batch_listener_column

        listener.start()

        e1.wait()

        for i, d in enumerate(provider):
            self.assertEqual(d.shape, (40, 8))

            if i == 1:
                break
def test_continuous_bars(self):
    now = datetime.datetime.now()
    filter_provider = BarsInPeriodProvider(
        ticker=['AAPL', 'GOOG'],
        bgn_prd=datetime.datetime(now.year - 2, 1, 1),
        delta=relativedelta(days=10),
        interval_len=3600,
        ascend=True,
        interval_type='s')

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, num_connections=2) as listener:
        events_count = {'bars': 0, 'batches': 0, 'minibatches': 0}
        e1 = threading.Event()

        def process_batch_listener(event):
            if event['type'] == 'bar_batch':
                try:
                    self.assertEqual(len(event['data'].index.levels[0]), 2)
                    self.assertFalse(event['data'].isnull().values.any())
                finally:
                    events_count['batches'] += 1
                    if events_count['batches'] >= 2:
                        e1.set()

        listeners += process_batch_listener

        listener.start()

        e1.wait()
def test_bar_split_adjust_2(self):
    # the assertion bands below only hold for split-adjusted PLUS bars in this window
    filter_provider = DefaultFilterProvider()
    filter_provider += BarsInPeriodFilter(
        ticker=["PLUS", "AAPL"],
        bgn_prd=datetime.datetime(2017, 3, 31),
        end_prd=datetime.datetime(2017, 4, 5),
        interval_len=3600,
        ascend=True,
        interval_type='s')

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, sync_timestamps=False, timestamp_first=True, num_connections=2) as listener, \
            listener.batch_provider() as provider:
        listener.start()

        for i, d in enumerate(provider):
            idx = pd.IndexSlice

            self.assertLess(d.loc[idx[:, 'PLUS'], 'open'].max(), 68)
            self.assertGreater(d.loc[idx[:, 'PLUS'], 'open'].min(), 65)
            self.assertGreater(d.loc[idx[:, 'AAPL'], 'open'].min(), 142)

            if i == 1:
                break
def test_1(self):
    e_orders = {'GOOG': threading.Event(), 'AAPL': threading.Event()}
    e_cancel = threading.Event()
    e_positions = threading.Event()

    listeners = AsyncListeners()

    class CustomIBEvents(IBEvents):
        def cancel_all_orders(self):
            self.reqOpenOrders()

        def openOrder(self, orderId, contract, order, orderState):
            super().openOrder(orderId, contract, order, orderState)
            if orderState.status == 'PreSubmitted':
                self.cancelOrder(orderId)
                e_orders[contract.symbol].set()

        def openOrderEnd(self):
            super().openOrderEnd()
            e_cancel.set()

    ibe = CustomIBEvents(listeners=listeners, ipaddress="127.0.0.1", portid=4002, clientid=0)

    with ibe:
        listeners += lambda x: e_orders['GOOG'].set() if isinstance(x['data'], BaseOrder) and x['type'] == 'order_fulfilled' and x['data'].symbol == 'GOOG' else None
        listeners({'type': 'order_request', 'data': MarketOrder(Type.BUY, 'GOOG', 1)})

        listeners += lambda x: e_orders['AAPL'].set() if isinstance(x['data'], BaseOrder) and x['type'] == 'order_fulfilled' and x['data'].symbol == 'AAPL' else None
        listeners({'type': 'order_request', 'data': MarketOrder(Type.BUY, 'AAPL', 1)})

        listeners += lambda x: e_positions.set() if isinstance(x['data'], pd.DataFrame) and x['type'] == 'ibapi_positions' else None
        listeners({'type': 'positions_request', 'data': None})

        for e in e_orders.values():
            e.wait()

        e_positions.wait()

        ibe.cancel_all_orders()

        e_cancel.wait()
def test_logging(self):
    client = pymongo.MongoClient()

    try:
        # logging.basicConfig(level=logging.DEBUG)

        listeners = AsyncListeners()
        fulfilled_orders = SyncListeners()

        pm = PortfolioManager(listeners=listeners, initial_capital=10000, fulfilled_orders_event_stream=fulfilled_orders)

        store = MongoDBStore(client.test_db.store, lambda event: event['type'] == 'portfolio_update', listeners=listeners)

        # order 1
        o1 = MarketOrder(Type.BUY, 'GOOG', 100)
        o1.add_position(14, 23)
        o1.add_position(86, 24)
        o1.fulfill_time = datetime.datetime.now()

        e1 = threading.Event()
        listeners += lambda x: e1.set() if x['type'] == 'store_object' else None
        fulfilled_orders(o1)
        e1.wait()

        # order 2
        o2 = MarketOrder(Type.BUY, 'AAPL', 50)
        o2.add_position(50, 21)
        o2.fulfill_time = datetime.datetime.now()

        e2 = threading.Event()
        listeners += lambda x: e2.set() if x['type'] == 'store_object' else None
        fulfilled_orders(o2)
        e2.wait()

        obj = store.restore(client.test_db.store, pm._id)
        self.assertEqual(obj._id, pm._id)
        self.assertEqual(len(obj.orders), 2)
        self.assertEqual(obj.initial_capital, 10000)
    finally:
        client.drop_database('test_db')
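
# Note: how MongoDBStore keys and serializes portfolio documents is internal to
# the store, so the assertions above go through store.restore() rather than raw
# queries. For ad-hoc debugging, plain pymongo still works against the same
# collection, e.g. client.test_db.store.count_documents({}).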
def test_bars_2(self):
    filter_provider = DefaultFilterProvider()
    filter_provider += BarsInPeriodFilter(
        ticker=["IBM", "AAPL"],
        bgn_prd=datetime.datetime(2017, 3, 1),
        end_prd=datetime.datetime(2017, 3, 2),
        interval_len=60,
        ascend=True,
        interval_type='s',
        max_ticks=20)

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, num_connections=2, filter_provider=filter_provider) as listener, \
            listener.batch_provider() as provider:
        e1 = threading.Event()

        def process_batch_listener_column(event):
            if event['type'] == 'bar_batch':
                batch = event['data']
                self.assertEqual(len(batch.index.levels), 2)
                self.assertEqual(batch.loc['AAPL'].shape[1], 9)
                self.assertEqual(batch.loc['IBM'].shape[1], 9)
                self.assertFalse(batch.isnull().values.any())
                e1.set()

        listeners += process_batch_listener_column

        listener.start()

        e1.wait()

        for i, d in enumerate(provider):
            self.assertEqual(len(d.index.levels), 2)
            self.assertEqual(d.loc['AAPL'].shape[1], 9)
            self.assertEqual(d.loc['IBM'].shape[1], 9)
            self.assertFalse(d.isnull().values.any())

            if i == 1:
                break
def test_bar_split_adjust_1(self):
    # the assertion bands below only hold for split-adjusted PLUS bars in this window
    filter_provider = DefaultFilterProvider()
    filter_provider += BarsInPeriodFilter(
        ticker="PLUS",
        bgn_prd=datetime.datetime(2017, 3, 31),
        end_prd=datetime.datetime(2017, 4, 5),
        interval_len=3600,
        ascend=True,
        interval_type='s',
        max_ticks=100)

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, timestamp_first=True, num_connections=2) as listener, \
            listener.batch_provider() as provider:
        e1 = threading.Event()

        def process_bar(event):
            if event['type'] == 'bar_batch':
                d = event['data']
                try:
                    self.assertLess(d['open'].max(), 68)
                    self.assertGreater(d['open'].min(), 65)
                finally:
                    e1.set()

        listeners += process_bar

        listener.start()

        e1.wait()

        for i, d in enumerate(provider):
            self.assertLess(d['open'].max(), 68)
            self.assertGreater(d['open'].min(), 65)

            if i == 1:
                break
def test_bar(self):
    filter_provider = DefaultFilterProvider()
    filter_provider += BarsFilter(ticker="IBM", interval_len=60, interval_type='s', max_bars=20)

    listeners = AsyncListeners()

    with IQFeedHistoryEvents(listeners=listeners, fire_batches=True, filter_provider=filter_provider, num_connections=2) as listener, \
            listener.batch_provider() as provider:
        e1 = threading.Event()

        def process_batch_listener_column(event):
            if event['type'] == 'bar_batch':
                batch = event['data']
                self.assertEqual(batch.shape, (20, 9))
                e1.set()

        listeners += process_batch_listener_column

        listener.start()

        e1.wait()

        for i, d in enumerate(provider):
            self.assertEqual(d.shape, (20, 9))
            self.assertNotEqual(d['timestamp'].iloc[0], d['timestamp'].iloc[1])

            if i == 1:
                break
def test_historical_bar_mock_orders(self):
    with IQFeedHistoryProvider() as provider:
        listeners = AsyncListeners()

        f = BarsFilter(ticker=["GOOG", "AAPL", "IBM"], interval_len=60, interval_type='s', max_bars=5)
        data = provider.request_data(f, sync_timestamps=False).swaplevel(0, 1).sort_index()

        dre = DataReplayEvents(listeners=listeners,
                               data_replay=DataReplay().add_source([data], 'data', historical_depth=5),
                               event_name='bar')

        bars = dre.event_filter_by_source('data')

        order_request_events = SyncListeners()

        me = MockExchange(listeners=listeners,
                          order_requests_event_stream=order_request_events,
                          bar_event_stream=bars,
                          order_processor=StaticSlippageLoss(0.1),
                          commission_loss=PerShareCommissionLoss(0.1))

        pm = PortfolioManager(listeners=listeners,
                              initial_capital=10000,
                              bar_event_stream=bars,
                              fulfilled_orders_event_stream=me.fulfilled_orders_stream())

        portfolio_updates = pm.portfolio_updates_stream()

        o1 = StopLimitOrder(Type.BUY, 'GOOG', 1, 99999, 1)
        o2 = StopLimitOrder(Type.BUY, 'AAPL', 3, 99999, 1)
        o3 = StopLimitOrder(Type.BUY, 'IBM', 1, 99999, 1)
        o4 = StopLimitOrder(Type.SELL, 'AAPL', 1, 1, 99999)

        order_request_events(o1)
        order_request_events(o2)
        order_request_events(o3)
        order_request_events(o4)

        e1 = threading.Event()
        portfolio_updates += lambda x: e1.set() if 'GOOG' in x.symbols else None

        e2 = threading.Event()
        portfolio_updates += lambda x: e2.set() if 'AAPL' in x.symbols else None

        e3 = threading.Event()
        portfolio_updates += lambda x: e3.set() if 'IBM' in x.symbols else None

        dre.start()

        e1.wait()
        e2.wait()
        e3.wait()

        self.assertLess(pm.capital, pm.initial_capital)

        self.assertTrue('GOOG' in pm.symbols)
        self.assertTrue('AAPL' in pm.symbols)
        self.assertTrue('IBM' in pm.symbols)

        self.assertEqual(pm.quantity('GOOG'), 1)
        # 3 AAPL bought via o2, 1 sold back via o4
        self.assertEqual(pm.quantity('AAPL'), 2)
        self.assertEqual(pm.quantity('IBM'), 1)

        self.assertGreater(pm.value('GOOG'), 0)
        self.assertGreater(pm.value('AAPL'), 0)
        self.assertGreater(pm.value('IBM'), 0)
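
# The e1/e2/e3 waits above block forever if a fulfillment is never replayed. An
# optional helper along these lines (not part of the original suite; the 30s
# default is an arbitrary assumption) turns a lost event into a test failure:


def _wait_all(*events, timeout=30):
    """Wait for every threading.Event; raise if any of them never fires."""
    for e in events:
        if not e.wait(timeout):
            raise TimeoutError('event not set within %ds' % timeout)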
def test_historical_price_updates(self):
    listeners = AsyncListeners()
    fulfilled_orders = SyncListeners()

    # order 1
    o1 = MarketOrder(Type.BUY, 'GOOG', 100)
    o1.add_position(14, 1)
    o1.add_position(86, 1)
    o1.fulfill_time = datetime.datetime.now()

    e1 = threading.Event()
    listeners += lambda x: e1.set() if x['type'] == 'portfolio_value_update' else None

    # order 2 (created but never fulfilled in this test)
    o2 = MarketOrder(Type.BUY, 'GOOG', 90)
    o2.add_position(4, 0.5)
    o2.add_position(86, 0.5)
    o2.fulfill_time = datetime.datetime.now()

    # order 3
    o3 = MarketOrder(Type.BUY, 'AAPL', 100)
    o3.add_position(14, 0.2)
    o3.add_position(86, 0.2)
    o3.fulfill_time = datetime.datetime.now()

    e3 = threading.Event()
    listeners += lambda x: e3.set() if x['type'] == 'portfolio_value_update' else None

    # historical data
    with IQFeedHistoryProvider() as provider:
        f = BarsFilter(ticker=["GOOG", "AAPL"], interval_len=60, interval_type='s', max_bars=5)
        data = provider.request_data(f, sync_timestamps=False).swaplevel(0, 1).sort_index()

        dre = DataReplayEvents(listeners, DataReplay().add_source([data], 'data', historical_depth=2), event_name='bar')

        pm = PortfolioManager(listeners=listeners,
                              initial_capital=10000,
                              fulfilled_orders_event_stream=fulfilled_orders,
                              bar_event_stream=dre.event_filter_by_source('data'))

        fulfilled_orders(o1)
        fulfilled_orders(o3)

        dre.start()

        e1.wait()
        e3.wait()

        self.assertNotEqual(pm.value('GOOG'), 1)
        self.assertNotEqual(pm.value('GOOG'), 0.5)
        self.assertNotEqual(pm.value('AAPL'), 0.2)
        self.assertEqual(len(pm._values), 2)
def test_price_updates(self):
    listeners = AsyncListeners()

    with IQFeedLevel1Listener(listeners=listeners, mkt_snapshot_depth=2) as level_1:
        fulfilled_orders = SyncListeners()

        pm = PortfolioManager(listeners=listeners,
                              initial_capital=10000,
                              tick_event_stream=level_1.all_level_1_filter(),
                              fulfilled_orders_event_stream=fulfilled_orders)

        # order 1
        o1 = MarketOrder(Type.BUY, 'GOOG', 100)
        o1.add_position(14, 1)
        o1.add_position(86, 1)
        o1.fulfill_time = datetime.datetime.now()

        e1 = threading.Event()
        listeners += lambda x: e1.set() if x['type'] == 'portfolio_value_update' and pm.value('GOOG') > 1 else None

        fulfilled_orders(o1)
        level_1.watch('GOOG')

        e1.wait()

        self.assertNotEqual(pm.value('GOOG'), 1)

        # order 2
        o2 = MarketOrder(Type.BUY, 'GOOG', 90)
        o2.add_position(4, 0.5)
        o2.add_position(86, 0.5)
        o2.fulfill_time = datetime.datetime.now()

        self.assertNotEqual(pm.value('GOOG'), 1)
        self.assertNotEqual(pm.value('GOOG'), 0.5)

        # order 3
        o3 = MarketOrder(Type.BUY, 'AAPL', 100)
        o3.add_position(14, 0.2)
        o3.add_position(86, 0.2)
        o3.fulfill_time = datetime.datetime.now()

        e3 = threading.Event()
        listeners += lambda x: e3.set() if x['type'] == 'portfolio_value_update' and pm.value('AAPL') > 0.2 else None

        fulfilled_orders(o3)
        level_1.watch('AAPL')

        e3.wait()

        self.assertNotEqual(pm.value('GOOG'), 1)
        self.assertNotEqual(pm.value('GOOG'), 0.5)
        self.assertNotEqual(pm.value('AAPL'), 0.2)
        self.assertEqual(len(pm._values), 2)
def test_1(self):
    listeners = AsyncListeners()
    fulfilled_orders = SyncListeners()

    pm = PortfolioManager(listeners=listeners, initial_capital=10000, fulfilled_orders_event_stream=fulfilled_orders)

    # order 1
    o1 = MarketOrder(Type.BUY, 'GOOG', 100)
    o1.add_position(14, 23)
    o1.add_position(86, 24)
    o1.fulfill_time = datetime.datetime.now()

    e1 = threading.Event()
    listeners += lambda x: e1.set()
    fulfilled_orders(o1)
    e1.wait()

    self.assertEqual(len(pm.quantity()), 1)
    self.assertEqual(pm.quantity()['GOOG'], 100)
    self.assertEqual(pm.quantity('GOOG'), 100)
    self.assertEqual(pm.value('GOOG', multiply_by_quantity=True), 100 * 24)
    self.assertEqual(len(pm.value()), 1)
    self.assertEqual(pm.value(multiply_by_quantity=True)['GOOG'], 100 * 24)
    self.assertEqual(pm.capital, 10000 - (14 * 23 + 86 * 24))

    # order 2
    o2 = MarketOrder(Type.BUY, 'GOOG', 150)
    o2.add_position(110, 25)
    o2.add_position(30, 26)
    o2.add_position(10, 27)
    o2.fulfill_time = datetime.datetime.now()

    e2 = threading.Event()
    pm.listeners += lambda x: e2.set()
    fulfilled_orders(o2)
    e2.wait()

    self.assertEqual(len(pm.quantity()), 1)
    self.assertEqual(pm.quantity()['GOOG'], 250)
    self.assertEqual(pm.quantity('GOOG'), 250)
    self.assertEqual(pm.value('GOOG', multiply_by_quantity=True), 250 * 27)
    self.assertEqual(len(pm.value()), 1)
    self.assertEqual(pm.value(multiply_by_quantity=True)['GOOG'], 250 * 27)
    self.assertEqual(pm.capital, 10000 - (14 * 23 + 86 * 24 + 110 * 25 + 30 * 26 + 10 * 27))

    # order 3
    o3 = MarketOrder(Type.SELL, 'GOOG', 60)
    o3.add_position(60, 22)
    o3.fulfill_time = datetime.datetime.now()

    e3 = threading.Event()
    pm.listeners += lambda x: e3.set()
    fulfilled_orders(o3)
    e3.wait()

    self.assertEqual(len(pm.quantity()), 1)
    self.assertEqual(pm.quantity()['GOOG'], 190)
    self.assertEqual(pm.quantity('GOOG'), 190)
    self.assertEqual(pm.value('GOOG'), 22)
    self.assertEqual(len(pm.value()), 1)
    self.assertEqual(pm.value()['GOOG'], 22)
    self.assertEqual(pm.capital, 10000 - (14 * 23 + 86 * 24 + 110 * 25 + 30 * 26 + 10 * 27) + 60 * 22)

    # order 4
    o4 = MarketOrder(Type.BUY, 'AAPL', 50)
    o4.add_position(50, 21)
    o4.fulfill_time = datetime.datetime.now()

    e4 = threading.Event()
    pm.listeners += lambda x: e4.set()
    fulfilled_orders(o4)
    e4.wait()

    self.assertEqual(len(pm.quantity()), 2)
    self.assertEqual(pm.quantity()['AAPL'], 50)
    self.assertEqual(pm.quantity('AAPL'), 50)
    self.assertEqual(pm.value('AAPL', multiply_by_quantity=True), 50 * 21)
    self.assertEqual(len(pm.value()), 2)
    self.assertEqual(pm.value(multiply_by_quantity=True)['AAPL'], 50 * 21)
    self.assertEqual(pm.capital, 10000 - (14 * 23 + 86 * 24 + 110 * 25 + 30 * 26 + 10 * 27 + 50 * 21) + 60 * 22)
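
# Worked arithmetic behind the capital assertions in test_1 above; pm.value(symbol)
# tracks the most recent fill price seen for that symbol:
#   after o1: 10000 - (14*23 + 86*24)          = 10000 - 2386 = 7614; value('GOOG') = 24
#   after o2:  7614 - (110*25 + 30*26 + 10*27) =  7614 - 3800 = 3814; value('GOOG') = 27
#   after o3:  3814 + 60*22                    =  3814 + 1320 = 5134; value('GOOG') = 22
#   after o4:  5134 - 50*21                    =  5134 - 1050 = 4084; value('AAPL') = 21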
def test_request_ohlc(self):
    listeners = AsyncListeners()

    with IQFeedHistoryProvider(num_connections=2) as history:
        streaming_conn = iq.QuoteConn()
        streaming_conn.connect()

        end_prd = datetime.datetime(2017, 5, 1)

        # test single symbol request
        filters = (BarsInPeriodFilter(ticker="IBM", bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd, interval_len=3600, ascend=True, interval_type='s'),
                   BarsInPeriodFilter(ticker="AAPL", bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd, interval_len=3600, ascend=True, interval_type='s'),
                   BarsInPeriodFilter(ticker="AAPL", bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd, interval_len=600, ascend=True, interval_type='s'))

        update_splits_dividends(client=self._client, fundamentals=get_fundamentals({'IBM', 'AAPL'}, streaming_conn).values())

        adjusted = list()

        for f in filters:
            datum = history.request_data(f, sync_timestamps=False)
            datum.drop('timestamp', axis=1, inplace=True)
            datum['interval'] = str(f.interval_len) + '_' + f.interval_type
            self._client.write_points(datum, 'bars', protocol='line', tag_columns=['symbol', 'interval'], time_precision='s')
            datum.drop('interval', axis=1, inplace=True)

            datum = history.request_data(f, sync_timestamps=False)
            adjust_df(datum, get_adjustments(client=self._client, symbol=f.ticker))
            adjusted.append(datum)

            cache_requests = InfluxDBOHLCRequest(client=self._client, interval_len=f.interval_len, interval_type=f.interval_type)
            _, test_data = cache_requests.request(symbol=f.ticker)
            adjust_df(test_data, get_adjustments(client=self._client, symbol=f.ticker))

            del datum['total_volume']
            del datum['number_of_trades']
            assert_frame_equal(datum, test_data)

        for datum, f in zip(adjusted, filters):
            cache_requests = InfluxDBOHLCRequest(client=self._client, interval_len=f.interval_len, interval_type=f.interval_type)
            _, test_data = cache_requests.request(symbol=f.ticker)
            _, test_data_limit = cache_requests.request(symbol=f.ticker, bgn_prd=f.bgn_prd + relativedelta(days=7), end_prd=f.end_prd - relativedelta(days=7))
            self.assertGreater(len(test_data_limit), 0)
            self.assertLess(len(test_data_limit), len(test_data))

        # test multisymbol request
        requested_data = history.request_data(BarsInPeriodFilter(ticker=["AAPL", "IBM"], bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd, interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False)
        requested_data = requested_data.swaplevel(0, 1).sort_index()
        del requested_data['total_volume']
        del requested_data['number_of_trades']

        cache_requests = InfluxDBOHLCRequest(client=self._client, interval_len=3600, listeners=listeners)
        # 'TSG' has no cached data, so only the IBM and AAPL frames should come back
        _, test_data = cache_requests.request(symbol=['IBM', 'AAPL', 'TSG'], bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd)
        assert_frame_equal(requested_data, test_data)

        # test any symbol request
        requested_data = history.request_data(BarsInPeriodFilter(ticker=["AAPL", "IBM"], bgn_prd=datetime.datetime(2017, 4, 1), end_prd=end_prd, interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False)
        requested_data = requested_data.swaplevel(0, 1).sort_index()
        del requested_data['total_volume']
        del requested_data['number_of_trades']

        e = threading.Event()

        def listen(event):
            if event['type'] == 'cache_result':
                assert_frame_equal(requested_data, event['data'][0])
                e.set()

        listeners += listen

        listeners({'type': 'request_ohlc', 'data': {'bgn_prd': datetime.datetime(2017, 4, 1), 'end_prd': end_prd}})

        e.wait()

        streaming_conn.disconnect()
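
# Cache layout assumed by test_request_ohlc: a single 'bars' measurement where
# 'symbol' and 'interval' are InfluxDB tags (written via tag_columns above) and
# the OHLC columns are fields, so the hourly IBM series is addressable as
# symbol='IBM', interval='3600_s'.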