Example #1
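Note: the snippets on this page are shown without their imports. A minimal header that would make them run is sketched below; the project-specific names (IQFeedHistoryProvider, BarsFilter, DataReplay, DataReplayEvents, the listeners and order classes, etc.) come from the surrounding package, whose module paths the snippets themselves do not show.

# Shared standard-library / third-party imports assumed by the examples below.
# The project-specific classes are imported from the surrounding package;
# their exact module paths are omitted here, as they are in the snippets.
import datetime
import logging
import queue
import random
import threading

import pandas as pd

import psycopg2                                    # Example #7 only
from sqlalchemy import create_engine              # Example #7 only
from dateutil.relativedelta import relativedelta  # Example #7 only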
    def test_4_validity(self):
        logging.basicConfig(level=logging.DEBUG)

        batch_len = 10000
        batch_width = 500

        with IQFeedHistoryProvider() as provider:
            now = datetime.datetime.now()

            q = queue.Queue()
            provider.request_data_by_filters([BarsFilter(ticker="AAPL", interval_len=60, interval_type='s', max_bars=batch_len),
                                              BarsFilter(ticker="IBM", interval_len=60, interval_type='s', max_bars=batch_len)],
                                             q)

            df1 = q.get()[1]
            dfs1 = {'AAPL': df1}
            for i in range(batch_width):
                dfs1['AAPL_' + str(i)] = df1.sample(random.randint(int(len(df1) / 3), len(df1) - 1))

            dfs1 = pd.concat(dfs1).swaplevel(0, 1)
            dfs1.sort_index(inplace=True)

            df2 = q.get()[1]
            dfs2 = {'IBM': df2}
            for i in range(batch_width):
                dfs2['IBM_' + str(i)] = df2.sample(random.randint(int(len(df2) / 3), len(df2) - 1))

            dfs2 = pd.concat(dfs2).swaplevel(0, 1)
            dfs2.sort_index(inplace=True)

            logging.getLogger(__name__).debug('Random data generated in ' + str(datetime.datetime.now() - now) + ' with shapes ' + str(dfs1.shape) + ', ' + str(dfs2.shape))

            dr = DataReplay().add_source([dfs1], 'e1', historical_depth=100).add_source([dfs2], 'e2', historical_depth=100)
            prev_t = None
            now = datetime.datetime.now()

            for i, r in enumerate(dr):
                if i % 1000 == 0 and i > 0:
                    new_now = datetime.datetime.now()
                    elapsed = new_now - now
                    logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i) + ' iterations; ' + str(elapsed / 1000) + ' per iteration')
                    now = new_now

                for e in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    x, a = current_period(r[e])
                    self.assertFalse(x.empty)
                    t = r[e].iloc[-1]['timestamp']

                if prev_t is not None:
                    self.assertGreater(t, prev_t)

                prev_t = t
                self.assertTrue(isinstance(r, dict))
                self.assertGreaterEqual(len(r), 1)

            elapsed = datetime.datetime.now() - now
            logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i + 1) + ' iterations; ' + str(elapsed / max(i % 1000, 1)) + ' per iteration')

            self.assertIsNotNone(t)
            self.assertIsNotNone(prev_t)
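As the assertions above imply, iterating a DataReplay yields one dict per distinct timestamp, keyed by source name, where each value is a DataFrame holding the current bar plus up to historical_depth preceding bars. A stripped-down sketch of that contract (df1 and df2 stand in for the multi-indexed bar frames built above):

# Sketch of the iteration contract exercised by the test above;
# df1/df2 are placeholders for the multi-indexed bar DataFrames.
dr = DataReplay() \
    .add_source([df1], 'e1', historical_depth=100) \
    .add_source([df2], 'e2', historical_depth=100)

for r in dr:  # r maps source name -> DataFrame at the current timestamp
    for name in (e for e in r if isinstance(r[e], pd.DataFrame)):
        latest_bar = r[name].iloc[-1]  # newest row; the rows before it are
                                       # the rolling historical window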
Example #2
    def test_events(self):
        batch_len = 1000

        with IQFeedHistoryProvider() as provider:
            q = queue.Queue()
            provider.request_data_by_filters([BarsFilter(ticker="IBM", interval_len=60, interval_type='s', max_bars=batch_len),
                                              BarsFilter(ticker="AAPL", interval_len=60, interval_type='s', max_bars=batch_len)],
                                             q)

            listeners = SyncListeners()

            timestamps = set()

            def check_df(event):
                if event is not None and event['type'] == 'data':
                    for e in event:
                        if isinstance(event[e], pd.DataFrame):
                            t = event[e].iloc[0]['timestamp']

                    if len(timestamps) > 0:
                        self.assertGreater(t, max(timestamps))

                    timestamps.add(t)

                    self.assertTrue(isinstance(event, dict))
                    self.assertGreaterEqual(len(event), 2)

            listeners += check_df

            data_replay = DataReplay().add_source([q.get()[1]], 'e1').add_source([q.get()[1]], 'e2')
            DataReplayEvents(listeners=listeners, data_replay=data_replay, event_name='data').start()

            self.assertGreaterEqual(len(timestamps), batch_len)
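The event wiring used here, in isolation: callables subscribe to a listeners object with +=, DataReplayEvents wraps each replayed dict in an event carrying the configured event_name under its 'type' key, and start() drives the replay. A compressed sketch under those assumptions (some_df is a placeholder DataFrame source):

# Minimal listener wiring, following the test above.
listeners = SyncListeners()

def on_data(event):
    # fires once per replayed step
    if event['type'] == 'data':
        print('replayed one step')

listeners += on_data

dr = DataReplay().add_source([some_df], 'e1')
DataReplayEvents(listeners=listeners, data_replay=dr, event_name='data').start()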
Example #3
    def test_basic(self):
        batch_len = 1000

        with IQFeedHistoryProvider() as provider:
            q = queue.Queue()
            provider.request_data_by_filters([BarsFilter(ticker="IBM", interval_len=60, interval_type='s', max_bars=batch_len),
                                              BarsFilter(ticker="AAPL", interval_len=60, interval_type='s', max_bars=batch_len)],
                                             q)

            timestamps = set()

            dr = DataReplay().add_source(AsyncInPeriodProvider([q.get()[1]]), 'e1').add_source([q.get()[1]], 'e2')

            for i, r in enumerate(dr):
                for e in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    t = r[e].iloc[0]['timestamp']

                if len(timestamps) > 0:
                    self.assertGreater(t, max(timestamps))

                timestamps.add(t)

                self.assertTrue(isinstance(r, dict))
                self.assertGreaterEqual(len(r), 1)

            self.assertGreaterEqual(len(timestamps), batch_len)
Example #4
    def test_historical_bar_market_order(self):
        listeners = AsyncListeners()

        with IQFeedHistoryProvider() as provider:
            f = BarsFilter(ticker=["GOOG", "AAPL", "IBM"], interval_len=60, interval_type='s', max_bars=20)
            data = provider.request_data(f, sync_timestamps=False).swaplevel(0, 1).sort_index()
            dre = DataReplayEvents(listeners=listeners,
                                   data_replay=DataReplay().add_source([data], 'data', historical_depth=5),
                                   event_name='bar')

            order_request_events = SyncListeners()

            me = MockExchange(listeners=listeners,
                              order_requests_event_stream=order_request_events,
                              bar_event_stream=dre.event_filter_by_source('data'),
                              order_processor=StaticSlippageLoss(0.1),
                              commission_loss=PerShareCommissionLoss(0.1))

            fulfilled_orders = me.fulfilled_orders_stream()

            e1 = threading.Event()
            fulfilled_orders += lambda x: e1.set() if x.symbol == 'GOOG' else None

            e2 = threading.Event()
            fulfilled_orders += lambda x: e2.set() if x.symbol == 'AAPL' else None

            e3 = threading.Event()
            fulfilled_orders += lambda x: e3.set() if x.symbol == 'IBM' else None

            o1 = MarketOrder(Type.BUY, 'GOOG', 1)
            order_request_events(o1)

            o2 = MarketOrder(Type.BUY, 'AAPL', 3)
            order_request_events(o2)

            o3 = MarketOrder(Type.SELL, 'IBM', 1)
            order_request_events(o3)

            dre.start()

            e1.wait()
            e2.wait()
            e3.wait()

        self.assertEqual(o1.obtained_quantity, 1)
        self.assertGreater(o1.cost, 0)
        self.assertIsNotNone(o1.fulfill_time)

        self.assertEqual(o2.obtained_quantity, 3)
        self.assertGreater(o2.cost, 0)
        self.assertIsNotNone(o2.fulfill_time)

        self.assertEqual(o3.obtained_quantity, 1)
        self.assertGreater(o3.cost, 0)
        self.assertIsNotNone(o3.fulfill_time)
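Note the ordering here: the market orders are published on order_request_events before dre.start() begins the replay, so the MockExchange fills them against the first bars it replays, and the threading.Event objects block the test until each symbol's fill callback has fired.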
Example #5
    def test_2(self):
        historical_depth = 10
        with IQFeedHistoryProvider(num_connections=1) as provider:
            year = datetime.datetime.now().year - 1

            l1 = [provider.request_data(BarsInPeriodFilter(ticker=["MSFT", "AAPL"], bgn_prd=datetime.datetime(year, 3, 1), end_prd=datetime.datetime(year, 4, 1), interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False).swaplevel(0, 1).sort_index(),
                  provider.request_data(BarsInPeriodFilter(ticker=["MSFT", "AAPL"], bgn_prd=datetime.datetime(year, 4, 2), end_prd=datetime.datetime(year, 5, 1), interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False).swaplevel(0, 1).sort_index(),
                  provider.request_data(BarsInPeriodFilter(ticker=["MSFT", "AAPL"], bgn_prd=datetime.datetime(year, 5, 2), end_prd=datetime.datetime(year, 6, 1), interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False).swaplevel(0, 1).sort_index(),
                  provider.request_data(BarsInPeriodFilter(ticker=["MSFT", "AAPL"], bgn_prd=datetime.datetime(year, 8, 2), end_prd=datetime.datetime(year, 9, 1), interval_len=3600, ascend=True, interval_type='s'), sync_timestamps=False).swaplevel(0, 1).sort_index()]

            q2 = queue.Queue()
            provider.request_data_by_filters([BarsInPeriodFilter(ticker="IBM", bgn_prd=datetime.datetime(year, 4, 1), end_prd=datetime.datetime(year, 5, 1), interval_len=3600, ascend=True, interval_type='s'),
                                              BarsInPeriodFilter(ticker="IBM", bgn_prd=datetime.datetime(year, 5, 2), end_prd=datetime.datetime(year, 6, 1), interval_len=3600, ascend=True, interval_type='s'),
                                              BarsInPeriodFilter(ticker="IBM", bgn_prd=datetime.datetime(year, 6, 2), end_prd=datetime.datetime(year, 7, 1), interval_len=3600, ascend=True, interval_type='s')],
                                             q2)

            l2 = [q2.get()[1], q2.get()[1], q2.get()[1]]

            maxl = max(max([len(l) for l in l1]), max([len(l) for l in l2]))
            timestamps = set()
            counters = {'e1': 0, 'e2': 0}

            dr = DataReplay().add_source(AsyncInPeriodProvider(l1), 'e1', historical_depth=historical_depth).add_source(l2, 'e2', historical_depth=historical_depth)

            for r in dr:
                for e in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    t = r[e].iloc[-1]['timestamp']

                if len(timestamps) > 0:
                    self.assertGreater(t, max(timestamps))

                for e in [e for e in r if isinstance(r[e], pd.DataFrame) and e != 'e1']:
                    df = r[e]
                    self.assertTrue(df.index.is_monotonic_increasing)

                    counters[e] = 1 if e not in counters else counters[e] + 1
                    self.assertEqual(df.shape[0], min(counters[e], historical_depth + 1))

                timestamps.add(t)

                self.assertTrue(isinstance(r, dict))
                self.assertGreaterEqual(len(r), 1)

            self.assertGreater(maxl, 0)
            self.assertGreaterEqual(len(timestamps), maxl)

            months = set()
            for t in timestamps:
                months.add(t.month)

            self.assertTrue({3, 4, 5, 6, 8} < months)
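The shape assertion above pins down the historical_depth contract: each frame yielded for a source holds at most historical_depth + 1 rows (the current bar plus up to historical_depth preceding ones), and the window grows one row per iteration until it is full.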
Example #6
    def test_3_performance(self):
        logging.basicConfig(level=logging.DEBUG)

        batch_len = 10000
        batch_width = 500

        with IQFeedHistoryProvider() as provider:
            now = datetime.datetime.now()

            q = queue.Queue()
            provider.request_data_by_filters([BarsFilter(ticker="AAPL", interval_len=60, interval_type='s', max_bars=batch_len),
                                              BarsFilter(ticker="IBM", interval_len=60, interval_type='s', max_bars=batch_len)],
                                             q)

            df1 = q.get()[1]
            dfs1 = {'AAPL': df1}
            for i in range(batch_width):
                dfs1['AAPL_' + str(i)] = df1.sample(random.randint(int(len(df1) / 3), len(df1) - 1))

            dfs1 = pd.concat(dfs1).swaplevel(0, 1)
            dfs1.sort_index(inplace=True)

            logging.getLogger(__name__).debug('Random data generated in ' + str(datetime.datetime.now() - now) + ' with shapes ' + str(dfs1.shape))

            now = datetime.datetime.now()

            dr = DataReplay().add_source([dfs1], 'e1', historical_depth=100)

            for i, r in enumerate(dr):
                if i % 1000 == 0 and i > 0:
                    new_now = datetime.datetime.now()
                    elapsed = new_now - now
                    logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i) + ' iterations; ' + str(elapsed / 1000) + ' per iteration')
                    now = new_now

                for e in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    current_period(r[e])

            elapsed = datetime.datetime.now() - now
            logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i + 1) + ' iterations; ' + str(elapsed / max(i % 1000, 1)) + ' per iteration')
Example #7
    def test_data_replay(self):
        table_name = 'quandl_sf0'
        url = 'postgresql://*****:*****@localhost:5432/test'
        con = psycopg2.connect(url)
        con.autocommit = True

        dates = set()
        try:
            engine = create_engine(url)

            bulkinsert_SF0(url, table_name=table_name)

            now = datetime.datetime.now()
            bars_in_period = SFInPeriodProvider(
                conn=engine,
                table_name=table_name,
                bgn_prd=datetime.datetime(year=now.year - 10, month=1, day=1),
                delta=relativedelta(years=1),
                overlap=relativedelta(microseconds=-1))

            dr = DataReplay().add_source(bars_in_period,
                                         'e1',
                                         historical_depth=2)

            for i, r in enumerate(dr):
                d = r['e1'].iloc[-1].name[0].to_pydatetime()

                if len(dates) > 0:
                    self.assertGreater(d, max(dates))

                dates.add(d)

                self.assertTrue(isinstance(r, dict))
                self.assertGreaterEqual(len(r), 1)
        finally:
            con.cursor().execute(
                "DROP TABLE IF EXISTS {0};".format(table_name))
Example #8
    def test_historical_bar_mock_orders(self):
        with IQFeedHistoryProvider() as provider:
            listeners = AsyncListeners()

            f = BarsFilter(ticker=["GOOG", "AAPL", "IBM"],
                           interval_len=60,
                           interval_type='s',
                           max_bars=5)
            data = provider.request_data(f, sync_timestamps=False).swaplevel(
                0, 1).sort_index()
            dre = DataReplayEvents(listeners=listeners,
                                   data_replay=DataReplay().add_source(
                                       [data], 'data', historical_depth=5),
                                   event_name='bar')

            bars = dre.event_filter_by_source('data')

            order_request_events = SyncListeners()

            me = MockExchange(listeners=listeners,
                              order_requests_event_stream=order_request_events,
                              bar_event_stream=bars,
                              order_processor=StaticSlippageLoss(0.1),
                              commission_loss=PerShareCommissionLoss(0.1))

            pm = PortfolioManager(
                listeners=listeners,
                initial_capital=10000,
                bar_event_stream=bars,
                fulfilled_orders_event_stream=me.fulfilled_orders_stream())

            portfolio_updates = pm.portfolio_updates_stream()

            o1 = StopLimitOrder(Type.BUY, 'GOOG', 1, 99999, 1)
            o2 = StopLimitOrder(Type.BUY, 'AAPL', 3, 99999, 1)
            o3 = StopLimitOrder(Type.BUY, 'IBM', 1, 99999, 1)
            o4 = StopLimitOrder(Type.SELL, 'AAPL', 1, 1, 99999)

            order_request_events(o1)
            order_request_events(o2)
            order_request_events(o3)
            order_request_events(o4)

            e1 = threading.Event()
            portfolio_updates += lambda x: e1.set() if 'GOOG' in x.symbols else None

            e2 = threading.Event()
            portfolio_updates += lambda x: e2.set() if 'AAPL' in x.symbols else None

            e3 = threading.Event()
            portfolio_updates += lambda x: e3.set() if 'IBM' in x.symbols else None

            dre.start()

            e1.wait()
            e2.wait()
            e3.wait()

        self.assertLess(pm.capital, pm.initial_capital)
        self.assertTrue('GOOG' in pm.symbols)
        self.assertTrue('AAPL' in pm.symbols)
        self.assertTrue('IBM' in pm.symbols)

        self.assertEqual(pm.quantity('GOOG'), 1)
        self.assertEqual(pm.quantity('AAPL'), 2)
        self.assertEqual(pm.quantity('IBM'), 1)

        self.assertGreater(pm.value('GOOG'), 0)
        self.assertGreater(pm.value('AAPL'), 0)
        self.assertGreater(pm.value('IBM'), 0)
Example #9
    def test_historical_price_updates(self):
        listeners = AsyncListeners()
        fulfilled_orders = SyncListeners()

        # order 1
        o1 = MarketOrder(Type.BUY, 'GOOG', 100)
        o1.add_position(14, 1)
        o1.add_position(86, 1)
        o1.fulfill_time = datetime.datetime.now()

        e1 = threading.Event()
        listeners += lambda x: e1.set() if x['type'] == 'portfolio_value_update' else None

        # order 2
        o2 = MarketOrder(Type.BUY, 'GOOG', 90)
        o2.add_position(4, 0.5)
        o2.add_position(86, 0.5)
        o2.fulfill_time = datetime.datetime.now()

        # order 3
        o3 = MarketOrder(Type.BUY, 'AAPL', 100)
        o3.add_position(14, 0.2)
        o3.add_position(86, 0.2)
        o3.fulfill_time = datetime.datetime.now()

        e3 = threading.Event()
        listeners += lambda x: e3.set() if x['type'] == 'portfolio_value_update' else None

        # historical data
        with IQFeedHistoryProvider() as provider:
            f = BarsFilter(ticker=["GOOG", "AAPL"],
                           interval_len=60,
                           interval_type='s',
                           max_bars=5)
            data = provider.request_data(f, sync_timestamps=False).swaplevel(
                0, 1).sort_index()
            dre = DataReplayEvents(listeners,
                                   DataReplay().add_source([data],
                                                           'data',
                                                           historical_depth=2),
                                   event_name='bar')

            pm = PortfolioManager(
                listeners=listeners,
                initial_capital=10000,
                fulfilled_orders_event_stream=fulfilled_orders,
                bar_event_stream=dre.event_filter_by_source('data'))

            fulfilled_orders(o1)
            fulfilled_orders(o3)

            dre.start()

            e1.wait()
            e3.wait()

        self.assertNotEqual(pm.value('GOOG'), 1)
        self.assertNotEqual(pm.value('GOOG'), 0.5)
        self.assertNotEqual(pm.value('AAPL'), 0.2)
        self.assertEqual(len(pm._values), 2)
Example #10
    def test_current_day(self):
        logging.basicConfig(level=logging.DEBUG)

        batch_len = 10000
        batch_width = 5000

        with IQFeedHistoryProvider() as provider:
            l1, l2 = list(), list()

            dr = DataReplay().add_source(l1, 'e1', historical_depth=100)

            now = datetime.datetime.now()
            df = provider.request_data(BarsFilter(ticker="AAPL",
                                                  interval_len=3600,
                                                  interval_type='s',
                                                  max_bars=batch_len),
                                       sync_timestamps=False)

            dfs1 = {'AAPL': df}
            for i in range(batch_width):
                dfs1['AAPL_' + str(i)] = df.sample(
                    random.randint(int(len(df) / 3),
                                   len(df) - 1))

            df = pd.concat(dfs1).swaplevel(0, 1)
            df.reset_index(level='symbol', inplace=True)
            df.sort_index(inplace=True)
            df.set_index('level_1', drop=False, append=True, inplace=True)
            l1.append(df)

            logging.getLogger(__name__).debug('Random data generated in ' +
                                              str(datetime.datetime.now() -
                                                  now) + ' with shapes ' +
                                              str(df.shape))

            now = datetime.datetime.now()

            for i, r in enumerate(dr):
                if i % 1000 == 0 and i > 0:
                    new_now = datetime.datetime.now()
                    elapsed = new_now - now
                    logging.getLogger(__name__).debug('Time elapsed ' +
                                                      str(elapsed) + ' for ' +
                                                      str(i) +
                                                      ' iterations; ' +
                                                      str(elapsed / 1000) +
                                                      ' per iteration')
                    self.assertGreater(10000, (elapsed / 1000).microseconds)
                    now = new_now

                for e in r:
                    current_day(r[e], 'US/Eastern')
                    period = current_day(r[e])
                    self.assertTrue(not period.empty)
                    self.assertEqual(period.iloc[0].name[0].date(),
                                     period.iloc[1].name[0].date())

            elapsed = datetime.datetime.now() - now
            logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) +
                                              ' for ' + str(i + 1) +
                                              ' iterations; ' +
                                              str(elapsed / max(i % 1000, 1)) +
                                              ' per iteration')
Example #11
    def test_5(self):
        logging.basicConfig(level=logging.DEBUG)

        batch_len = 1000
        batch_width = 500

        with IQFeedHistoryProvider() as provider:
            now = datetime.datetime.now()

            q = queue.Queue()
            provider.request_data_by_filters([BarsFilter(ticker="AAPL", interval_len=60, interval_type='s', max_bars=batch_len)], q)

            df1 = q.get()[1]
            dfs1 = {'AAPL': df1}
            for i in range(batch_width):
                dfs1['AAPL_' + str(i)] = df1.sample(random.randint(int(len(df1) / 3), len(df1) - 1))

            dfs1 = pd.concat(dfs1).swaplevel(0, 1).sort_index()

            dr = DataReplay().add_source([dfs1], 'e1', historical_depth=0)
            logging.getLogger(__name__).debug('Random data generated in ' + str(datetime.datetime.now() - now) + ' with shape ' + str(dfs1.shape))

            prev_t = None
            now = datetime.datetime.now()

            listeners = SyncListeners()
            lb = LatestDataSnapshot(listeners=listeners, event='event', fire_update=True, depth=100)

            j = 0

            snapshots_count = {'count': 0}

            def snapshot_listener(event):
                if event['type'] == 'event_snapshot':
                    self.assertEqual(len(event['data'].index.levels[0]), min(lb.depth, j + 1))
                    snapshots_count['count'] += 1

            listeners += snapshot_listener
            for i, r in enumerate(dr):
                j = i
                for a in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    lb.on_event({'type': 'event', 'data': r[a]})

                if i % 100 == 0 and i > 0:
                    new_now = datetime.datetime.now()
                    elapsed = new_now - now
                    logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i) + ' iterations; ' + str(elapsed / 100) + ' per iteration')
                    now = new_now

                for e in [e for e in r if isinstance(r[e], pd.DataFrame)]:
                    t = r[e].iloc[-1]['timestamp']

                if prev_t is not None:
                    self.assertGreater(t, prev_t)

                prev_t = t
                self.assertTrue(isinstance(r, dict))
                self.assertGreaterEqual(len(r), 1)

            elapsed = datetime.datetime.now() - now
            logging.getLogger(__name__).debug('Time elapsed ' + str(elapsed) + ' for ' + str(i + 1) + ' iterations; ' + str(elapsed / max(i % 100, 1)) + ' per iteration')

            self.assertIsNotNone(t)
            self.assertIsNotNone(prev_t)
            self.assertEqual(batch_len, snapshots_count['count'])
Example #12
def data_replay_events(listeners):
    """Fixture: wrap an empty DataReplay in a DataReplayEvents firing 'data' events."""
    return DataReplayEvents(listeners, DataReplay(), event_name='data')
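A hypothetical use of this fixture, following the pattern of the tests above:

listeners = SyncListeners()
dre = data_replay_events(listeners)
# sources would then be added to the wrapped DataReplay before calling dre.start()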