# --- Example #1 ---
    def documents(ticker: str = "SPY", seconds: int = 10):
        """Watch level 1 quotes and trades for *ticker* for *seconds* seconds.

        Args:
            ticker: symbol to watch (previously an undefined free variable;
                now a parameter with a default for backward compatibility).
            seconds: how long to stream updates before returning.
        """
        import time

        # NOTE(review): the original constructed QuoteConn twice, leaking
        # the first instance; one construction is sufficient.
        quote_conn = iq.QuoteConn(name="pyiqfeed-Example-lvl1")
        quote_listener = iq.VerboseQuoteListener("Level 1 Listener")
        quote_conn.add_listener(quote_listener)
        with iq.ConnConnector([quote_conn]) as connector:
            all_fields = sorted(list(iq.QuoteConn.quote_msg_map.keys()))
            quote_conn.select_update_fieldnames(all_fields)
            quote_conn.watch(ticker)
            time.sleep(seconds)
            # Mirror get_level_1_quotes_and_trades: stop watching and detach
            # the listener before the connector tears the connection down.
            quote_conn.unwatch(ticker)
            quote_conn.remove_listener(quote_listener)
# --- Example #2 ---
    def test_exclude_splits(self):
        """Exclude bars around split events for plain and MultiIndex frames."""
        with IQFeedHistoryProvider() as provider:
            # Run the identical scenario twice: once with a single symbol
            # (plain index) and once with two symbols (MultiIndex).
            for symbols in ("PLUS", ["PLUS", "IBM"]):
                bar_filter = BarsInPeriodFilter(
                    ticker=symbols,
                    bgn_prd=datetime.datetime(2017, 3, 31),
                    end_prd=datetime.datetime(2017, 4, 5),
                    interval_len=3600,
                    ascend=True,
                    interval_type='s',
                    max_ticks=100)

                frame = provider.request_data(bar_filter,
                                              sync_timestamps=False)
                frame['include'] = True
                include_col = frame['include'].copy()

                conn = iq.QuoteConn()
                conn.connect()
                try:
                    sd = get_splits_dividends(bar_filter.ticker, conn=conn)
                finally:
                    conn.disconnect()

                result = exclude_splits(
                    include_col,
                    sd['value'].xs('split', level='type'),
                    10)

                self.assertTrue(result[~result].size == 10)
# --- Example #3 ---
def getQuoteData(ticker: str):
    """Poll level 1 fundamentals and summary for *ticker*.

    Watches the ticker and polls for up to 5 minutes until both the
    fundamentals and a summary message have arrived.

    Returns:
        A ``(fundamentals, summary_message)`` tuple on success, or ``""``
        if no summary message arrived before the timeout (kept for
        backward compatibility with existing callers).
    """
    fundamentals = ""
    summary = ""
    quote_conn = iq.QuoteConn(name="pyiqfeed-Example-lvl1")
    quote_listener = iq.VerboseQuoteListener("Level 1 Listener")
    quote_conn.add_listener(quote_listener)
    with iq.ConnConnector([quote_conn]) as connector:
        quote_conn.select_update_fieldnames(
            ["Bid", "Bid Time", "Ask", "Ask Time"])
        quote_conn.watch(ticker)
        t_end = time.time() + 5 * 60
        while time.time() < t_end:
            fundamentals = quote_conn.fundamentals
            summary = quote_conn.summary
            if fundamentals != "" and summary != "":
                break
            time.sleep(0.1)  # avoid a hot busy-wait while polling
        quote_conn.unwatch(ticker)
        quote_conn.remove_listener(quote_listener)
    if len(summary) > 0:
        # Bug fix: the original indexed summary[0] twice (once inside the
        # `with` block and again in the return), returning the first field
        # of the first summary message instead of the message itself.
        return fundamentals, summary[0]
    else:
        return ""
# --- Example #4 ---
    def __enter__(self):
        """Enter the runtime context and open the fundamentals feed."""
        super().__enter__()

        # Dedicated streaming connection used for fundamental data.
        conn = iq.QuoteConn()
        self.streaming_conn = conn
        conn.connect()

        return self
# --- Example #5 ---
def get_trades_only(ticker: str, seconds: int):
    """Stream trade-only level 1 updates for *ticker* for *seconds* seconds.

    Each trade message is reported through the verbose listener; the watch
    is removed before the connection shuts down.
    """
    listener = iq.VerboseQuoteListener("Trades Listener")
    conn = iq.QuoteConn(name="pyiqfeed-Example-trades-only")
    conn.add_listener(listener)

    with iq.ConnConnector([conn]) as connector:
        conn.trades_watch(ticker)
        time.sleep(seconds)
        conn.unwatch(ticker)
# --- Example #6 ---
def get_regional_quotes(ticker: str, seconds: int):
    """Stream regional level 1 quotes for *ticker* for *seconds* seconds.

    Regional updates are reported through the verbose listener; the
    regional watch is removed before the connection shuts down.
    """
    listener = iq.VerboseQuoteListener("Regional Listener")
    conn = iq.QuoteConn(name="pyiqfeed-Example-regional")
    conn.add_listener(listener)

    with iq.ConnConnector([conn]) as connector:
        conn.regional_watch(ticker)
        time.sleep(seconds)
        conn.regional_unwatch(ticker)
# --- Example #7 ---
    def __enter__(self):
        """Enter the context: subscribe to the quote connection.

        If this instance owns the connection, the IQFeed service is launched
        and a fresh connection is created and connected; otherwise the
        externally supplied connection is reused.
        """
        if self._own_conn:
            launch_service()
            self.conn = iq.QuoteConn()

        # Subscribe to the (owned or externally supplied) connection.
        self.conn.add_listener(self)

        if self._own_conn:
            # Connect only after the listener is attached, as the original
            # did, so no early messages are missed.
            self.conn.connect()

        self.queue = queue.Queue()
        return self
# --- Example #8 ---
def get_level_1_quotes_and_trades(ticker: str, seconds: int):
    """Stream every level 1 field for *ticker* for *seconds* seconds.

    Selects all known level 1 fields (sorted for a deterministic order),
    watches the ticker, then removes the watch and listener on the way out.
    """
    listener = iq.VerboseQuoteListener("Level 1 Listener")
    conn = iq.QuoteConn(name="pyiqfeed-Example-lvl1")
    conn.add_listener(listener)
    with iq.ConnConnector([conn]) as connector:
        field_names = sorted(list(iq.QuoteConn.quote_msg_map.keys()))
        conn.select_update_fieldnames(field_names)
        conn.watch(ticker)
        time.sleep(seconds)
        conn.unwatch(ticker)
        conn.remove_listener(listener)
    def test_request_ohlc(self):
        """End-to-end OHLC caching test against InfluxDB.

        Requests bar data from IQFeed for a few symbol/interval filters,
        writes it to InfluxDB, then verifies that cached requests (plain,
        adjusted, time-limited, multi-symbol and event-driven "any symbol")
        reproduce the originally requested data.
        """
        listeners = AsyncListeners()

        with IQFeedHistoryProvider(num_connections=2) as history:
            # Separate streaming connection used only to fetch fundamentals
            # (source of the splits/dividends adjustment data).
            streaming_conn = iq.QuoteConn()
            streaming_conn.connect()

            end_prd = datetime.datetime(2017, 5, 1)

            # test single symbol request
            filters = (BarsInPeriodFilter(ticker="IBM",
                                          bgn_prd=datetime.datetime(
                                              2017, 4, 1),
                                          end_prd=end_prd,
                                          interval_len=3600,
                                          ascend=True,
                                          interval_type='s'),
                       BarsInPeriodFilter(ticker="AAPL",
                                          bgn_prd=datetime.datetime(
                                              2017, 4, 1),
                                          end_prd=end_prd,
                                          interval_len=3600,
                                          ascend=True,
                                          interval_type='s'),
                       BarsInPeriodFilter(ticker="AAPL",
                                          bgn_prd=datetime.datetime(
                                              2017, 4, 1),
                                          end_prd=end_prd,
                                          interval_len=600,
                                          ascend=True,
                                          interval_type='s'))

            # Persist split/dividend adjustments so the cached requests
            # below can be adjusted the same way as the raw requests.
            update_splits_dividends(client=self._client,
                                    fundamentals=get_fundamentals(
                                        {'IBM', 'AAPL'},
                                        streaming_conn).values())
            adjusted = list()

            for f in filters:
                # Write the raw bars to InfluxDB, tagged by symbol/interval.
                datum = history.request_data(f, sync_timestamps=False)
                datum.drop('timestamp', axis=1, inplace=True)
                datum['interval'] = str(f.interval_len) + '_' + f.interval_type
                self._client.write_points(datum,
                                          'bars',
                                          protocol='line',
                                          tag_columns=['symbol', 'interval'],
                                          time_precision='s')
                datum.drop('interval', axis=1, inplace=True)

                # Re-request a clean copy and apply adjustments to it.
                datum = history.request_data(f, sync_timestamps=False)

                adjust_df(
                    datum, get_adjustments(client=self._client,
                                           symbol=f.ticker))
                adjusted.append(datum)

                # Read the same bars back through the cache, adjust, and
                # compare with the directly requested data.
                cache_requests = InfluxDBOHLCRequest(
                    client=self._client,
                    interval_len=f.interval_len,
                    interval_type=f.interval_type)
                _, test_data = cache_requests.request(symbol=f.ticker)
                adjust_df(
                    test_data,
                    get_adjustments(client=self._client, symbol=f.ticker))
                # Columns not served by the cache are dropped before compare.
                del datum['total_volume']
                del datum['number_of_trades']
                assert_frame_equal(datum, test_data)

            # A narrower [bgn_prd + 7d, end_prd - 7d] window must return a
            # non-empty strict subset of the full-range result.
            for datum, f in zip(adjusted, filters):
                cache_requests = InfluxDBOHLCRequest(
                    client=self._client,
                    interval_len=f.interval_len,
                    interval_type=f.interval_type)
                _, test_data = cache_requests.request(symbol=f.ticker)
                _, test_data_limit = cache_requests.request(
                    symbol=f.ticker,
                    bgn_prd=f.bgn_prd + relativedelta(days=7),
                    end_prd=f.end_prd - relativedelta(days=7))

                self.assertGreater(len(test_data_limit), 0)
                self.assertLess(len(test_data_limit), len(test_data))

            # test multisymbol request
            requested_data = history.request_data(BarsInPeriodFilter(
                ticker=["AAPL", "IBM"],
                bgn_prd=datetime.datetime(2017, 4, 1),
                end_prd=end_prd,
                interval_len=3600,
                ascend=True,
                interval_type='s'),
                                                  sync_timestamps=False)
            requested_data = requested_data.swaplevel(0, 1).sort_index()
            del requested_data['total_volume']
            del requested_data['number_of_trades']

            # NOTE(review): presumably 'TSG' has no cached bars here, so the
            # three-symbol cache request should equal the two-symbol direct
            # request above — verify against the cache contents.
            cache_requests = InfluxDBOHLCRequest(client=self._client,
                                                 interval_len=3600,
                                                 listeners=listeners)
            _, test_data = cache_requests.request(
                symbol=['IBM', 'AAPL', 'TSG'],
                bgn_prd=datetime.datetime(2017, 4, 1),
                end_prd=end_prd)
            assert_frame_equal(requested_data, test_data)

            # test any symbol request
            requested_data = history.request_data(BarsInPeriodFilter(
                ticker=["AAPL", "IBM"],
                bgn_prd=datetime.datetime(2017, 4, 1),
                end_prd=end_prd,
                interval_len=3600,
                ascend=True,
                interval_type='s'),
                                                  sync_timestamps=False)
            requested_data = requested_data.swaplevel(0, 1).sort_index()

            del requested_data['total_volume']
            del requested_data['number_of_trades']

            e = threading.Event()

            # Event-driven path: firing a 'request_ohlc' event with no
            # symbol should eventually produce a 'cache_result' event whose
            # payload matches the directly requested data.
            def listen(event):
                if event['type'] == 'cache_result':
                    assert_frame_equal(requested_data, event['data'][0])
                    e.set()

            listeners += listen

            listeners({
                'type': 'request_ohlc',
                'data': {
                    'bgn_prd': datetime.datetime(2017, 4, 1),
                    'end_prd': end_prd
                }
            })

            # Block until the asynchronous cache_result has been verified.
            e.wait()

            streaming_conn.disconnect()
# --- Example #10 ---
    def test_exclude_splits_performance(self):
        """Stress-test exclude_splits over a wide synthetic frame.

        Builds ``batch_width`` randomly sampled copies of one real symbol's
        bars, replicates that symbol's split events under every synthetic
        symbol, then runs (and logs timings for) exclude_splits over the
        combined multi-symbol frame.
        """
        logging.basicConfig(level=logging.DEBUG)

        # batch_len: bars per symbol; batch_width: number of synthetic
        # symbol copies to generate.
        batch_len = 15000
        batch_width = 4000

        now = datetime.datetime.now()
        with IQFeedHistoryProvider() as provider:
            df1 = provider.request_data(BarsFilter(ticker="PLUS",
                                                   interval_len=3600,
                                                   interval_type='s',
                                                   max_bars=batch_len),
                                        sync_timestamps=False)

            # Synthesize extra symbols by sampling between 1/3 and all-but-
            # one of the original rows, so lengths vary per symbol.
            df = {'PLUS': df1}
            for i in range(batch_width):
                df['PLUS_' + str(i)] = df1.sample(
                    random.randint(int(len(df1) / 3),
                                   len(df1) - 1))

            df = pd.concat(df, sort=True)
            df.index.set_names(['symbol', 'timestamp'], inplace=True)
            df['include'] = True
            data = df['include']

            conn = iq.QuoteConn()
            conn.connect()
            try:
                sd = get_splits_dividends("PLUS", conn=conn).xs('split',
                                                                level='type')
            finally:
                conn.disconnect()

            # Replicate PLUS's split events under every symbol so each
            # symbol in `data` has matching split rows.
            splits = list()
            for l in df.index.levels[0]:
                ind_cp = sd.index.set_levels([l], level=1)
                # NOTE(review): this mutates the MultiIndex's values array
                # in place to rewrite the symbol component of each entry —
                # fragile; confirm it still works with the pandas version in
                # use before relying on it elsewhere.
                for i, v in enumerate(sd):
                    ind_cp.values[i] = (sd.index.values[i][0], l,
                                        sd.index.values[i][2])

                cp = pd.DataFrame(data=sd.values, index=ind_cp)

                splits.append(cp)

            splits = pd.concat(splits, sort=True)

            logging.getLogger(__name__).debug('Random data generated in ' +
                                              str(datetime.datetime.now() -
                                                  now) + ' with shapes ' +
                                              str(df.shape))

            now = datetime.datetime.now()

            result = exclude_splits(data, splits, 10)

            logging.getLogger(__name__).debug('Task done in ' +
                                              str(datetime.datetime.now() -
                                                  now) + ' with shapes ' +
                                              str(result.shape))

            # Some rows must have been excluded (False) and some kept (True).
            self.assertTrue(result[~result].size > 10)
            self.assertTrue(result[result].size > 0)