Example #1
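This test exercises exclude_splits with both a plain index (one ticker) and a symbol/timestamp MultiIndex (two tickers): hourly bars are fetched through IQFeedHistoryProvider, split events are pulled with get_splits_dividends over a QuoteConn, and the test asserts that exactly 10 bars end up flagged for exclusion.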
    def test_exclude_splits(self):
        with IQFeedHistoryProvider() as provider:
            # single index
            f = BarsInPeriodFilter(ticker="PLUS",
                                   bgn_prd=datetime.datetime(2017, 3, 31),
                                   end_prd=datetime.datetime(2017, 4, 5),
                                   interval_len=3600,
                                   ascend=True,
                                   interval_type='s',
                                   max_ticks=100)

            data = provider.request_data(f, sync_timestamps=False)
            # mark every bar as included and keep only the boolean series
            data['include'] = True
            data = data['include'].copy()

            conn = iq.QuoteConn()
            conn.connect()
            try:
                sd = get_splits_dividends(f.ticker, conn=conn)
            finally:
                conn.disconnect()

            # build a boolean mask that excludes bars around each split event
            result = exclude_splits(data, sd['value'].xs('split',
                                                         level='type'), 10)

            self.assertEqual(result[~result].size, 10)

            # multiindex
            f = BarsInPeriodFilter(ticker=["PLUS", "IBM"],
                                   bgn_prd=datetime.datetime(2017, 3, 31),
                                   end_prd=datetime.datetime(2017, 4, 5),
                                   interval_len=3600,
                                   ascend=True,
                                   interval_type='s',
                                   max_ticks=100)

            data = provider.request_data(f, sync_timestamps=False)
            data['include'] = True
            data = data['include'].copy()

            conn = iq.QuoteConn()
            conn.connect()
            try:
                sd = get_splits_dividends(f.ticker, conn=conn)
            finally:
                conn.disconnect()

            result = exclude_splits(data, sd['value'].xs('split',
                                                         level='type'), 10)

            self.assertEqual(result[~result].size, 10)
Example #2
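A streaming callback that buffers incoming history bars per symbol. Once mkt_snapshot_depth bars have accumulated, it converts the buffer to a DataFrame, optionally back-adjusts prices using the events returned by get_splits_dividends, and notifies the listeners.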
    def process_history_bar(self, bar_data: np.ndarray) -> None:
        # unwrap single-row arrays and take a copy of the bar data
        bar_data = (bar_data[0] if len(bar_data) == 1 else bar_data).copy()

        symbol = bar_data[0].decode("ascii")

        # lazily create the per-symbol bar buffer
        if self.watched_symbols[symbol] is None:
            self.watched_symbols[symbol] = list()

        self.watched_symbols[symbol].append(bar_data)

        # once the requested snapshot depth is reached, convert the buffered
        # bars to a DataFrame and optionally back-adjust for splits/dividends
        if len(self.watched_symbols[symbol]) == self.mkt_snapshot_depth:
            df = self._bars_to_df(self.watched_symbols[symbol])

            if self.adjust_history:
                adjust_df(df, get_splits_dividends(symbol,
                                                   self.streaming_conn))

            self.watched_symbols[symbol] = df

            self.listeners({
                'type': 'history_bars',
                'data': df,
                'symbol': symbol,
                'interval_type': self.interval_type,
                'interval_len': self.interval_len
            })
Example #3
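A generator that walks the configured filters, requests data for each, optionally applies split/dividend adjustments (reordering the MultiIndex levels as needed), and yields every batch together with its filter. A no_data event is fired once the provider is exhausted.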
    def next_batch(self):
        for f in self.filter_provider:
            logging.getLogger(__name__).info("Loading data for filter " + str(f))

            d = self.request_data(f, sync_timestamps=self.sync_timestamps)

            if d is None:
                break

            # move the timestamp level first so adjustments can be applied
            if isinstance(d.index, pd.MultiIndex) and (self.adjust_data or self.timestamp_first):
                d = d.swaplevel(0, 1)
                d.sort_index(inplace=True)

            # back-adjust for splits/dividends, then restore the original
            # level order if the caller expects the symbol level first
            if self.adjust_data:
                adjustments = get_splits_dividends(symbol=f.ticker, conn=self.streaming_conn)
                adjust_df(data=d, adjustments=adjustments)

                if isinstance(d.index, pd.MultiIndex) and not self.timestamp_first:
                    d = d.swaplevel(0, 1)

            self.current_filter = f
            self.current_batch = d

            yield d, f

        # signal listeners that the filter provider is exhausted
        self.listeners({'type': 'no_data'})
Example #4
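An integration test of the PostgreSQL adjustments cache: adjustments for four symbols are stored as JSON via insert_df_json, read back with request_adjustments over a deliberately wide date window, and compared against the original DataFrame.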
    def test_update_adjustments(self):
        table_name = 'adjustments_test'
        url = 'postgresql://*****:*****@localhost:5432/test'

        con = psycopg2.connect(url)
        con.autocommit = True

        try:
            # fetch split/dividend events for a set of symbols
            adjustments = get_splits_dividends({'IBM', 'AAPL', 'GOOG', 'MSFT'})

            cur = con.cursor()

            # create the JSON table and persist the adjustments
            cur.execute(create_json_data.format(table_name))

            insert_df_json(con, table_name, adjustments)

            now = datetime.datetime.now()

            # read the adjustments back over a wide date window
            df = request_adjustments(
                con,
                table_name,
                symbol=['IBM', 'AAPL', 'MSFT', 'GOOG'],
                bgn_prd=datetime.datetime(year=now.year - 30,
                                          month=now.month,
                                          day=now.day),
                end_prd=datetime.datetime(year=now.year + 2,
                                          month=now.month,
                                          day=now.day),
                provider='iqfeed')

            self.assertFalse(df.empty)
            assert_frame_equal(adjustments, df)
        finally:
            con.cursor().execute(
                "DROP TABLE IF EXISTS {0};".format(table_name))
Example #5
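A performance test for exclude_splits: it replicates one symbol's bar history into thousands of randomly sampled synthetic symbols, copies the real split events onto each of them, and logs how long the exclusion takes on the resulting wide dataset.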
    def test_exclude_splits_performance(self):
        logging.basicConfig(level=logging.DEBUG)

        batch_len = 15000
        batch_width = 4000

        now = datetime.datetime.now()
        with IQFeedHistoryProvider() as provider:
            df1 = provider.request_data(BarsFilter(ticker="PLUS",
                                                   interval_len=3600,
                                                   interval_type='s',
                                                   max_bars=batch_len),
                                        sync_timestamps=False)

            # replicate the real PLUS series into thousands of randomly
            # sampled synthetic symbols
            df = {'PLUS': df1}
            for i in range(batch_width):
                df['PLUS_' + str(i)] = df1.sample(
                    random.randint(int(len(df1) / 3),
                                   len(df1) - 1))

            df = pd.concat(df, sort=True)
            df.index.set_names(['symbol', 'timestamp'], inplace=True)
            df['include'] = True
            data = df['include']

            conn = iq.QuoteConn()
            conn.connect()
            try:
                sd = get_splits_dividends("PLUS", conn=conn).xs('split',
                                                                level='type')
            finally:
                conn.disconnect()

            # replicate the PLUS split events for every synthetic symbol by
            # rewriting the symbol level of each index entry
            splits = list()
            for l in df.index.levels[0]:
                ind_cp = sd.index.set_levels([l], level=1)
                for i, v in enumerate(sd):
                    ind_cp.values[i] = (sd.index.values[i][0], l,
                                        sd.index.values[i][2])

                cp = pd.DataFrame(data=sd.values, index=ind_cp)

                splits.append(cp)

            splits = pd.concat(splits, sort=True)

            logging.getLogger(__name__).debug('Random data generated in ' +
                                              str(datetime.datetime.now() -
                                                  now) + ' with shapes ' +
                                              str(df.shape))

            now = datetime.datetime.now()

            result = exclude_splits(data, splits, 10)

            logging.getLogger(__name__).debug('Task done in ' +
                                              str(datetime.datetime.now() -
                                                  now) + ' with shapes ' +
                                              str(result.shape))

            self.assertGreater(result[~result].size, 10)
            self.assertGreater(result[result].size, 0)
Example #6
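A script fragment that downloads split/dividend data for every known IQFeed symbol and caches it as JSON in PostgreSQL. The connection string comes from the -url argument, defaulting to the POSTGRESQL_CACHE environment variable.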
    parser.add_argument('-url',
                        type=str,
                        default=os.environ['POSTGRESQL_CACHE'],
                        help="PostgreSQL connection string")
    parser.add_argument(
        '-symbols_file',
        type=str,
        default=None,
        help="location of a locally saved symbols file "
             "(to avoid re-downloading it every time)")

    args = parser.parse_args()

    con = psycopg2.connect(args.url)
    con.autocommit = True

    all_symbols = set(
        iqutil.get_symbols(symbols_file=args.symbols_file).keys())

    with IQFeedLevel1Listener(listeners=SyncListeners(),
                              fire_ticks=False) as listener:
        # pull split/dividend events for every symbol over the listener's
        # Level 1 connection
        adjustments = get_splits_dividends(all_symbols, listener.conn)

        # rebuild the cache table and store the adjustments as JSON
        table_name = 'json_data'
        cur = con.cursor()
        cur.execute("DROP TABLE IF EXISTS {0};".format(table_name))
        cur.execute(create_json_data.format(table_name))

        insert_df_json(con, table_name, adjustments)