def handle_bar(context, bar_dict):
    """Validate that the bar pushed by the engine agrees with ``history_bars``
    output, then alternately open and close a full position.

    :param context: strategy context (expects ``s1`` and ``fired`` attributes)
    :param bar_dict: mapping of order_book_id -> bar object
    """
    logger.info(bar_dict[context.s1])
    bar = bar_dict[context.s1]
    now_int = convert_dt_to_int(context.now)
    bar_int = convert_dt_to_int(bar.datetime)
    try:
        # the bar timestamp should be within one minute of the engine clock
        assert -timedelta(minutes=1) < bar.datetime - context.now < timedelta(minutes=1)
    except AssertionError:
        print(now_int)
        print(bar)
        raise  # bare raise preserves the original traceback (was ``raise e``)
    frequencies = ["1m", "5m", "15m"]
    lengths = [10, 100]
    for l, f in itertools.product(lengths, frequencies):
        df1 = pd.DataFrame(history_bars(context.s1, l, f))
        df2 = pd.DataFrame(history_bars(context.s1, l, f, include_now=True))
        try:
            # the include_now series must end at the current bar; the plain
            # series must end at the same bar or exactly one bar earlier
            assert df2["datetime"].iloc[-1] == bar_int
            assert df1["datetime"].iloc[-1] == df2["datetime"].iloc[-1] or \
                   df1["datetime"].iloc[-1] == df2["datetime"].iloc[-2]
        except AssertionError:
            print(now_int)
            print(bar_int)
            print(df1)
            print(df2)
            raise
    # test order: flip between fully invested and flat on alternate bars
    if not context.fired:
        # order_percent(s1, 1) buys the stock up to 100% of the portfolio
        order_percent(context.s1, 1)
        context.fired = True
    else:
        order_percent(context.s1, 0)
        context.fired = False
 def test_get_bar(self):
     """Daily and minute bars fetched at the last trading date must both
     carry that date (as int) in their first field."""
     instrument = self.get_stock()
     dt = self.get_last_trading_date()
     expected = convert_dt_to_int(dt)
     daily_bar = self.source.get_bar(instrument, dt, "1d")
     print(daily_bar)
     minute_bar = self.source.get_bar(instrument, dt, "1m")
     print(minute_bar)
     assert expected == daily_bar[0]
     assert expected == minute_bar[0]
 def test_start_length_without_history(self):
     """A same-day (start_dt, length) query must start exactly at start_dt
     and return exactly ``length`` bars."""
     end = datetime.now()
     start = datetime.combine(end.date(), time=time(hour=9, minute=31))
     # removed unused local ``ei = convert_dt_to_int(end)``
     si = convert_dt_to_int(start)
     frequency = "1m"
     length = 2
     data = self.source.raw_history_bars(self.instrument, frequency, start_dt=start, length=length)
     dts = data["datetime"]
     # only meaningful once the session has produced bars
     if len(dts) and start <= end:
         assert dts[0] == si
         assert len(dts) == length
Exemple #4
0
 def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None):
     """Serve bar queries by merging historical bars (before today, from
     ``self._hist_source``) with today's intraday bars (``self._inday_bars``).

     Exactly one of the pairs (start_dt, end_dt), (start_dt, length) or
     (end_dt, length) must be supplied; anything else raises RuntimeError.
     """
     env = Environment.get_instance()
     now = env.calendar_dt
     today = now.date()
     today_int = convert_date_to_date_int(today)
     # last instant of the previous trading day, caps history-side queries
     yesterday = datetime.combine(env.data_proxy.get_previous_trading_date(today),
                                  time=time(hour=23, minute=59, second=59))
     history_bars = EMPTY_BARS
     today_bars = EMPTY_BARS
     if end_dt and start_dt:
         end_dt = min(now, end_dt)
         if start_dt > end_dt:
             return EMPTY_BARS
         # BUG FIX: ``end_dt.date == today`` compared the bound method
         # itself (always False), so today's intraday bars were never
         # included for (start_dt, end_dt) queries. The method must be called.
         if end_dt.date() == today:
             start_time = convert_dt_to_int(start_dt) % 1000000 if start_dt.date() == today else None
             end_time = convert_dt_to_int(end_dt) % 1000000
             today_bars = self._inday_bars.bars(instrument, frequency, today_int,
                                                start_time, end_time)
         if start_dt.date() < today:
             history_bars = self._hist_source.raw_history_bars(
                 instrument, frequency,
                 start_dt=start_dt,
                 end_dt=min(end_dt, yesterday)
             )
     elif start_dt and length:
         if start_dt.date() > today:
             return EMPTY_BARS
         if start_dt.date() < today:
             history_bars = self._hist_source.raw_history_bars(
                 instrument, frequency, start_dt=start_dt, length=length)
         # fill the remainder of the requested length from today's bars
         left = length - len(history_bars) if history_bars is not None else length
         start_time = convert_dt_to_int(start_dt) % 1000000 if start_dt.date() == today else None
         today_bars = self._inday_bars.get_bars(instrument, frequency,
                                                today_int, start_time)[:left]
     elif end_dt and length:
         end_dt = min(now, end_dt)
         if end_dt.date() == today:
             end_time = convert_dt_to_int(end_dt) % 1000000
             today_bars = self._inday_bars.get_bars(instrument, frequency, today_int,
                                                    end_time=end_time)[-length:]
         # whatever today's bars cannot cover comes from history
         left = length - len(today_bars) if today_bars is not None else length
         if left > 0:
             history_bars = self._hist_source.raw_history_bars(
                 instrument, frequency, end_dt=min(end_dt, yesterday), length=left)
     else:
         raise RuntimeError
     if history_bars is not None and today_bars is not None:
         return np.concatenate([history_bars, today_bars])
     elif history_bars is not None:
         return history_bars
     else:
         return today_bars
 def test_start_end_without_history(self):
     """A same-day (start_dt, end_dt) query starts exactly at start_dt and
     ends near end_dt; an inverted range yields no data."""
     end = datetime.now()
     start = datetime.combine(end.date(), time=time(hour=9, minute=33))
     end_i = convert_dt_to_int(end)
     start_i = convert_dt_to_int(start)
     frequency = "1m"
     data = self.source.raw_history_bars(self.instrument, frequency,
                                         start_dt=start, end_dt=end)
     dts = data["datetime"]
     if start > end:
         assert len(data) == 0
     else:
         assert dts[0] == start_i
         assert 0 < dts[-1] - end_i < 100
 def test_start_end_with_history(self):
     """A (start_dt, end_dt) query spanning two sessions must bridge from
     yesterday 15:00:00 straight to today 09:31:00."""
     end = datetime.now()
     start = datetime.combine(end.date() - timedelta(days=1), time(hour=9, minute=33))
     frequency = "1m"
     end_i = convert_dt_to_int(end)
     start_i = convert_dt_to_int(start)
     today_i = end_i // 1000000 * 1000000
     yesterday_i = start_i // 1000000 * 1000000
     data = self.source.raw_history_bars(self.instrument, frequency,
                                         start_dt=start, end_dt=end)
     dts = data["datetime"]
     # boundary bars on each side of midnight
     assert dts[dts > today_i][0] == today_i + 93100
     assert dts[dts < today_i][-1] == yesterday_i + 150000
     assert dts[0] == start_i
     assert 0 < dts[-1] - end_i < 100
 def test_start_length_with_history(self):
     """A (start_dt, length) query starting yesterday must return exactly
     ``length`` bars bridging the session boundary."""
     end = datetime.now()
     start = datetime.combine(end.date() - timedelta(days=1), time=time(hour=9, minute=31))
     end_i = convert_dt_to_int(end)
     start_i = convert_dt_to_int(start)
     today_i = end_i // 1000000 * 1000000
     yesterday_i = start_i // 1000000 * 1000000
     length = 300
     frequency = "1m"
     data = self.source.raw_history_bars(self.instrument, frequency,
                                         start_dt=start, length=length)
     dts = data["datetime"]
     # boundary bars on each side of midnight
     assert dts[dts > today_i][0] == today_i + 93100
     assert dts[dts < today_i][-1] == yesterday_i + 150000
     assert dts[0] == start_i
     assert len(dts) == length
    def history_bars(self,
                     instrument,
                     bar_count,
                     frequency,
                     fields,
                     dt,
                     skip_suspended=True,
                     include_now=False,
                     adjust_type='pre',
                     adjust_orig=None):
        """Return up to ``bar_count`` bars ending at ``dt``.

        Daily bars are delegated to the parent class; only '1m' is served
        here, out of the in-memory cache.
        TODO return adjusted bars, added field 'limit_up', 'limit_down'
        """
        if frequency == '1d':
            return super(QUANTAXISKDataSource, self).history_bars(
                instrument, bar_count, frequency, fields, dt, skip_suspended,
                include_now, adjust_type, adjust_orig)

        if frequency != '1m':
            raise NotImplementedError

        # make sure the cache holds enough bars up to ``dt``, then sort it
        self._cache_count_bars(instrument=instrument, dt=dt, bar_count=bar_count)
        self._sort_cache(instrument.order_book_id)
        cached = self._cache[instrument.order_book_id]
        if not len(cached):
            return cached
        window = cached[cached['datetime'] <= convert_dt_to_int(dt)]
        return window[-bar_count:] if len(window) > bar_count else window
    def get_bar(self,
                instrument,
                dt,
                frequency,
                fields=[],
                adjust_type='none',
                adjust_orig=None):
        """Return the bar at exactly ``dt`` as a dict, or None when absent.

        Daily bars delegate to the parent class; only '1m' is handled here.
        ``fields``/``adjust_type``/``adjust_orig`` are currently unused and
        kept only for interface compatibility.
        TODO return adjusted bars, added field 'limit_up', 'limit_down'
        """
        if frequency == '1d':
            return super(QUANTAXISKDataSource, self).get_bar(
                instrument, dt, frequency)  #the returned type is numy.void
        if frequency != '1m':
            raise NotImplementedError

        order_book_id = instrument.order_book_id

        # (re)load the whole trading day into the cache when it is missing
        if (order_book_id not in self._cache) or (
                dt.strftime('%Y-%m-%d')
                not in self._cached_dates[order_book_id]):
            self._cache_period_bars(instrument,
                                    start_dt=datetime(dt.year, dt.month,
                                                      dt.day, 9),
                                    end_dt=datetime(dt.year, dt.month, dt.day,
                                                    18))
            self._shrink_cache(
                order_book_id
            )  #TODO ensure shrink will not remove the wanted bar, if get_bar is always used to get the latest bar,this won't be a problem

        try:
            dtint = convert_dt_to_int(dt)
            df = self._cache[order_book_id]
            return df[df['datetime'] == dtint].iloc[0].to_dict()
        except Exception:
            # BUG FIX: was a bare ``except:``, which also swallowed
            # SystemExit/KeyboardInterrupt; the best-effort lookup still
            # returns None on any lookup failure.
            return None
 def get_bar(self, instrument, dt, frequency):
     """Return the single bar whose datetime equals ``dt``, or None.

     :type instrument: rqalpha.model.instrument.instrument
     :type dt: datetime.datetime
     :param str frequency: `1d` or `1m`
     :return: numpy.ndarray row or None
     """
     # Both branches shared identical lookup logic; dispatch only on the
     # cycle type and the datetime conversion.
     if frequency == '1d':
         cyc_type = CycType.CYC_DAY
         dt_int = convert_date_to_int(dt)
     elif frequency == '1m':
         cyc_type = CycType.CYC_MINUTE
         dt_int = convert_dt_to_int(dt)
     else:
         raise NotImplementedError
     bars = self.get_stock_data_from_mongo(instrument.order_book_id, cyc_type)
     if bars is None:
         # consistently return None (one branch used a bare ``return``)
         return None
     pos = bars['datetime'].searchsorted(dt_int)
     if pos >= len(bars) or bars['datetime'][pos] != dt_int:
         return None
     return bars[pos]
Exemple #11
0
 def _shrink_cache(self, order_book_id):
     """Drop whole cached days (oldest first) until the cache fits within
     ``self._cache_size``, always keeping at least one day."""
     self._sort_cache(order_book_id)
     while (len(self._cache[order_book_id]) > self._cache_size
            and len(self._cached_dates[order_book_id]) > 1):
         # cut everything strictly before the second-oldest cached date
         second_day = self._cached_dates[order_book_id][1]
         cutoff = convert_dt_to_int(datetime.strptime(second_day, '%Y-%m-%d'))
         frame = self._cache[order_book_id]
         self._cache[order_book_id] = frame[frame['datetime'] > cutoff]
         self._cached_dates[order_book_id] = self._cached_dates[order_book_id][1:]
Exemple #12
0
 def get_bar(self, instrument, dt, frequency):
     """Return the bar ending at ``dt``; when the last available bar is too
     far from ``dt``, synthesize a flat zero-volume bar at the last close."""
     num = int(frequency[:-1])
     unit = frequency[-1]
     if self.is_base_frequency(instrument, frequency):
         bars = self.raw_history_bars(instrument,
                                      frequency,
                                      end_dt=dt,
                                      length=1)
     elif unit == "m":
         # resample from 1-minute bars when the frequency is a multiple
         bars = self._resample_bars(
             self.raw_history_bars(instrument, "1" + unit, end_dt=dt, length=num),
             frequency)
     else:
         return super(OddFrequencyDataSource,
                      self).get_bar(instrument, dt, frequency)
     if bars is None or not bars.size:
         return super(OddFrequencyDataSource,
                      self).get_bar(instrument, dt, frequency)
     dti = convert_dt_to_int(dt)
     # TODO num * TIME_TOLERANCE[freq] maybe some problem in "d" frequency
     if abs(bars[-1]["datetime"] - dti) < num * TIME_TOLERANCE[unit]:
         return bars[-1]
     # no close-enough bar: fabricate a flat bar at the previous close
     synthetic = bars[-1].copy()
     synthetic["datetime"] = dti
     for field in ("open", "high", "low"):
         synthetic[field] = synthetic["close"]
     synthetic["volume"] = 0
     return synthetic
 def test_raw_history_bars(self):
     """The cache grows only when a request reaches past the cached range."""
     source = MongoCacheDataSource(self.path, self.mongo_url)
     start = parse("2012-06-01 9:31:00")
     si = convert_dt_to_int(start)
     frequency = "1m"
     # initial fill: exactly one cache window starting at ``start``
     first = source.raw_history_bars(self._instrument,
                                     frequency,
                                     start_dt=start,
                                     length=source.CACHE_LENGTH)
     cache_start, _, cache_len = self.get_cache_info(source, frequency)
     assert cache_start == si and cache_len == source.CACHE_LENGTH
     # a query fully inside the cached window must not grow the cache
     data = source.raw_history_bars(self._instrument,
                                    frequency,
                                    end_dt=convert_int_to_datetime(
                                        first["datetime"][-1]),
                                    length=source.CACHE_LENGTH)
     cache_start, _, cache_len = self.get_cache_info(source, frequency)
     assert cache_start == si and cache_len == source.CACHE_LENGTH
     # a query running past the window's end doubles the cached length
     next_ = source.raw_history_bars(self._instrument,
                                     frequency,
                                     start_dt=convert_int_to_datetime(
                                         first["datetime"][5]),
                                     length=source.CACHE_LENGTH)
     cache_start, _, cache_len = self.get_cache_info(source, frequency)
     assert cache_start == si and cache_len == source.CACHE_LENGTH * 2
     assert (first == data).all()
     print(pd.DataFrame(next_))
Exemple #14
0
    def _cache_period_bars(self,
                           instrument,
                           start_dt,
                           end_dt,
                           frequency='1m',
                           fields=[],
                           adjust_type='pre',
                           adjust_orig=None):
        """Fetch 1m bars for [start_dt, end_dt] from GM and append them to
        the per-instrument cache; both endpoints are included.

        ``fields``/``adjust_type``/``adjust_orig`` are currently unused and
        kept only for interface compatibility. Only ``frequency='1m'`` is
        supported; anything else raises NotImplementedError.
        """
        if frequency != '1m':
            raise NotImplementedError

        order_book_id = instrument.order_book_id

        # GM security code: exchange prefix mapped from the rqalpha suffix
        sec = rq2gm[order_book_id[-5:]] + order_book_id[:6]
        sec_dfs, trading_dates = db.get_data_GM(sec_list=[sec],
                                                start_dt=start_dt,
                                                end_dt=end_dt)
        if (not sec_dfs) or (not len(sec_dfs[sec])):
            df = pd.DataFrame()
        else:
            df = sec_dfs[sec]
            # apply the converter directly instead of a lambda wrapper
            df['datetime'] = df['datetime'].apply(convert_dt_to_int)

        if order_book_id in self._cache:
            self._cache[order_book_id] = pd.concat(
                [self._cache[order_book_id], df], ignore_index=True)
        else:
            self._cache[order_book_id] = df
        # BUG FIX: a bare ``except:`` used to hide any error here; only a
        # missing key should trigger initialization.
        if order_book_id in self._cached_dates:
            self._cached_dates[order_book_id] += trading_dates
            # self._cached_dates[order_book_id] = list(set(self._cached_dates[order_book_id]))
        else:
            self._cached_dates[order_book_id] = trading_dates
Exemple #15
0
 def raw_history_bars(self,
                      start_dt=None,
                      end_dt=None,
                      length=None,
                      updated=False):
     """Serve a bar query out of the cached array ``self._data``; when the
     cache cannot satisfy it, ask the source to update the cache once and
     retry (``updated`` guards against infinite recursion).

     Returns the matching slice of the cache, or None when the request
     falls outside the cached range even after one update.
     """
     bars = self._data
     if bars is not None:
         # NOTE: end_pos/end_dti (resp. start_pos/start_dti) only exist
         # when end_dt (resp. start_dt) is given; the branches below rely
         # on that pairing.
         if end_dt:
             if self._frequency.endswith("d"):
                 # daily frequency: normalize end_dt to the session close
                 end_dt = end_dt.replace(hour=15, minute=00)
             end_dti = np.uint64(convert_dt_to_int(end_dt))
             end_pos = bars["datetime"].searchsorted(end_dti, side="right")
         if start_dt:
             start_dti = np.uint64(convert_dt_to_int(start_dt))
             start_pos = bars["datetime"].searchsorted(start_dti,
                                                       side="left")
         if start_dt and end_dt:
             # end is inside the cache (or exactly the last cached bar)
             if end_pos < len(bars) or bars[-1]["datetime"] == end_dti:
                 if start_pos == 0 and bars[0][
                         "datetime"] != start_dti:  # start datetime is earlier than the cached range
                     return None
                 else:
                     return bars[start_pos:end_pos]
                     # else update the cache
         elif length is not None:
             if end_dt:
                 if end_pos < len(bars) or bars[-1]["datetime"] == end_dti:
                     if end_pos - length < 0:
                         # not enough bars cached before end_dt
                         return None
                     else:
                         return bars[end_pos - length:end_pos]
                         # else update the cache
             elif start_dt:
                 if start_pos == 0 and bars[0]["datetime"] != start_dti:
                     return None
                 if start_pos + length <= len(bars):
                     return bars[start_pos:start_pos + length]
                     # else update the cache
     # update the cache
     if not self._finished and not updated:
         self._source.update_cache(self, end_dt or start_dt)
         # retry exactly once with the refreshed cache
         return self.raw_history_bars(start_dt,
                                      end_dt,
                                      length,
                                      updated=True)
     return None
 def test_end_length_without_history(self):
     """An (end_dt, length) query for the current session returns exactly
     ``length`` bars ending near ``end_dt``."""
     end = datetime.now()
     end_i = convert_dt_to_int(end)
     frequency = "1m"
     length = 2
     data = self.source.raw_history_bars(self.instrument, frequency,
                                         end_dt=end, length=length)
     assert len(data) == length
     assert data["datetime"][-1] - end_i < 100
Exemple #17
0
 def get_bar(self, instrument, dt, frequency):
     """Build a bar dict from the realtime quote table, stamped with ``dt``;
     returns None on any lookup/conversion failure."""
     try:
         row = self.realtime_quotes.loc[instrument.order_book_id]
         bar = row[bar_fields].to_dict()
         bar['datetime'] = convert_dt_to_int(dt)
         return bar
     except Exception as e:
         print(e)
     return None
 def get_bar(self, instrument, dt, frequency):
     """Build a bar dict from the price-board snapshot, stamped with ``dt``;
     logs and returns None on any lookup/conversion failure."""
     try:
         row = self._env.price_board.snapshot.loc[
             instrument.order_book_id]
         bar = row[bar_fields].to_dict()
         bar['datetime'] = convert_dt_to_int(dt)
         return bar
     except Exception as e:
         user_system_log.warn(repr(e))
     return None
    def history_bars(self,
                     instrument,
                     bar_count,
                     frequency,
                     fields,
                     dt,
                     skip_suspended=True,
                     include_now=False):
        """Return up to ``bar_count`` bars ending at ``dt``.

        :type instrument: rqalpha.model.instrument.instrument
        :type bar_count: int
        :param str frequency: `1d` or `1m`
        :type fields: str or list[str]
        :type dt: datetime.datetime
        :return: numpy.ndarray or None when data/fields are unavailable
        """
        # The 1d and 1m branches differed only in cycle type, datetime
        # conversion and suspension filtering; the slicing is shared.
        if frequency == '1d':
            bars = self.get_stock_data_from_mongo(instrument.order_book_id,
                                                  CycType.CYC_DAY)
            if bars is None or not self._are_fields_valid(
                    fields, bars.dtype.names):
                return None
            if skip_suspended and instrument.type == 'CS':
                # suspended sessions carry zero volume
                bars = bars[bars['volume'] > 0]
            return self._slice_last_bars(bars, convert_date_to_int(dt),
                                         bar_count, fields)
        elif frequency == '1m':
            bars = self.get_stock_data_from_mongo(instrument.order_book_id,
                                                  CycType.CYC_MINUTE)
            if bars is None or not self._are_fields_valid(
                    fields, bars.dtype.names):
                return None
            # NOTE: suspension filtering deliberately not applied to 1m data
            return self._slice_last_bars(bars, convert_dt_to_int(dt),
                                         bar_count, fields)
        else:
            raise NotImplementedError

    @staticmethod
    def _slice_last_bars(bars, dt_int, bar_count, fields):
        """Return up to ``bar_count`` bars ending at ``dt_int`` (inclusive),
        optionally restricted to ``fields``."""
        i = bars['datetime'].searchsorted(dt_int, side='right')
        left = i - bar_count if i >= bar_count else 0
        window = bars[left:i]
        return window if fields is None else window[fields]
 def test_suspended(self):
     """history_bars over a suspension period still returns exactly ``l``
     bars ending at ``dt`` for every (length, frequency) combination."""
     fields = ["datetime", "close", "low", "high", "open", "volume"]
     instrument = self.get_letv()
     dt = datetime(year=2018, month=1, day=24, hour=15)
     expected_last = convert_dt_to_int(dt)
     for l, f in itertools.product(lengths, frequencies):
         frame = pd.DataFrame(
             self.source.history_bars(instrument, l, f, fields, dt, adjust_type=None))
         print(instrument, l, f)
         print(frame)
         assert set(frame.columns) == set(fields)
         assert len(frame) == l
         assert expected_last == frame["datetime"].iloc[-1]
 def test_stock(self):
     """history_bars for a regular stock returns exactly ``l`` bars ending
     at the last trading date for every (length, frequency) combination."""
     fields = ["datetime", "close", "low", "high", "open", "volume"]
     instrument = self.get_stock()
     dt = self.get_last_trading_date()
     expected_last = convert_dt_to_int(dt)
     for l, f in itertools.product(lengths, frequencies):
         frame = pd.DataFrame(
             self.source.history_bars(instrument, l, f, fields, dt, adjust_type=None))
         print(instrument, l, f)
         print(frame)
         assert set(frame.columns) == set(fields)
         assert len(frame) == l
         assert expected_last == frame["datetime"].iloc[-1]
    def get_stock_data_from_mongo(self, code, cyc_type):
        """
        Load all bars of one cycle type for ``code`` from MongoDB.

        :param str code: WindCode
        :param cyc_type: Type from CycType
        :return: numpy.ndarray containing only the rows actually filled
        """
        logger.info('Load data from MongoDB: Code = {}, CycType = {}'.format(
            code, cyc_type))
        cursor = self.db[get_col_name(code)].find({
            'cycType': cyc_type
        }, {
            '_id': False,
            'date': True,
            'open': True,
            'close': True,
            'high': True,
            'low': True,
            'volume': True,
            'amount': True
        }).sort("date", pymongo.ASCENDING)

        # the first document only seeds pre_close; it is not emitted as a bar
        pre_close = np.round(cursor.next()['close'], CONVERTER['close'].round)
        data_num = cursor.count()
        dtype = np.dtype([(f, FIELDS[f]) for f in FIELDS.keys()])
        bars = np.zeros(shape=(data_num, ), dtype=dtype)

        i = 0
        for doc in cursor:
            if cyc_type == CycType.CYC_DAY:
                bars[i]['datetime'] = convert_date_to_int(doc['date'])
                # limit up/down derived from previous close (+/-10%),
                # floored/ceiled to 4 decimals before the final rounding
                bars[i]['limit_up'] = np.round(
                    np.floor(pre_close * 11000) / 10000,
                    CONVERTER['limit_up'].round)
                bars[i]['limit_down'] = np.round(
                    np.ceil(pre_close * 9000) / 10000,
                    CONVERTER['limit_down'].round)
            elif cyc_type == CycType.CYC_MINUTE:
                bars[i]['datetime'] = convert_dt_to_int(doc['date'])
            else:
                raise NotImplementedError
            bars[i]['open'] = np.round(doc['open'], CONVERTER['open'].round)
            bars[i]['close'] = np.round(doc['close'], CONVERTER['close'].round)
            bars[i]['high'] = np.round(doc['high'], CONVERTER['high'].round)
            bars[i]['low'] = np.round(doc['low'], CONVERTER['low'].round)
            bars[i]['volume'] = doc['volume']
            bars[i]['total_turnover'] = doc['amount']
            pre_close = doc['close']
            i += 1
        logger.info(
            'Load data from MongoDB finished: Code = {}, CycType = {}'.format(
                code, cyc_type))
        # BUG FIX: ``cursor.count()`` counts the seed document consumed by
        # ``cursor.next()`` as well, so the final row of ``bars`` stayed
        # all-zero; a trailing datetime of 0 breaks callers that rely on a
        # sorted datetime column (searchsorted). Return only filled rows.
        return bars[:i]
 def test_end_length_with_history(self):
     """(end_dt, length) spanning a session boundary: the returned bars must
     bridge from yesterday's 15:00:00 close straight to today's 09:31:00
     open; on failure the inputs and data are dumped for diagnosis."""
     try:
         start = None
         end = datetime.now()
         ei = convert_dt_to_int(end)
         today_i = ei // 1000000 * 1000000
         # midnight of the previous calendar day, as an int timestamp
         yesterday_i = convert_dt_to_int((end - timedelta(days=1)).replace(hour=0, minute=0, second=0))
         frequency = "1m"
         length = 300
         data = self.source.raw_history_bars(self.instrument, frequency, end_dt=end,
                                             length=length)
         dts = data["datetime"]
         # first bar of today is 09:31:00; last bar of yesterday is 15:00:00
         assert dts[dts > today_i][0] == today_i + 93100
         assert dts[dts < today_i][-1] == yesterday_i + 150000
         assert len(data) == length
         assert 0 < dts[-1] - ei < 100
     except Exception as e:
         # dump everything that was computed before the failure
         print("start: {}".format(start))
         print("end: {}".format(end))
         print("length: {}".format(length))
         print("data:\n{}".format(data))
         raise e
Exemple #24
0
def handle_bar(context, bar_dict):
    """Check that history_bars (without include_now) ends at the current
    bar, then buy a full position exactly once."""
    bar = bar_dict[context.s1]
    print(bar)
    assert bar.datetime == context.now
    lengths = [5]
    frequencies = ["1m"]
    expected_last = convert_dt_to_int(context.now)
    for length, frequency in itertools.product(lengths, frequencies):
        frame = pd.DataFrame(history_bars(context.s1, length, frequency))
        print(frame)
        assert len(frame) == length
        assert expected_last == frame["datetime"].iloc[-1]
    if not context.fired:
        # order_percent with 1 buys the stock up to 100% of the portfolio
        order_percent(context.s1, 1)
        context.fired = True
 def get_bar(self, instrument, dt, frequency):
     """Return the bar whose datetime is exactly ``dt``; None when the
     latest available bar does not match ``dt``."""
     if self.is_base_frequency(instrument, frequency):
         bar_data = self.raw_history_bars(instrument, frequency, end_dt=dt, length=1)
     else:
         count = int(frequency[:-1])
         unit = frequency[-1]
         if unit != "m":
             return super(OddFrequencyDataSource, self).get_bar(instrument, dt, frequency)
         # resample from 1-minute bars when the frequency is a multiple
         bar_data = self._resample_bars(
             self.raw_history_bars(instrument, "1" + unit, end_dt=dt, length=count),
             frequency)
     if bar_data is None or not bar_data.size:
         return super(OddFrequencyDataSource, self).get_bar(
             instrument, dt, frequency
         )
     last = bar_data[-1]
     return last if last["datetime"] == convert_dt_to_int(dt) else None
 def bars(self, l, r, fields=None):
     """Read bars [l, r) from per-field redis lists into a structured array.

     :param l: left index (inclusive) into the redis lists
     :param r: right index (exclusive)
     :param fields: field names to fetch; defaults to ``self.ALL_FIELDS``
     :return: numpy structured array of length ``r - l``
     """
     if fields is None:
         fields = self.ALL_FIELDS
     dtype = OrderedDict([(f, np.uint64 if f == "datetime" else np.float64) for f in fields])
     length = r - l
     result = np.empty(shape=(length,), dtype=list(dtype.items()))
     if not length:
         return result
     result.fill(np.nan)
     for field in fields:
         value = self._client.lrange(self._get_redis_key(field), l, r - 1)
         if field == "datetime":
             value = [convert_dt_to_int(parse(x.decode())) for x in value]
         else:
             # BUG FIX: ``np.str`` was removed in NumPy 1.20+; decode the
             # raw bytes and convert straight to float64 in one step.
             value = np.array([x.decode() for x in value], dtype=np.float64)
         # redis may hold fewer entries than requested; fill what we got
         result[:len(value)][field] = value[:]
     return result
Exemple #27
0
    def _cache_period_bars(self, instrument, start_dt, end_dt, frequency='1m',
                           fields=[], adjust_type='pre', adjust_orig=None):
        """Resample tick data to 1m bars for [start_dt, end_dt] and append
        them to the per-instrument cache.

        ``fields``/``adjust_type``/``adjust_orig`` are currently unused and
        kept only for interface compatibility. Only ``frequency='1m'`` is
        supported; stocks and indexes are handled, other types return None.
        """
        if frequency != '1m':
            raise NotImplementedError

        # cleanup: order_book_id was assigned twice in the original
        order_book_id = instrument.order_book_id
        code = order_book_id.split(".")[0]

        trading_dates = self.trading_dates_mixin.get_trading_dates(start_dt, end_dt)

        # ``tick`` stays None when no daily data exists for the period
        # (replaces the original ``_is_None`` flag)
        tick = None
        if instrument.type == 'CS':
            data = QA.QA_fetch_stock_day(code, end_dt.strftime('%Y-%m-%d'),
                                         end_dt.strftime('%Y-%m-%d'))
            if data.size > 0:
                tick = QA.QAFetch.QATdx.QA_fetch_get_stock_transaction(
                    code, start_dt.strftime('%Y-%m-%d'), end_dt.strftime('%Y-%m-%d'))
        elif instrument.type == 'INDX':
            data = QA.QA_fetch_index_day(code, end_dt.strftime('%Y-%m-%d'),
                                         end_dt.strftime('%Y-%m-%d'))
            if data.size > 0:
                tick = QA.QAFetch.QATdx.QA_fetch_get_index_transaction(
                    code, start_dt.strftime('%Y-%m-%d'), end_dt.strftime('%Y-%m-%d'))
        else:
            return None

        if tick is not None:
            res = QA.QAData.data_resample.QA_data_tick_resample(tick, type_='1min')
            res_min = res.rename(columns={"vol": "volume"})
            df = res_min.reset_index(level=['datetime', 'code'])
            df = df.loc[df['datetime'] < end_dt]
            df['datetime'] = df['datetime'].apply(convert_dt_to_int)
        else:
            df = pd.DataFrame()

        if order_book_id in self._cache:
            self._cache[order_book_id] = pd.concat(
                [self._cache[order_book_id], df], ignore_index=True)
        else:
            self._cache[order_book_id] = df
        # BUG FIX: a bare ``except:`` used to hide any error here; only a
        # missing key should trigger initialization.
        if order_book_id in self._cached_dates:
            self._cached_dates[order_book_id] += trading_dates
            # self._cached_dates[order_book_id] = list(set(self._cached_dates[order_book_id]))
        else:
            self._cached_dates[order_book_id] = trading_dates
Exemple #28
0
def get_realtime_quotes(code_list, open_only=False):
    """Fetch realtime quotes for ``code_list`` plus the major indexes and
    normalize them into a DataFrame indexed by order_book_id.

    :param code_list: sequence of tushare stock codes
    :param open_only: when True, drop rows whose ``open`` is 0
    """
    import tushare as ts
    import pandas as pd

    # tushare limits a single request, so interleave the list into
    # loop_cnt roughly equal batches of at most ~800 codes each
    max_len = 800
    loop_cnt = int(math.ceil(float(len(code_list)) / max_len))

    # BUG FIX: DataFrame.append was deprecated and removed in pandas 2.x;
    # build the frames and concatenate once instead of reduce(append).
    frames = [ts.get_realtime_quotes(list(code_list[i::loop_cnt]))
              for i in range(loop_cnt)]
    total_df = pd.concat(frames)
    total_df["is_index"] = False

    index_symbol = ["sh", "sz", "hs300", "sz50", "zxb", "cyb"]
    index_df = ts.get_realtime_quotes(index_symbol)
    index_df["code"] = index_symbol
    index_df["is_index"] = True
    total_df = pd.concat([total_df, index_df])

    # coerce every numeric-looking column; blanks become 0
    columns = set(total_df.columns) - set(["name", "time", "date", "code"])
    for label in columns:
        total_df[label] = total_df[label].map(lambda x: 0 if str(x).strip() == "" else x)
        total_df[label] = total_df[label].astype(float)

    total_df["chg"] = total_df["price"] / total_df["pre_close"] - 1

    total_df["order_book_id"] = total_df["code"]
    total_df["order_book_id"] = total_df["order_book_id"].apply(tushare_code_2_order_book_id)

    total_df = total_df.set_index("order_book_id").sort_index()

    total_df["datetime"] = total_df["date"] + " " + total_df["time"]
    total_df["datetime"] = total_df["datetime"].apply(lambda x: convert_dt_to_int(datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S")))

    total_df["close"] = total_df["price"]
    total_df["last"] = total_df["price"]

    # ST stocks have tighter 5% price limits than the regular 10%
    total_df["limit_up"] = total_df.apply(lambda row: row.pre_close * (1.1 if "ST" not in row["name"] else 1.05), axis=1).round(2)
    total_df["limit_down"] = total_df.apply(lambda row: row.pre_close * (0.9 if "ST" not in row["name"] else 0.95), axis=1).round(2)

    if open_only:
        total_df = total_df[total_df.open > 0]

    return total_df
Exemple #29
0
 def current_snapshot(self, instrument, frequency, dt):
     """Build a daily snapshot tick from cached minute bars up to ``dt``.

     NOTE(review): ``bar_count`` is not defined in this scope or in the
     signature — the first call below raises NameError as written; confirm
     where ``bar_count`` was meant to come from.
     NOTE(review): ``df.resample('d', ...)`` needs a DatetimeIndex, but the
     cache appears to hold integer datetimes elsewhere in this class —
     verify the cache's index before trusting this path.
     """
     # aggregation rules for collapsing minute bars into one daily bar
     CONVERSION = {
         'open': 'first',
         'high': 'max',
         'low': 'min',
         'close': 'last',
         'volume': 'sum'
     }
     self._cache_count_bars(instrument=instrument,dt=dt,bar_count=bar_count)
     self._sort_cache(instrument.order_book_id)
     df = self._cache[instrument.order_book_id]
     # keep only bars at or before the requested timestamp
     df = df[df['datetime']<= convert_dt_to_int(dt)]
     df_d=df.resample('d',closed='right').apply(CONVERSION).dropna()

     try:
         snapshot_dict = df_d.to_dict()
     except KeyError:
         return None
     snapshot_dict["last"] = snapshot_dict["close"]
     snapshot_dict["datetime"] = pd.Timestamp(snapshot_dict["datetime"]).to_pydatetime()
     return TickObject(instrument, snapshot_dict)
 def history_bars(self, instrument, bar_count, frequency, fields, dt,
                  skip_suspended=True, include_now=False,
                  adjust_type='pre', adjust_orig=None):
     """Return up to ``bar_count`` bars ending at ``dt``.

     Base frequencies are served directly; "Nm" frequencies are built by
     resampling 1-minute bars. Unsupported units fall back to the parent
     implementation.

     NOTE(review): in the base-frequency branch ``raw_history_bars`` may
     return None, which then reaches ``bar_data[fields]`` below and would
     raise — confirm whether the source can return None here.
     """
     if self.is_base_frequency(instrument, frequency):
         bar_data = self.raw_history_bars(instrument, frequency, end_dt=dt, length=bar_count)
     else:
         num = int(frequency[:-1])
         freq = frequency[-1]
         if freq == "m":
             # fetch one extra resampled bucket so a trailing partial
             # bucket can be dropped when include_now is False
             lower_bar_count = (bar_count + 1) * num
             bar_data = self.raw_history_bars(instrument, "1" + freq, end_dt=dt, length=lower_bar_count)
             if bar_data is None:
                 return super(OddFrequencyDataSource, self).history_bars(
                     instrument, bar_count, frequency, fields, dt,
                     skip_suspended=skip_suspended, include_now=include_now,
                     adjust_type=adjust_type, adjust_orig=adjust_orig
                 )
             else:
                 if bar_data.size:
                     bar_data = self._resample_bars(bar_data, frequency)
                     dti = convert_dt_to_int(dt)
                     # last resampled bar is incomplete (its stamp is not
                     # exactly dt): drop it unless include_now is requested
                     if bar_data["datetime"][-1] != dti and not include_now:
                         bar_data = bar_data[:-1]
                         bar_data = bar_data[-bar_count:]
                     else:
                         bar_data = bar_data[-bar_count:]
                         # TODO apply price adjustment and skip suspended bars
         else:
             return super(OddFrequencyDataSource, self).history_bars(
                     instrument, bar_count, frequency, fields, dt,
                     skip_suspended=skip_suspended, include_now=include_now,
                     adjust_type=adjust_type, adjust_orig=adjust_orig
             )
             # if fields is not None:
             #     if not isinstance(fields, six.string_types):
             #         fields = [field for field in fields if field in bar_data]
     return bar_data if fields is None else bar_data[fields]