async def evaluate(self, code: str, frame_type: Union[FrameType, str], flag: str, win: int, c1: float, d1: Frame, c2: float, d2: Frame, slip: float = 0.015):
    """Fire a trade signal when the latest close sits near an extrapolated price line.

    A straight line is fit through the two anchors (d1, c1) and (d2, c2),
    then projected forward to the most recent closed frame. If the latest
    close deviates from the projection by no more than `slip` (relative),
    a trade signal is fired.
    """
    frame_type = FrameType(frame_type)

    # slope of the anchor line, expressed in price change per frame
    span = tf.count_frames(d1, d2, frame_type)
    slope = (c2 - c1) / span

    # number of frames from d2 up to the latest closed frame
    ahead = tf.count_frames(d2, tf.floor(arrow.now(), frame_type), frame_type)
    projected = c2 + slope * ahead

    bars = await self.get_bars(code, 1, frame_type)
    latest = bars[-1]
    if abs(projected / latest["close"] - 1) <= slip:
        await self.fire_trade_signal(flag, code, latest["frame"], frame_type)
async def _sync_and_check(self, code, frame_type, start, stop, exp_head=None, exp_tail=None):
    """Sync bars for `code` over [start, stop], then verify the cache is gap-free.

    `stop` defaults to now; `exp_head`/`exp_tail` default to `start`/`stop`.
    """
    stop = stop or arrow.now(cfg.tz).datetime
    exp_head = exp_head or start
    exp_tail = exp_tail or stop

    await syncjobs.sync_bars(
        {"frame_type": frame_type, "start": start, "stop": stop, "secs": [code]}
    )

    bars = await self._cache_get_bars_all(code, frame_type)
    # The k-line data in this unit test should contain no holes. If the
    # head/tail stitching goes wrong, holes appear and the actual number of
    # bars no longer matches the expected count.
    expected_len = tf.count_frames(exp_head, exp_tail, frame_type)
    valid = [bar for bar in bars if not np.isnan(bar["close"])]
    self.assertEqual(expected_len, len(valid))
    self.assertEqual(exp_head, valid[0]["frame"])
    self.assertEqual(exp_tail, valid[-1]["frame"])
async def copy(self, code: str, frame_type: FrameType, start, stop):
    """Capture MA-line features of `code` over [start, stop] as the reference pattern.

    Records the maximum spread between MA5/MA10/MA20 and a degree-1 fit
    (slope and error) of the MA20 line.
    """
    count = tf.count_frames(start, stop, frame_type)
    # fetch 20 extra bars so the longest moving average is defined from `start`
    bars = await self.get_bars(code, count + 20, frame_type, stop)

    feat = features.ma_lines_trend(bars, [5, 10, 20])
    ma5 = feat["ma5"][0]
    ma10 = feat["ma10"][0]
    ma20 = feat["ma20"][0]

    self.max_distance = self.distance(ma5, ma10, ma20)
    err, (a, b) = signal.polyfit(ma20, deg=1)
    self.fitslp_ma20 = a
    self.fiterr_ma20 = err
def test_count_frames_min15(self):
    """count_frames should count 15-minute frames across session breaks and days."""
    X = [
        ["2020-03-26 09:45", "2020-03-26 10:00", 2],
        ["2020-03-26 10:00", "2020-03-27 09:45", 16],
        ["2020-03-26 10:00", "2020-03-27 13:15", 24],
    ]
    for i, case in enumerate(X):
        logger.debug("testing %s", X[i])
        begin, end, expected = case
        actual = tf.count_frames(arrow.get(begin), arrow.get(end), FrameType.MIN15)
        self.assertEqual(expected, actual)
def test_count_frames_week(self):
    """count_frames should count weekly frames between two trade dates.

    Each row of X is (start, expected_count, end).
    """
    X = [
        ["2020-01-25", 1, "2020-01-23"],
        ["2020-01-23", 2, "2020-02-07"],
        ["2020-01-25", 3, "2020-02-14"],
        ["2020-05-06", 1, "2020-04-30"],
    ]
    for i, (start, expected, end) in enumerate(X):
        logger.debug("testing %s", X[i])
        actual = tf.count_frames(
            arrow.get(start).date(), arrow.get(end).date(), FrameType.WEEK
        )
        # (expected, actual) argument order matches the sibling
        # count_frames tests so failure messages read correctly.
        self.assertEqual(expected, actual)
def test_count_frames_month(self):
    """count_frames should count monthly frames between two dates."""
    X = [
        ["2015-02-25", 1, "2015-01-30"],
        ["2015-02-27", 1, "2015-02-27"],
        ["2015-03-01", 1, "2015-02-27"],
        ["2015-03-01", 2, "2015-03-31"],
        ["2015-03-01", 1, "2015-03-30"],
        ["2015-03-01", 13, "2016-02-29"],
    ]
    for idx, row in enumerate(X):
        logger.debug("testing %s", X[idx])
        start, expected, end = row
        begin_date = arrow.get(start).date()
        end_date = arrow.get(end).date()
        actual = tf.count_frames(begin_date, end_date, FrameType.MONTH)
        self.assertEqual(expected, actual)
async def sync_and_check(
    self, code, frame_type, start, stop, expected_head, expected_tail
):
    """Run a bar sync for `code`, then verify the cached range and bar count."""
    await sync.sync_bars_worker(
        {"start": start, "stop": stop, "frame_type": frame_type}, [code]
    )
    head, tail = await cache.get_bars_range(code, frame_type)

    n_bars = tf.count_frames(expected_head, expected_tail, frame_type)
    bars = await cache.get_bars(code, expected_tail, n_bars, frame_type)

    # The k-line data in this unit test should contain no holes. If the
    # head/tail stitching goes wrong, holes appear and the actual number of
    # bars no longer matches the expected count.
    valid = [bar for bar in bars if not np.isnan(bar["close"])]
    self.assertEqual(n_bars, len(valid))
    self.assertEqual(expected_head, head)
    self.assertEqual(expected_tail, tail)
def test_count_frames_min1(self):
    """count_frames should count 1-minute frames across the lunch break and days."""
    X = [
        ("2020-03-26 09:31", 1, "2020-03-26 09:31"),
        ("2020-03-26 09:31", 2, "2020-03-26 09:32"),
        ("2020-03-26 11:30", 1, "2020-03-26 11:30"),
        ("2020-03-26 11:30", 2, "2020-03-26 13:01"),
        ("2020-03-26 11:30", 3, "2020-03-26 13:02"),
        ("2020-03-26 15:00", 1, "2020-03-26 15:00"),
        ("2020-03-26 15:00", 2, "2020-03-27 09:31"),
        ("2020-03-26 15:00", 242, "2020-03-30 09:31"),
    ]
    for idx, (begin, expected, end) in enumerate(X):
        logger.debug("testing %s", X[idx])
        self.assertEqual(
            expected,
            tf.count_frames(
                arrow.get(begin, tzinfo=cfg.tz),
                arrow.get(end, tzinfo=cfg.tz),
                FrameType.MIN1,
            ),
        )
async def quick_scan():
    """Scan cached bars against every sync config and report inconsistencies.

    For each entry in ``cfg.omega.sync.bars``, checks every chosen security's
    cached k-line range and logs findings to the ``quickscan`` logger:

    - ``ENOSYNC``: code was never synced (no cached range)
    - ``ELEN``: cached bar count disagrees with the head/tail range (holes)
    - ``ESTART``: cached head is later than the configured start
    - ``EEND``: cached tail is stale relative to the configured stop

    Returns:
        dict mapping frame name -> [error_count, total_codes_checked]
    """
    secs = Securities()
    report = logging.getLogger("quickscan")
    counters = {}
    for sync_config in cfg.omega.sync.bars:
        frame = sync_config.get("frame")
        start = sync_config.get("start")
        if frame is None or start is None:
            logger.warning(
                "skipped %s: required fields are [frame, start]", sync_config
            )
            continue

        frame_type = FrameType(frame)
        start = tf.floor(arrow.get(start).date(), FrameType.DAY)
        stop = sync_config.get("stop") or arrow.now().date()
        if frame_type in tf.minute_level_frames:
            # minute-level frames: align start to the day's first tick and
            # stop to the 15:00 close, both timezone-aware
            minutes = tf.ticks[frame_type][0]
            h, m = minutes // 60, minutes % 60
            start = datetime.datetime(
                start.year, start.month, start.day, h, m, tzinfo=tz.gettz(cfg.tz)
            )
            stop = datetime.datetime(
                stop.year, stop.month, stop.day, 15, tzinfo=tz.gettz(cfg.tz)
            )

        counters[frame] = [0, 0]
        codes = secs.choose(sync_config.get("type"))
        include = filter(None, sync_config.get("include", "").split(","))
        codes.extend(map(str.strip, include))
        # BUGFIX: `exclude` was previously iterated without splitting on
        # commas, so `map(strip, exclude)` walked individual characters and
        # no code was ever actually excluded.
        exclude = sync_config.get("exclude", "").split(",")
        codes = set(codes) - set(map(str.strip, exclude))
        counters[frame][1] = len(codes)

        for code in codes:
            head, tail = await cache.get_bars_range(code, frame_type)
            if head is None or tail is None:
                report.info("ENOSYNC,%s,%s", code, frame)
                counters[frame][0] += 1
                continue

            expected = tf.count_frames(head, tail, frame_type)
            # the cache hash also stores 'head' and 'tail' entries, which
            # are bookkeeping keys rather than bars — exclude them
            actual = (await cache.security.hlen(f"{code}:{frame_type.value}")) - 2
            if actual != expected:
                report.info(
                    "ELEN,%s,%s,%s,%s,%s,%s",
                    code, frame, expected, actual, head, tail,
                )
                counters[frame][0] += 1
                continue

            sec = Security(code)
            if start != head:
                # a late head only counts as an error when the security was
                # already listed at `start`
                start_date = (
                    start.date() if isinstance(start, datetime.datetime) else start
                )
                if start_date > sec.ipo_date:
                    report.info(
                        "ESTART,%s,%s,%s,%s,%s",
                        code, frame, start, head, sec.ipo_date,
                    )
                    counters[frame][0] += 1
                    continue
            if tail != stop:
                report.info("EEND,%s,%s,%s,%s", code, frame, stop, tail)
                counters[frame][0] += 1
    return counters
async def sync_bars_for_security(
    code: str,
    frame_type: FrameType,
    start: Union[datetime.date, datetime.datetime],
    stop: Union[None, datetime.date, datetime.datetime],
):
    """Sync `frame_type` bars for `code` so the cache covers [start, stop].

    If the cache holds no range for this code, the whole window is fetched
    in one request. Otherwise only the missing stretches before the cached
    head and after the cached tail are fetched, and a warning is logged when
    the fetched bars do not butt up against the cached range (a "discrete
    frames" gap).
    """
    counters = 0
    logger.info("syncing quotes for %s", code)
    # fetch the cached k-line head/tail for this code under frame_type
    head, tail = await cache.get_bars_range(code, frame_type)
    if not all([head, tail]):
        # nothing (or only a partial range marker) cached: reset the range
        # pointers and pull the full window in a single request
        await cache.clear_bars_range(code, frame_type)
        n_bars = tf.count_frames(start, stop, frame_type)
        bars = await aq.get_bars(code, stop, n_bars, frame_type)
        counters = len(bars)
        logger.info("finished sync %s(%s), %s bars synced", code, frame_type, counters)
        return
    if start < head:
        # back-fill [start, head): the `- 1` / shift(head, -1) pair avoids
        # re-fetching the bar already cached at `head`
        n = tf.count_frames(start, head, frame_type) - 1
        if n > 0:
            _end_at = tf.shift(head, -1, frame_type)
            bars = await aq.get_bars(code, _end_at, n, frame_type)
            counters += len(bars)
            logger.debug(
                "sync %s level bars of %s to %s: expected: %s, actual %s",
                frame_type,
                code,
                _end_at,
                n,
                len(bars),
            )
            # last fetched bar should land exactly one frame before `head`
            if len(bars) and bars["frame"][-1] != _end_at:
                logger.warning(
                    "discrete frames found:%s, bars[-1](%s), " "head(%s)",
                    code,
                    bars["frame"][-1],
                    head,
                )
    if stop > tail:
        # forward-fill (tail, stop]
        n = tf.count_frames(tail, stop, frame_type) - 1
        if n > 0:
            bars = await aq.get_bars(code, stop, n, frame_type)
            logger.debug(
                "sync %s level bars of %s to %s: expected: %s, actual %s",
                frame_type,
                code,
                stop,
                n,
                len(bars),
            )
            counters += len(bars)
            # first fetched bar should land exactly one frame after `tail`
            if bars["frame"][0] != tf.shift(tail, 1, frame_type):
                logger.warning(
                    "discrete frames found: %s, tail(%s), bars[0](" "%s)",
                    code,
                    tail,
                    bars["frame"][0],
                )
    logger.info("finished sync %s(%s), %s bars synced", code, frame_type, counters)
async def load_bars(
    self,
    start: Frame,
    stop: datetime.datetime,
    frame_type: FrameType,
    fq=True,
    turnover=False,
) -> np.ndarray:
    """Load bars in [`start`, `stop`] into this `Security` and return them.

    `start` may equal `stop`.

    For speed, minute-level turnover is computed as the period's volume
    divided by the circulating shares of the most recent reporting period —
    note this is an approximation. If shares were recently unlocked, the
    same volume should yield a smaller turnover after the unlock than
    before.

    Args:
        start: first frame (inclusive) to load.
        stop: last frame (inclusive); may fall inside an unclosed frame.
        frame_type: bar granularity to load.
        fq: whether to apply forward adjustment (qfq).
        turnover: whether to attach turnover data.

    Returns:
        the loaded bars (also stored on `self._bars`).
    """
    self._bars = None
    # align both ends to closed frames; `stop` itself may be mid-frame
    start = tf.floor(start, frame_type)
    _stop = tf.floor(stop, frame_type)
    assert start <= _stop
    head, tail = await cache.get_bars_range(self.code, frame_type)
    if not all([head, tail]):
        # not cached at all, ensure cache pointers are clear
        await cache.clear_bars_range(self.code, frame_type)
        n = tf.count_frames(start, _stop, frame_type)
        if stop > _stop:
            # `stop` is inside an unclosed frame: fetch one extra bar for it
            self._bars = await get_bars(self.code, stop, n + 1, frame_type)
        else:
            self._bars = await get_bars(self.code, _stop, n, frame_type)
        if fq:
            self.qfq()
        if turnover:
            await self._add_turnover(frame_type)
        return self._bars
    if start < head:
        # back-fill the cache before the cached head
        n = tf.count_frames(start, head, frame_type)
        if n > 0:
            _end = tf.shift(head, -1, frame_type)
            self._bars = await get_bars(self.code, _end, n, frame_type)
    if _stop > tail:
        # forward-fill the cache after the cached tail (result discarded —
        # get_bars populates the cache as a side effect)
        n = tf.count_frames(tail, _stop, frame_type)
        if n > 0:
            await get_bars(self.code, _stop, n, frame_type)
    # now all closed bars in [start, _stop] should exist in cache
    n = tf.count_frames(start, _stop, frame_type)
    self._bars = await cache.get_bars(self.code, _stop, n, frame_type)
    if arrow.get(stop) > arrow.get(_stop):
        # append the current, unclosed bar when `stop` extends past the last
        # closed frame; only if it continues directly from the cached data
        bars = await get_bars(self.code, stop, 2, frame_type)
        if len(bars) == 2 and bars[0]["frame"] == self._bars[-1]["frame"]:
            self._bars = np.append(self._bars, bars[1])
    if fq:
        self.qfq()
    if turnover:
        await self._add_turnover(frame_type)
    return self._bars
async def _cache_get_bars_all(self, code, frame_type):
    """Return every bar currently cached for `code` at `frame_type`."""
    head, tail = await cache.get_bars_range(code, frame_type)
    total = tf.count_frames(head, tail, frame_type)
    return await cache.get_bars(code, tail, total, frame_type)
async def test_parse_sync_params(self):
    """Verify parse_sync_params aligns start/stop to closed trade frames.

    Trade days around the 2020 New Year holiday are:
    20191224, 20191225, 20191226, 20191227, 20191230, 20191231, 20200102,
    20200103, 20200106, 20200107
    """
    sync_params = {
        "frame": "1d",
        "start": "2020-01-01",
        "delay": "3",
        "cat": ["stock"],
        "include": "000001.XSHE 000004.XSHE",
        "exclude": "000001.XSHE 000001.XSHG",
    }
    # 0. stop is None and current time is in opening
    with mock.patch("arrow.now", return_value=arrow.get("2021-03-01 10:45", tzinfo=cfg.tz)):
        codes, ft, start, stop, delay = syncjobs.parse_sync_params(
            **sync_params)
        self.assertTrue("000001.XSHE" in codes)
        self.assertEqual(FrameType.DAY, ft)
        # 2020-01-01 is a holiday, so start aligns back to 2019-12-31
        self.assertEqual(start, datetime.date(2019, 12, 31))
        # market still open on 2021-03-01, so stop is the previous trade day
        self.assertEqual(stop, datetime.date(2021, 2, 26))
        self.assertEqual(3, delay)
    # 1. stop is None, and current time is after closed
    with mock.patch("arrow.now", return_value=arrow.get("2021-3-1 16:00", tzinfo=cfg.tz)):
        codes, ft, start, stop, delay = syncjobs.parse_sync_params(
            **sync_params)
        self.assertEqual(FrameType.DAY, ft)
        self.assertEqual(start, datetime.date(2019, 12, 31))
        self.assertEqual(stop, datetime.date(2021, 3, 1))
        self.assertEqual(3, delay)
    # 2. MIN5 frame, start is type of `date` could start align to first_frame?
    sync_params["frame"] = "5m"
    codes, ft, start, stop, delay = syncjobs.parse_sync_params(
        **sync_params)
    self.assertEqual(FrameType.MIN5, ft)
    self.assertEqual(start, arrow.get("2019-12-31 09:35", tzinfo=cfg.tz))
    # 3. given start of type `datetime`, could it align to latest closed frame?
    sync_params["start"] = "2020-01-01 10:36"
    codes, ft, start, stop, delay = syncjobs.parse_sync_params(
        **sync_params)
    self.assertEqual(start, arrow.get("2019-12-31 15:00", tzinfo=cfg.tz))
    # 4. give stop of type `date`, could it align to last_frame?
    sync_params["stop"] = "2020-01-02"
    codes, ft, start, stop, delay = syncjobs.parse_sync_params(
        **sync_params)
    self.assertEqual(stop, arrow.get("2020-01-02 15:00", tzinfo=cfg.tz))
    # 5. both start and stop are None
    expected_stop = arrow.get("2020-1-2 10:35", tzinfo=cfg.tz)
    with mock.patch("arrow.now", return_value=arrow.get("2020-1-2 10:36", tzinfo=cfg.tz)):
        sync_params["stop"] = None
        sync_params["start"] = None
        codes, ft, start, stop, delay = syncjobs.parse_sync_params(
            **sync_params)
        self.assertEqual(expected_stop, stop)
        # with both ends omitted, the parser should default to a 1000-frame window
        self.assertEqual(1000, tf.count_frames(start, expected_stop, ft))
async def sync_bars_for_security( code: str, frame_type: FrameType, start: Union[datetime.date, datetime.datetime], stop: Union[None, datetime.date, datetime.datetime], ): counters = 0 # 取数据库中该frame_type下该code的k线起始点 head, tail = await cache.get_bars_range(code, frame_type) if not all([head, tail]): await cache.clear_bars_range(code, frame_type) n_bars = tf.count_frames(start, stop, frame_type) bars = await aq.get_bars(code, stop, n_bars, frame_type) if bars is not None and len(bars): logger.debug( "sync %s(%s), from %s to %s: actual got %s ~ %s (%s)", code, frame_type, start, head, bars[0]["frame"], bars[-1]["frame"], len(bars), ) counters = len(bars) return if start < head: n = tf.count_frames(start, head, frame_type) - 1 if n > 0: _end_at = tf.shift(head, -1, frame_type) bars = await aq.get_bars(code, _end_at, n, frame_type) if bars is not None and len(bars): counters += len(bars) logger.debug( "sync %s(%s), from %s to %s: actual got %s ~ %s (%s)", code, frame_type, start, head, bars[0]["frame"], bars[-1]["frame"], len(bars), ) if bars["frame"][-1] != _end_at: logger.warning( "discrete frames found:%s, bars[-1](%s), " "head(%s)", code, bars["frame"][-1], head, ) if stop > tail: n = tf.count_frames(tail, stop, frame_type) - 1 if n > 0: bars = await aq.get_bars(code, stop, n, frame_type) if bars is not None and len(bars): logger.debug( "sync %s(%s), from %s to %s: actual got %s ~ %s (%s)", code, frame_type, tail, stop, bars[0]["frame"], bars[-1]["frame"], len(bars), ) counters += len(bars) if bars["frame"][0] != tf.shift(tail, 1, frame_type): logger.warning( "discrete frames found: %s, tail(%s), bars[0](" "%s)", code, tail, bars["frame"][0], )