async def test_000_load(self):
    s = Securities()

    # invalidate cache, then load from remote
    await cache.security.delete("securities")
    await s.load()
    logger.info(s)
    self.assertEqual(s[0]["code"], "000001.XSHE")

    # read from cache
    s.reset()
    await s.load()
    self.assertEqual(s[0]["code"], "000001.XSHE")
    self.assertEqual(s["000001.XSHE"]["display_name"], "平安银行")
async def scan(self, stop: Frame = None):
    start = tf.shift(stop, -26, FrameType.WEEK)
    ERR = {5: 0.008, 10: 0.004, 20: 0.004}

    for code in Securities().choose(['stock']):
        # for code in ['002150.XSHE']:
        sec = Security(code)
        bars = await sec.load_bars(start, stop, FrameType.WEEK)
        if bars[-1]['frame'] != stop:
            raise ValueError(f"bars of {code} are not synced up to {stop}")

        t1, t2, t3 = False, False, False
        params = []
        for win in [5, 10, 20]:
            ma = signal.moving_average(bars['close'], win)
            err, (a, b, c), (vx, _) = signal.polyfit(ma[-7:] / ma[-7])
            if err > ERR[win]:
                continue

            p = np.poly1d((a, b, c))
            slp3 = round(p(9) / p(6) - 1, 2)
            params.append(np.round([slp3, a, b], 4))

            if win == 5:
                t1 = slp3 >= 0.03 and a > 0.005
            if win == 10:
                t2 = slp3 >= 0.02 and (b > abs(10 * a) or a > 0.0005)
            if win == 20:
                t3 = slp3 >= -1e-6 and a >= 0

        if all([t1, t2, t3]):
            print(sec.display_name, params)
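# --- Hedged illustration (not project code) --------------------------------------
# scan() above fits a quadratic to the last 7 points of a normalized moving average
# and uses p(9)/p(6) - 1 as a proxy for the expected gain three frames ahead. The
# sketch below shows that idea with plain numpy; it assumes signal.polyfit behaves
# roughly like a degree-2 np.polyfit that also reports a fitting error, which is an
# assumption, not the project's actual implementation.
import numpy as np

def fit_and_project(ma: np.ndarray, fit_win: int = 7):
    y = ma[-fit_win:] / ma[-fit_win]              # normalize so coefficients are comparable
    x = np.arange(fit_win)
    a, b, c = np.polyfit(x, y, deg=2)
    p = np.poly1d((a, b, c))
    err = np.sqrt(np.mean((p(x) - y) ** 2))       # rough stand-in for the fitting error
    slp3 = p(fit_win - 1 + 3) / p(fit_win - 1) - 1
    return err, (a, b, c), slp3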
async def test_longparallel(self):
    plot = LongParallel()
    codes = Securities().choose(['stock'])
    for code in codes:  # ['000012.XSHE']:
        dt = arrow.get('2020-6-24')
        await plot.evaluate(code, FrameType.DAY, dt, 15)

    print(plot.results)
async def asyncSetUp(self) -> None:
    # check if omega is running
    self.omega = await start_omega()

    await omicron.init()
    self.securities = Securities()
    await self.securities.load()
async def test_evaluate(self):
    scheduler = AsyncIOScheduler(timezone=cfg.tz)
    plot = DuckPlot(scheduler)
    secs = Securities().choose(['stock'])
    for code in secs:
        dt = tf.floor(arrow.get('2020-7-24'), FrameType.DAY)
        try:
            await plot.evaluate(code, FrameType.DAY, dt=dt)
        except Exception as e:
            logger.exception(e)
async def scan_1(self, ma_win: int, frame_type: FrameType, a: float = None,
                 b: float = None, err=1e-3, end: Frame = None):
    """
    Among all stocks, find those whose `ma_win` moving average is stronger than the
    reference fitted line (a, b, 1). If the current close is near the moving average
    and a big bullish candle appeared recently, fire a signal.

    Returns:

    """
    if a is None:
        err = self.ref_lines[f"ma{ma_win}"].get("err")
        a, b = self.ref_lines[f"ma{ma_win}"].get("coef")

    fit_win = 7
    secs = Securities()

    p = np.poly1d((a, b, 1.0))
    slp3 = p(fit_win - 1 + 3) / p(fit_win - 1) - 1

    count = 0
    for i, code in enumerate(secs.choose(['stock'])):
        if (i + 1) % 500 == 0:
            logger.debug("handled %s", i + 1)

        sec = Security(code)
        bars = await self.get_bars(code, fit_win + 19, frame_type, end)
        ma = signal.moving_average(bars['close'], ma_win)

        err_, (a_, b_, c_), (vx_, _) = signal.polyfit(ma[-fit_win:] / ma[-fit_win])
        if err_ > err:
            continue

        # p_ = np.poly1d((a_, b_, 1.0))
        # if abs(b) < fit_win * a, the curve is close to a straight line for
        # x <= fit_win, in which case b should be compared instead
        t5, t10, t20 = False, None, None
        # slp3_5 = p_(fit_win + 2) / p_(fit_win - 1) - 1
        t5 = a_ >= a * 0.99 and fit_win + 1 >= vx_ >= fit_win - 2
        if t5:
            print(f"{sec.display_name},{vx_:.1f}")
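# --- Hedged note (not project code) -----------------------------------------------
# The vertex coordinate vx_ checked in scan_1() relates to the fitted quadratic's
# coefficients: for y = a*x^2 + b*x + c the vertex lies at x = -b / (2a). The
# condition `fit_win + 1 >= vx_ >= fit_win - 2` therefore asks that the turning
# point of the fitted MA curve sits near the right edge of the fitting window.
def vertex_x(a: float, b: float) -> float:
    if a == 0:
        raise ValueError("not a parabola")
    return -b / (2 * a)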
async def fire_long(self, end: Frame, frame_type: FrameType = FrameType.DAY,
                    win=60, adv=0.03):
    secs = Securities()
    results = []
    for code in secs.choose(['stock']):
        # for code in ['601238.XSHG']:
        sec = Security(code)
        if sec.name.find("ST") != -1 or sec.code.startswith("688"):
            continue

        start = tf.shift(end, -win + 1, frame_type)
        bars = await sec.load_bars(start, end, frame_type)

        ilow = np.argmin(bars['low'])
        if ilow > win // 2:
            # the new low and the rebound are too close; the signal is unreliable
            continue

        low = bars['low'][ilow]
        last = bars['low'][-5:]
        if np.count_nonzero((last > low) & (last < low * 1.02)) < 3:
            # the new low has not been retested enough times
            continue

        c1, c0 = bars['close'][-2:]
        # is today's gain greater than adv?
        if c0 / c1 - 1 < adv:
            continue

        # is the close above both the 5-day and 10-day moving averages?
        ma5 = signal.moving_average(bars['close'], 5)
        ma10 = signal.moving_average(bars['close'], 10)

        if c0 < ma5[-1] or c0 < ma10[-1]:
            continue

        price_change = await sec.price_change(end, tf.day_shift(end, 5), frame_type)
        print(f"FIRED:{end}\t{code}\t{price_change:.2f}")
        results.append([end, code, price_change])

    return results
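# --- Hedged illustration (not project code) ---------------------------------------
# The filter above requires that the new low has been "retested": at least 3 of the
# last 5 lows must sit within 2% above the lowest low of the window. A standalone
# numpy sketch of that check:
import numpy as np

def retested(lows: np.ndarray, times: int = 3, band: float = 0.02) -> bool:
    low = lows.min()
    last = lows[-5:]
    return int(np.count_nonzero((last > low) & (last < low * (1 + band)))) >= times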
def __init__(self, code: str):
    self._code = code

    (
        _,
        self._display_name,
        self._name,
        self._start_date,
        self._end_date,
        _type,
    ) = Securities()[code]
    self._type = SecurityType(_type)
    self._bars = None
async def _build_train_data(self, frame_type: FrameType, n: int,
                            max_error: float = 0.01):
    """
    Starting from the most recent qualifying date, iterate over stocks, extract
    features and labels, and build the dataset.

    Args:
        n: number of samples to collect

    Returns:

    """
    watch_win = 5
    max_curve_len = 5
    max_ma_win = 20

    # y_stop = arrow.get('2020-7-24').date()
    y_stop = tf.floor(arrow.now(tz=cfg.tz), frame_type)
    y_start = tf.shift(y_stop, -watch_win + 1, frame_type)
    x_stop = tf.shift(y_start, -1, frame_type)
    x_start = tf.shift(x_stop, -(max_curve_len + max_ma_win - 1), frame_type)
    data = []
    while len(data) < n:
        for code in Securities().choose(['stock']):
            # for code in ['000601.XSHE']:
            try:
                sec = Security(code)
                x_bars = await sec.load_bars(x_start, x_stop, FrameType.DAY)
                y_bars = await sec.load_bars(y_start, y_stop, FrameType.DAY)

                # [a, b, axis] * 3
                x = self.extract_features(x_bars, max_error)
                if len(x) == 0:
                    continue
                y = np.max(y_bars['close']) / x_bars[-1]['close'] - 1
                if np.isnan(y):
                    continue

                feature = [code, tf.date2int(x_stop)]
                feature.extend(x)
                data.append(feature)
            except Exception as e:
                logger.warning("Failed to extract features for %s (%s)",
                               code, x_stop)
                logger.exception(e)

            if len(data) >= n:
                break
            if len(data) % 500 == 0:
                logger.info("got %s records.", len(data))

        y_stop = tf.day_shift(y_stop, -1)
        y_start = tf.day_shift(y_start, -1)
        x_stop = tf.day_shift(y_start, -1)
        x_start = tf.day_shift(x_start, -1)

    return data
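# --- Hedged illustration (not project code) ---------------------------------------
# The window layout _build_train_data() walks over, shown with plain integers
# instead of trading frames: features come from [x_start, x_stop], labels from
# [y_start, y_stop], and both windows slide back by one day per pass.
watch_win, max_curve_len, max_ma_win = 5, 5, 20
frames = list(range(100))                 # stand-in for a sequence of trading frames
y_stop = frames[-1]
y_start = y_stop - (watch_win - 1)
x_stop = y_start - 1
x_start = x_stop - (max_curve_len + max_ma_win - 1)
assert (y_stop - y_start + 1) == 5 and (x_stop - x_start + 1) == 25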
async def find_by_moving_average(self):
    result = []
    for code in Securities().choose(['stock']):
        day = arrow.now().date()
        sec = Security(code)
        try:
            signal, fit = await self.test_signal(sec, day)
            if abs(signal) == 1:
                result.append([code, day, signal, fit])
        except Exception as e:
            logger.info(e)
            continue

        # reporter.info("%s,%s,%s,%s,%s,%s,%s,%s,%s",
        #               code, day, signal, *fit[0], *fit[1], *fit[2])

    return result
async def test_evaluate(self):
    from pyemit import emit
    await emit.start(emit.Engine.REDIS, dsn=cfg.redis.dsn)

    scheduler = AsyncIOScheduler(timezone=cfg.tz)
    mm.init(scheduler)

    plot = MaLine()
    plot.max_fire_time = 2
    secs = Securities().choose(['stock'])
    for code in secs:
        await plot.evaluate(code, FrameType.DAY, 'both', 5,
                            job_name='mock_job_name')
        await plot.evaluate(code, FrameType.DAY, 'both', 5,
                            job_name='mock_job_name')
async def test_001_choose(self):
    s = Securities()
    result = s.choose(["stock", "index"])
    self.assertEqual("000001.XSHE", result[0])

    result = s.choose(["stock"], exclude_300=True)
    self.assertTrue(all([not x.startswith("300") for x in result]))

    result = s.choose(["stock"], exclude_st=True)
    for code in result:
        sec = Security(code)
        self.assertTrue(sec.display_name.upper().find("ST") == -1)

    result = s.choose(["stock"], exclude_688=True)
    self.assertTrue(all([not x.startswith("688") for x in result]))
async def distribution(self):
    # limit-up and limit-down counts
    zt, dt = 0, 0
    codes = Securities().choose(['stock'])
    end = arrow.now(cfg.tz).floor('minute').datetime
    pct = []
    async for code, bars in Security.load_bars_batch(codes, end, 2,
                                                     FrameType.DAY):
        c1, c0 = bars[-2:]['close']
        if (c0 + 0.01) / c1 - 1 > 0.1:
            zt += 1
        if (c0 - 0.01) / c1 - 1 < -0.1:
            dt += 1

        pct.append(c0 / c1 - 1)

    # distribution of price changes
    cuts = np.histogram(pct, bins=[-0.2, -0.1, -0.07, -0.03, 0,
                                   0.03, 0.07, 0.1, 0.2])

    self.price_change_history.append((zt, dt, cuts))
    if len(self.price_change_history) == 8:
        self.price_change_history.pop(0)

    now = arrow.now(tz=cfg.tz)
    if now.hour >= 15:
        date_int = tf.date2int(now)  # keep `dt` intact: it holds the limit-down count
        await cache.sys.hset(f"glance{date_int}", "distribution",
                             json.dumps({
                                 "zt": zt,
                                 "dt": dt,
                                 "cuts": cuts
                             }))

    return zt, dt, cuts
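# --- Hedged illustration (not project code) ---------------------------------------
# np.histogram, as used in distribution() above, returns a (counts, bin_edges)
# tuple of numpy arrays; converting to plain lists is needed before json.dumps can
# serialize it. Values below are synthetic.
import json
import numpy as np

pct = np.array([-0.11, -0.02, 0.0, 0.015, 0.04, 0.08, 0.12])
bins = [-0.2, -0.1, -0.07, -0.03, 0, 0.03, 0.07, 0.1, 0.2]
counts, edges = np.histogram(pct, bins=bins)
payload = json.dumps({"counts": counts.tolist(), "edges": edges.tolist()})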
async def quick_scan():
    # fixme
    secs = Securities()

    report = logging.getLogger("quickscan")

    counters = {}
    for sync_config in cfg.omega.sync.bars:
        frame = sync_config.get("frame")
        start = sync_config.get("start")

        if frame is None or start is None:
            logger.warning("skipped %s: required fields are [frame, start]",
                           sync_config)
            continue

        frame_type = FrameType(frame)

        start = arrow.get(start).date()
        start = tf.floor(start, FrameType.DAY)
        stop = sync_config.get("stop") or arrow.now().date()

        if frame_type in tf.minute_level_frames:
            minutes = tf.ticks[frame_type][0]
            h, m = minutes // 60, minutes % 60
            start = datetime.datetime(start.year, start.month, start.day, h, m,
                                      tzinfo=tz.gettz(cfg.tz))
            stop = datetime.datetime(stop.year, stop.month, stop.day, 15,
                                     tzinfo=tz.gettz(cfg.tz))

        counters[frame] = [0, 0]

        codes = secs.choose(sync_config.get("type"))
        include = filter(lambda x: x, sync_config.get("include", "").split(","))
        include = map(lambda x: x.strip(" "), include)
        codes.extend(include)
        exclude = sync_config.get("exclude", "").split(",")
        exclude = map(lambda x: x.strip(" "), exclude)
        codes = set(codes) - set(exclude)

        counters[frame][1] = len(codes)
        for code in codes:
            head, tail = await cache.get_bars_range(code, frame_type)
            if head is None or tail is None:
                report.info("ENOSYNC,%s,%s", code, frame)
                counters[frame][0] = counters[frame][0] + 1
                continue

            expected = tf.count_frames(head, tail, frame_type)

            # 'head' and 'tail' entries should be excluded from the count
            actual = (await cache.security.hlen(f"{code}:{frame_type.value}")) - 2
            if actual != expected:
                report.info("ELEN,%s,%s,%s,%s,%s,%s", code, frame, expected,
                            actual, head, tail)
                counters[frame][0] = counters[frame][0] + 1
                continue

            sec = Security(code)
            if start != head:
                if (type(start) == datetime.date and start > sec.ipo_date
                        or (type(start) == datetime.datetime
                            and start.date() > sec.ipo_date)):
                    report.info("ESTART,%s,%s,%s,%s,%s", code, frame, start,
                                head, sec.ipo_date)
                    counters[frame][0] = counters[frame][0] + 1
                    continue
            if tail != stop:
                report.info("EEND,%s,%s,%s,%s", code, frame, stop, tail)
                counters[frame][0] = counters[frame][0] + 1

    return counters
async def start_validation():
    """
    Split the securities to be validated evenly by CPU count and spawn one
    subprocess per CPU to run the validation. The validation range is taken from
    jobs.bars_validation.range.start and jobs.bars_validation.range.stop in the
    database; when a run finishes, jobs.bars_validation.range.start is advanced to
    the last trading day validated. If the subprocesses report different end days
    (for example because of an exception), the smallest trading day is used.
    """
    global validation_errors, no_validation_error_days
    validation_errors = []

    secs = Securities()

    cpu_count = psutil.cpu_count()

    # to check if the range is right
    pl = cache.sys.pipeline()
    pl.get("jobs.bars_validation.range.start")
    pl.get("jobs.bars_validation.range.end")
    start, end = await pl.execute()

    if start is None:
        if cfg.omega.validation.start is None:
            logger.warning("start of validation is not specified, validation aborted.")
            return
        else:
            start = tf.date2int(arrow.get(cfg.omega.validation.start))
    else:
        start = int(start)

    if end is None:
        end = tf.date2int(tf.floor(arrow.now().date(), FrameType.DAY))
    else:
        end = int(end)

    assert start <= end

    no_validation_error_days = set(tf.day_frames[(tf.day_frames >= start)
                                                 & (tf.day_frames <= end)])

    # fixme: do validation per frame_type
    # fixme: test fail. Rewrite this before 0.6 releases
    codes = secs.choose(cfg.omega.sync)
    await cache.sys.delete("jobs.bars_validation.scope")
    await cache.sys.lpush("jobs.bars_validation.scope", *codes)

    logger.info("start validation %s secs from %s to %s.", len(codes), start, end)
    emit.register(Events.OMEGA_VALIDATION_ERROR, on_validation_error)

    t0 = time.time()

    code = ("from omega.core.sanity import do_validation_process_entry; "
            "do_validation_process_entry()")

    procs = []
    for i in range(cpu_count):
        proc = subprocess.Popen([sys.executable, "-c", code], env=os.environ)
        procs.append(proc)

    timeout = 3600
    while timeout > 0:
        await asyncio.sleep(2)
        timeout -= 2
        for proc in procs:
            proc.poll()

        if all([proc.returncode is not None for proc in procs]):
            break

    if timeout <= 0:
        for proc in procs:
            try:
                os.kill(proc.pid, signal.SIGTERM)
            except Exception:
                pass

    # set next start point
    validation_days = set(tf.day_frames[(tf.day_frames >= start)
                                        & (tf.day_frames <= end)])
    diff = validation_days - no_validation_error_days
    if len(diff):
        last_no_error_day = min(diff)
    else:
        last_no_error_day = end

    await cache.sys.set("jobs.bars_validation.range.start", last_no_error_day)
    elapsed = time.time() - t0
    logger.info(
        "Validation cost %s seconds, validation will start at %s next time",
        elapsed,
        last_no_error_day,
    )
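# --- Hedged illustration (not project code) ---------------------------------------
# A minimal stdlib sketch of the fan-out pattern used by start_validation(): spawn
# one worker process per CPU that runs a module-level entry statement, then poll
# until all workers exit or a timeout expires. The entry statement and names here
# are illustrative only.
import os
import sys
import time
import subprocess

def run_workers(entry: str = "print('worker done')", nproc: int = 2,
                timeout: float = 60.0) -> bool:
    procs = [subprocess.Popen([sys.executable, "-c", entry], env=os.environ)
             for _ in range(nproc)]
    deadline = time.time() + timeout
    while time.time() < deadline:
        if all(p.poll() is not None for p in procs):
            return True                   # every worker finished
        time.sleep(1)
    for p in procs:
        p.terminate()                     # portable counterpart of os.kill(..., SIGTERM)
    return False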
async def test_fuzzy_match(self):
    for query in ["600001", "PFYH", "浦发"]:
        result = Securities().fuzzy_match(query)
        self.assertTrue(len(result) != 0, f"{query}")
async def test_001_choose(self):
    s = Securities()
    result = s.choose(["stock", "index"])
    self.assertEqual("000001.XSHE", result[0])
def parse_sync_params(
        frame: Union[str, Frame],
        cat: List[str] = None,
        start: Union[str, datetime.date] = None,
        stop: Union[str, Frame] = None,
        delay: int = 0,
        include: str = "",
        exclude: str = "",
) -> Tuple:
    """Parse and complete the sync parameters according to the rules in the
    [user manual](usage.md#22-如何同步K线数据).

    If `frame_type` is minute-level, a `date`-typed `start` is corrected to the
    first frame of that trading day, and a `date`-typed `stop` is corrected to the
    last frame of that trading day.

    Args:
        frame (Union[str, Frame]): frame type to be synced. The word ``frame`` is
            used here to be easily understood by end users. It actually implies
            "FrameType".
        cat (List[str]): which categories are about to be synced. Should be one of
            ['stock', 'index']. Defaults to None.
        start (Union[str, datetime.date], optional): [description]. Defaults to None.
        stop (Union[str, Frame], optional): [description]. Defaults to None.
        delay (int, optional): [description]. Defaults to 0.
        include (str, optional): which securities should be included, separated by
            space, for example, "000001.XSHE 000004.XSHE". Defaults to empty string.
        exclude (str, optional): which securities should be excluded, separated by
            a space. Defaults to empty string.

    Returns:
        - codes (List[str]): securities to be synced
        - frame_type (FrameType):
        - start (Frame):
        - stop (Frame):
        - delay (int):
    """
    frame_type = FrameType(frame)

    if frame_type in tf.minute_level_frames:
        if stop:
            stop = arrow.get(stop, tzinfo=cfg.tz)
            if stop.hour == 0:  # no valid intraday frame given; use the day's last frame
                stop = tf.last_min_frame(tf.day_shift(stop.date(), 0), frame_type)
            else:
                stop = tf.floor(stop, frame_type)
        else:
            stop = tf.floor(arrow.now(tz=cfg.tz).datetime, frame_type)

        if stop > arrow.now(tz=cfg.tz):
            raise ValueError(f"stop must not be set in the future: {stop}")

        if start:
            start = arrow.get(start, tzinfo=cfg.tz)
            if start.hour == 0:  # no valid intraday frame given; use the day's first frame
                start = tf.first_min_frame(tf.day_shift(start.date(), 0), frame_type)
            else:
                start = tf.floor(start, frame_type)
        else:
            start = tf.shift(stop, -999, frame_type)
    else:
        stop = (stop and arrow.get(stop).date()) or arrow.now().date()
        if stop == arrow.now().date():
            stop = arrow.now(tz=cfg.tz)

        stop = tf.floor(stop, frame_type)
        if start:
            start = tf.floor(arrow.get(start).date(), frame_type)
        else:
            start = tf.shift(stop, -1000, frame_type)

    secs = Securities()
    codes = secs.choose(cat or [])

    exclude = exclude.split(" ")
    codes = list(set(codes) - set(exclude))

    include = list(filter(lambda x: x, include.split(" ")))
    codes.extend(include)

    return codes, frame_type, start, stop, int(delay)
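# --- Hypothetical usage (values are illustrative only) -----------------------------
# codes, frame_type, start, stop, delay = parse_sync_params(
#     frame="1d",
#     cat=["stock"],
#     start="2020-01-01",
#     stop="2020-07-24",
#     include="000001.XSHE",
#     exclude="300001.XSHE",
# )
# For minute-level frames a date-only start/stop is widened to the first/last frame
# of that trading day, as described in the docstring above.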
async def scan(self, end: Frame = None,
               frame_type: FrameType = FrameType.DAY,
               codes=None,
               adv_limit=0.3):
    """
    Args:
        end:
        adv_limit: exclude stocks whose gain over the last `win` frames exceeds adv_limit

    Returns:

    """
    win = 20
    secs = Securities()
    end = end or tf.floor(arrow.now(), FrameType.DAY)

    results = []
    holdings = await cache.sys.smembers("holdings")
    for i, code in enumerate(secs.choose(['stock'])):
        try:
            if code in holdings:  # already held, no need to track and evaluate
                continue

            sec = Security(code)
            if sec.code.startswith('688') or sec.display_name.find('ST') != -1:
                continue

            start = tf.day_shift(end, -270)
            bars = await sec.load_bars(start, end, FrameType.DAY)
            close = bars['close']
            ma5 = signal.moving_average(close, 5)
            ma250 = signal.moving_average(close, 250)

            cross, idx = signal.cross(ma5[-win:], ma250[-win:])
            cross_day = bars[-win + idx]['frame']

            if cross != 1:
                continue

            ma20 = signal.moving_average(close, 20)
            ma120 = signal.moving_average(close, 120)

            # if the 20-day or 120-day MA still sits above the close, do not fire;
            # e.g. 广州浪奇 2020-7-23, 泛海控股 2020-8-3
            if close[-1] < ma120[-1] or close[-1] < ma20[-1]:
                continue

            # count big bullish candles over the last 20 days; if there is none,
            # the rally has probably not started yet, skip
            grl, ggl = features.count_long_body(bars[-20:])
            if grl == 0:
                continue

            # # net buy volume since the breakout (approximated by bullish-candle
            # # volume minus bearish-candle volume, doji candles excluded)
            # bsc = bars[-10 + idx:]  # bars_since_open: included both side
            # ups = bsc[bsc['close'] > (bsc['open'] * 1.01)]
            # downs = bsc[bsc['open'] > (bsc['close'] * 0.99)]
            # balance = np.sum(ups['volume']) - np.sum(downs['volume'])

            # pc = await sec.price_change(cross_day, tf.day_shift(cross_day, 5),
            #                             FrameType.DAY, return_max=True)

            faf = int(win - idx)  # frames after fired
            adv = await sec.price_change(tf.day_shift(end, -win), end,
                                         FrameType.DAY, False)
            if adv > adv_limit:
                continue

            logger.info(f"{sec} crossed above the 250-day MA\t{cross_day}\t{faf}")
            await cache.sys.hmset_dict("plots.crossyear", {
                code: json.dumps({
                    "fired_at": tf.date2int(end),
                    "cross_day": tf.date2int(cross_day),
                    "faf": faf,
                    "grl": grl,
                    "ggl": ggl,
                    "status": 0  # 0 - generated by plots 1 - disabled manually
                })
            })

            results.append([sec.display_name, tf.date2int(end),
                            tf.date2int(cross_day), faf, grl, ggl])
        except Exception as e:
            logger.exception(e)

    logger.info("done crossyear scan.")
    return results
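# --- Hedged illustration (not project code) ---------------------------------------
# scan() above relies on signal.cross() to detect the golden cross of ma5 over
# ma250. The sketch below approximates that idea with numpy only: return +1 and the
# index where the fast series last crossed above the slow one, -1 for a downward
# cross, (0, 0) if they never cross in the window. The project's helper may differ.
import numpy as np

def cross(fast: np.ndarray, slow: np.ndarray):
    sign = np.sign(fast - slow)
    flips = np.where(np.diff(sign) != 0)[0]
    if len(flips) == 0:
        return 0, 0
    idx = int(flips[-1]) + 1              # first index after the latest sign change
    return (1 if sign[idx] > 0 else -1), idx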
async def trigger_bars_sync(frame_type: FrameType, sync_params: dict = None,
                            force=False):
    """Initialize the bars-sync job and signal each quotes_fetcher process to start
    syncing.

    The sync range is specified at day granularity. On a non-trading day it is
    aligned to the previous closed trading day, and the range is closed on both
    ends (from the start frame up to the last closed frame).

    If no end date is specified, sync up to the most recently closed trading day.

    Args:
        frame_type (FrameType): the frame type to sync
        sync_params (dict): parameters required by the sync
            secs (List[str]): securities to sync; if None, securities are chosen by
                the types defined in sync_sec_type.
            sync_sec_type: List[str]
            start: start date
            stop: end date; if not specified, sync up to the last closed trading day
            delay: seconds for sync to wait.
        force: whether to force a sync even if today is not a trading day.

    Returns:

    """
    if not force and not tf.is_trade_day(arrow.now()):
        return

    key_scope = f"jobs.bars_sync.scope.{frame_type.value}"

    if sync_params is None:
        sync_params = read_sync_params(frame_type)

    if not sync_params:
        logger.warning("sync_params is required for sync.")
        return

    codes = sync_params.get("secs")
    if codes is None:
        secs = Securities()
        codes = secs.choose(sync_params.get("type"))
        include = filter(lambda x: x, sync_params.get("include", "").split(","))
        include = map(lambda x: x.strip(" "), include)
        codes.extend(include)
        exclude = sync_params.get("exclude", "").split(",")
        exclude = map(lambda x: x.strip(" "), exclude)
        codes = set(codes) - set(exclude)

    if len(codes) == 0:
        logger.warning("no securities are specified for sync %s", frame_type)
        return

    logger.info("add %s securities into sync queue(%s)", len(codes), frame_type)

    pl = cache.sys.pipeline()
    pl.delete(key_scope)
    pl.lpush(key_scope, *codes)
    await pl.execute()

    await asyncio.sleep(sync_params.get("delay", 0))
    await _start_job_timer("sync")
    await emit.emit(
        Events.OMEGA_DO_SYNC,
        {
            "frame_type": frame_type,
            "start": sync_params.get("start"),
            "stop": sync_params.get("stop"),
        },
    )
    logger.info("%s send to fetchers.", Events.OMEGA_DO_SYNC)
async def test_choose_cyb(self):
    cyb = Securities().choose_cyb()
    self.assertTrue(len(cyb) > 0)
    self.assertTrue(all([x.startswith("300") for x in cyb]))
async def fire_long(self, end: Frame = None, overlap_win=10,
                    frame_type: FrameType = FrameType.MIN30):
    """
    Look for long-entry signals.

    Args:

    Returns:

    """
    result = []
    end = end or arrow.now().datetime
    secs = Securities()
    for code in secs.choose(['stock']):
        # for code in ['600139.XSHG']:
        try:
            sec = Security(code)
            start = tf.shift(end, -(60 + overlap_win - 1), frame_type)
            bars = await sec.load_bars(start, end, frame_type)

            mas = {}
            for win in [5, 10, 20, 60]:
                ma = signal.moving_average(bars['close'], win)
                mas[f"{win}"] = ma

            # close is above every moving average
            c1, c0 = bars['close'][-2:]
            t1 = (c0 > mas["5"][-1] and c0 > mas["10"][-1]
                  and c0 > mas["20"][-1] and c0 > mas["60"][-1])

            # the 60-period MA slopes upward
            slope_60, err = signal.slope(mas["60"][-10:])
            if err is None or err > 5e-4:
                continue

            t2 = slope_60 >= 5e-4

            # the moving averages converge
            diff = np.abs(mas["5"][-6:-1] - mas["10"][-6:-1]) / mas["10"][-6:-1]
            overlap_5_10 = np.count_nonzero(diff < 5e-3)
            t3 = overlap_5_10 > 3

            diff = np.abs(mas["10"][-10:] - mas["60"][-10:]) / mas["60"][-10:]
            overlap_10_60 = np.count_nonzero(diff < 5e-3)
            t4 = overlap_10_60 > 5

            price_change = await sec.price_change(
                end, tf.shift(end, 8, frame_type), frame_type)
            result.append(
                [end, code, t1, t2, t3, t4, slope_60, price_change, True])
            if t1 and t2 and t3 and t4:
                print("FIRED:", [
                    end, code, t1, t2, t3, t4, slope_60, price_change, True
                ])
        except Exception:
            pass

    return result
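# --- Hedged illustration (not project code) ---------------------------------------
# The "MA convergence" test above counts how many recent frames two moving averages
# stay within 0.5% of each other. A standalone numpy version of that check:
import numpy as np

def overlap_count(ma_fast: np.ndarray, ma_slow: np.ndarray,
                  window: int, tol: float = 5e-3) -> int:
    diff = np.abs(ma_fast[-window:] - ma_slow[-window:]) / ma_slow[-window:]
    return int(np.count_nonzero(diff < tol))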
async def scan(self,
               frame_type: Union[str, FrameType] = FrameType.DAY,
               end: Frame = None,
               codes: List[str] = None):
    logger.info("running momentum scan at %s level", frame_type)
    if end is None:
        end = arrow.now(cfg.tz).datetime

    assert type(end) in (datetime.date, datetime.datetime)

    frame_type = FrameType(frame_type)
    ft = frame_type.value
    codes = codes or Securities().choose(['stock'])
    day_bars = {}
    async for code, bars in Security.load_bars_batch(codes, end, 2,
                                                     FrameType.DAY):
        day_bars[code] = bars

    if len(day_bars) == 0:
        return

    async for code, bars in Security.load_bars_batch(codes, end, 11,
                                                     frame_type):
        if len(bars) < 11:
            continue

        fired = bars[-1]['frame']
        day_bar = day_bars.get(code)
        if day_bar is None:
            continue

        c1, c0 = day_bar[-2:]['close']
        cmin = min(bars['close'])

        # still in decline, or has already risen too much
        if c0 == cmin or (c0 / c1 - 1) > self.baseline("up_limit"):
            continue

        ma5 = signal.moving_average(bars['close'], 5)

        err, (a, b, c), (vx, _) = signal.polyfit(ma5[-7:] / ma5[-7])
        # the fit failed, or momentum is insufficient
        if err > self.baseline(f"ma5:{ft}:err") or a < self.baseline(f"ma5:{ft}:a"):
            continue

        # the signal should have just appeared, i.e. still within the window
        vx_range = self.baseline(f"ma5:{ft}:vx")
        if not vx_range[0] < vx < vx_range[1]:
            continue

        p = np.poly1d((a, b, c))
        y = p(9) / p(6) - 1
        # the predicted ma5 gain over the next three frames is not large enough
        if y < self.baseline(f"ma5:{ft}:y"):
            continue

        sec = Security(code)

        if frame_type == FrameType.DAY:
            start = tf.shift(tf.floor(end, frame_type), -249, frame_type)
            bars250 = await sec.load_bars(start, end, frame_type)
            ma60 = signal.moving_average(bars250['close'], 60)
            ma120 = signal.moving_average(bars250['close'], 120)
            ma250 = signal.moving_average(bars250['close'], 250)

            # no moving average pressing down from above
            if (c0 > ma60[-1]) and (c0 > ma120[-1]) and (c0 > ma250[-1]):
                logger.info("%s, %s, %s, %s, %s, %s", sec, round(a, 4),
                            round(b, 4), round(vx, 1),
                            round(c0 / c1 - 1, 3), round(y, 3))
                await self.enter_stock_pool(code, fired, frame_type,
                                            a=a, b=b, err=err, y=y,
                                            vx=self.fit_win - vx)
        elif frame_type == FrameType.WEEK:
            await self.enter_stock_pool(code, fired, frame_type,
                                        a=a, b=b, err=err, y=y,
                                        vx=self.fit_win - vx)
        elif frame_type == FrameType.MIN30:
            await self.fire_trade_signal('long', code, fired, frame_type,
                                         a=a, b=b, err=err, y=y,
                                         vx=self.fit_win - vx)
async def _test_201_start_job_validation(self):
    # fixme: recover this testcase later
    secs = Securities()
    with mock.patch.object(secs, "choose", return_value=["000001.XSHE"]):
        await omega.core.sanity.start_validation()
async def fuzzy_match(request):
    query = request.args.get('query')
    results = Securities().fuzzy_match(query)
    dumps = functools.partial(json.dumps, cls=MyJsonDumper)
    return response.json(results, dumps=dumps)