def get_x_y_timestamps(self, start_timestamp, end_timestamp):
    """Split [start_timestamp, end_timestamp] into consecutive (x, y) window pairs.

    Each x marks a window start and y = next_date(x, self.predict_range) its
    end; pairs are produced only while y stays within end_timestamp.
    """
    xs, ys = [], []
    window_start = start_timestamp
    window_end = next_date(window_start, self.predict_range)
    while window_end <= end_timestamp:
        xs.append(window_start)
        ys.append(window_end)
        # Slide: the previous window's end becomes the next window's start.
        window_start, window_end = window_end, next_date(window_end, self.predict_range)
    return xs, ys
def record_stock_data(data_provider="em", entity_provider="em", sleeping_time=2):
    """Record A-share metadata, daily hfq quotes and block (concept/industry) data.

    Side effects: writes recorder output to the local store, pushes newly
    listed concept blocks to an eastmoney group, and emails on that failure.

    :param data_provider: kdata/block quote provider name.
    :param entity_provider: entity-list provider name.
    :param sleeping_time: seconds to sleep between recorder requests.
    """
    # A-share entity list
    run_data_recorder(domain=Stock, data_provider=data_provider, force_update=False)
    # A-share hfq (post-adjusted) daily kdata
    run_data_recorder(
        domain=Stock1dHfqKdata,
        data_provider=data_provider,
        entity_provider=entity_provider,
        day_data=True,
        sleeping_time=sleeping_time,
    )
    # Blocks (concepts, industries)
    run_data_recorder(domain=Block, data_provider="eastmoney", force_update=False)
    # Block daily quotes (concepts, industries)
    run_data_recorder(domain=Block1dKdata, data_provider="em", day_data=True, sleeping_time=sleeping_time)
    # Report concepts listed within the last 90 days
    email_action = EmailInformer()
    list_date = next_date(current_date(), -90)
    df = Block.query_data(
        filters=[Block.category == BlockCategory.concept.value, Block.list_date >= list_date], index="entity_id"
    )
    # add them to eastmoney; best-effort — failure is emailed, not raised
    try:
        add_to_eastmoney(codes=df["code"], entity_type="block", group="新概念", over_write=False)
    except Exception as e:
        email_action.send_message(
            zvt_config["email_username"], f"report_concept error", "report_concept error:{}".format(e)
        )
def init_selectors(self, entity_ids, entity_schema, exchanges, codes, start_timestamp, end_timestamp, adjust_type=None):
    """Build the daily-level gold-cross selector and register it on self.selectors."""
    # Daily strategy: pull data 50 days earlier so the indicator can warm up.
    start_timestamp = next_date(start_timestamp, -50)
    selector = TargetSelector(
        entity_ids=entity_ids,
        entity_schema=entity_schema,
        exchanges=exchanges,
        codes=codes,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        provider='joinquant',
        level=IntervalLevel.LEVEL_1DAY,
        long_threshold=0.7,
    )
    gold_cross = GoldCrossFactor(
        entity_ids=entity_ids,
        entity_schema=entity_schema,
        exchanges=exchanges,
        codes=codes,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        provider='joinquant',
        level=IntervalLevel.LEVEL_1DAY,
    )
    selector.add_filter_factor(gold_cross)
    self.selectors.append(selector)
def record(self, entity, start, end, size, timestamps):
    """Fetch dragon-and-tiger (龙虎榜) events for one entity and persist them.

    Builds one flat row per trade event, spreading each listed trading
    department (营业部) into dep{rank}/dep_{rank} columns — no underscore for
    the buy side (direction "0"), underscore for the sell side.
    """
    if start:
        start_date = to_time_str(next_date(start))
    else:
        start_date = None
    datas = em_api.get_dragon_and_tiger(code=entity.code, start_date=start_date)
    if datas:
        records = []
        for data in datas:
            timestamp = to_pd_timestamp(data["TRADE_DATE"])
            record = {
                # Composite id keeps one row per (entity, trade event, day)
                "id": "{}_{}_{}".format(entity.id, data["TRADE_ID"], to_time_str(timestamp, fmt=TIME_FORMAT_DAY)),
                "entity_id": entity.id,
                "timestamp": timestamp,
                "code": entity.code,
                "name": entity.name,
                "reason": data["EXPLANATION"],
                "turnover": data["ACCUM_AMOUNT"],
                "change_pct": data["CHANGE_RATE"],
                "net_in": data["NET_BUY"],
            }
            # Trading-department list for this event
            deps = data["LIST"]
            for dep in deps:
                flag = "" if dep["TRADE_DIRECTION"] == "0" else "_"
                rank = dep["RANK"]
                dep_name = f"dep{flag}{rank}"
                dep_in = f"{dep_name}_in"
                dep_out = f"{dep_name}_out"
                dep_rate = f"{dep_name}_rate"
                record[dep_name] = dep["OPERATEDEPT_NAME"]
                record[dep_in] = dep["BUY_AMT_REAL"]
                record[dep_out] = dep["SELL_AMT_REAL"]
                # Net ratio; missing (falsy) buy/sell ratios are treated as 0
                record[dep_rate] = (dep["BUY_RATIO"] if dep["BUY_RATIO"] else 0) - (
                    dep["SELL_RATIO"] if dep["SELL_RATIO"] else 0
                )
            records.append(record)
        df = pd.DataFrame.from_records(records)
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)
    else:
        self.logger.info(f"no data for {entity.id}")
def get_good_players(timestamp=None, recent_days=400, intervals=(3, 5, 10)):
    """Return trading departments (营业部) with a good recent success rate.

    Bug fix: the previous default ``timestamp=current_date()`` was evaluated
    once at import time, so a long-running process silently used a stale date.
    The default is now resolved at call time; passing an explicit timestamp
    behaves exactly as before.

    :param timestamp: reference date; defaults to today's date at call time.
    :param recent_days: length of the evaluation window in days.
    :param intervals: success-rate horizons (must match rate_* columns below).
    :return: list of department names passing all rate thresholds.
    """
    if timestamp is None:
        timestamp = current_date()
    # Leave room after end_timestamp so the longest interval can still be evaluated.
    end_timestamp = next_date(timestamp, -intervals[-1] - 30)
    # recent year
    start_timestamp = next_date(end_timestamp, -recent_days)
    print(f"{start_timestamp} to {end_timestamp}")
    # Departments with a big presence over the recent year
    players = get_big_players(start_timestamp=start_timestamp, end_timestamp=end_timestamp)
    logger.info(players)
    df = get_player_success_rate(
        start_timestamp=start_timestamp, end_timestamp=end_timestamp, intervals=intervals, players=players
    )
    good_players = df[(df["rate_3"] > 0.4) & (df["rate_5"] > 0.3) & (df["rate_10"] > 0.3)].index.tolist()
    return good_players
def get_player_performance(start_timestamp, end_timestamp=None, days=5, players="机构专用", provider="em", buy_rate=5):
    """Measure price change ``days`` trading days after each dragon-and-tiger
    appearance of the given players.

    A row matches when a player appears in any of dep1..dep5 with a buy rate
    >= ``buy_rate``.

    :param players: one department name or a list of names.
    :raises AssertionError: if ``players`` is neither str nor list.
    :return: DataFrame with entity_id, timestamp and change_pct columns.
    """
    filters = []
    if isinstance(players, str):
        players = [players]
    if isinstance(players, list):
        for player in players:
            filters.append(
                or_(
                    and_(DragonAndTiger.dep1 == player, DragonAndTiger.dep1_rate >= buy_rate),
                    and_(DragonAndTiger.dep2 == player, DragonAndTiger.dep2_rate >= buy_rate),
                    and_(DragonAndTiger.dep3 == player, DragonAndTiger.dep3_rate >= buy_rate),
                    and_(DragonAndTiger.dep4 == player, DragonAndTiger.dep4_rate >= buy_rate),
                    and_(DragonAndTiger.dep5 == player, DragonAndTiger.dep5_rate >= buy_rate),
                )
            )
    else:
        raise AssertionError("players should be list or str type")
    df = DragonAndTiger.query_data(
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        filters=filters,
        index=["entity_id", "timestamp"],
        provider=provider,
    )
    # Keep the first appearance per (entity, day)
    df = df[~df.index.duplicated(keep="first")]
    records = []
    for entity_id, timestamp in df.index:
        # Fetch enough calendar days to cover `days` trading days (weekends/holidays)
        end_date = next_date(timestamp, days + round(days + days * 2 / 5 + 30))
        kdata = Stock1dHfqKdata.query_data(
            entity_id=entity_id,
            start_timestamp=timestamp,
            end_timestamp=end_date,
            provider=provider,
            index="timestamp",
        )
        if len(kdata) <= days:
            # Bug fix: this was `break`, which aborted ALL remaining entities
            # once a single recent appearance lacked enough kdata; the log
            # message says "ignore", so only this record is skipped now.
            logger.warning(f"ignore {timestamp} -> end_timestamp: {end_date}")
            continue
        close = kdata["close"]
        # kdata is indexed by timestamp, so use explicit positional access
        # instead of the deprecated/ambiguous `close[days]`.
        change_pct = (close.iloc[days] - close.iloc[0]) / close.iloc[0]
        records.append({"entity_id": entity_id, "timestamp": timestamp, "change_pct": change_pct})
    return pd.DataFrame.from_records(records)
def get_top_fund_holding_stocks(timestamp=None, pct=0.3, by=None):
    """Return the top ``pct`` of stocks by fund holdings.

    Quarterly reports are generally published within 1 month of report_date
    and annual reports within a few months, so the most recent report period
    is used — the data is inherently lagged; a rough, "vaguely right" view.

    :param by: None -> rank by absolute fund-held market cap;
        "trading" -> rank by ratio to circulating market cap;
        "all" -> rank by ratio to total market cap.
    :raises ValueError: for any other truthy ``by`` value. (Bug fix: this
        previously fell through and crashed later with an UnboundLocalError
        on ``columns``.)
    :return: single-column DataFrame of the top stocks.
    """
    if not timestamp:
        timestamp = now_pd_timestamp()
    # Validate / resolve the ratio mode up front so a bad value fails fast.
    if by == "trading":
        columns = ["entity_id", "circulating_market_cap"]
    elif by == "all":
        columns = ["entity_id", "market_cap"]
    elif by:
        raise ValueError(f"unsupported by: {by!r}, expected None, 'trading' or 'all'")
    report_date = get_recent_report_date(timestamp, 1)
    fund_cap_df = FundStock.query_data(
        filters=[
            FundStock.report_date >= report_date,
            FundStock.timestamp <= timestamp,
        ],
        columns=["stock_id", "market_cap"],
    )
    fund_cap_df = fund_cap_df.groupby("stock_id")["market_cap"].sum().sort_values(ascending=False)
    # Rank directly by held market cap
    if not by:
        s = fund_cap_df.iloc[: int(len(fund_cap_df) * pct)]
        return s.to_frame()
    entity_ids = fund_cap_df.index.tolist()
    # Average valuation over the preceding 30 days
    start_timestamp = next_date(timestamp, -30)
    cap_df = StockValuation.query_data(
        entity_ids=entity_ids,
        filters=[
            StockValuation.timestamp >= start_timestamp,
            StockValuation.timestamp <= timestamp,
        ],
        columns=columns,
    )
    if by == "trading":
        cap_df = cap_df.rename(columns={"circulating_market_cap": "cap"})
    elif by == "all":
        cap_df = cap_df.rename(columns={"market_cap": "cap"})
    cap_df = cap_df.groupby("entity_id").mean()
    result_df = pd.concat([cap_df, fund_cap_df], axis=1, join="inner")
    # Fund-held cap as a fraction of the chosen cap base
    result_df["pct"] = result_df["market_cap"] / result_df["cap"]
    pct_df = result_df["pct"].sort_values(ascending=False)
    s = pct_df.iloc[: int(len(pct_df) * pct)]
    return s.to_frame()
def get_top_performance_entities(
    entity_type="stock",
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    return_type=None,
    adjust_type: Union[AdjustType, str] = None,
    entity_filters=None,
    kdata_filters=None,
    show_name=False,
    list_days=None,
    entity_provider=None,
    data_provider=None,
):
    """Rank entities by close-price change over [start_timestamp, end_timestamp].

    :param entity_filters: extra filters on the entity schema (not mutated).
    :param kdata_filters: extra filters on the kdata schema (not mutated).
    :param list_days: if set, only entities listed at least this many days
        before start_timestamp are considered.
    :return: result of get_top_entities, or (None, None) when no entity
        passes the entity filters.
    """
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    if not entity_filters:
        entity_filters = []
    if list_days:
        entity_schema = get_entity_schema(entity_type=entity_type)
        list_date = next_date(start_timestamp, -list_days)
        # Bug fix: `entity_filters += [...]` extended the CALLER's list in
        # place; build a new list instead, consistent with the non-mutating
        # kdata_filters handling below.
        entity_filters = entity_filters + [entity_schema.list_date <= list_date]
    filter_entities = get_entity_ids(
        provider=entity_provider,
        entity_type=entity_type,
        filters=entity_filters,
    )
    if not filter_entities:
        logger.warning("no entities selected")
        return None, None
    if not kdata_filters:
        kdata_filters = []
    kdata_filters = kdata_filters + [data_schema.entity_id.in_(filter_entities)]
    return get_top_entities(
        data_schema=data_schema,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        column="close",
        pct=pct,
        method=WindowMethod.change,
        return_type=return_type,
        kdata_filters=kdata_filters,
        show_name=show_name,
        data_provider=data_provider,
    )
def get_top_performance_entities(
    entity_type="stock",
    start_timestamp=None,
    end_timestamp=None,
    pct=0.1,
    return_type=None,
    adjust_type: Union[AdjustType, str] = None,
    filters=None,
    show_name=False,
    list_days=None,
    entity_provider=None,
    data_provider=None,
):
    """Rank entities by close-price change over the window, excluding recent listings
    when ``list_days`` is given."""
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    if list_days:
        # Drop entities that listed within the last `list_days` days.
        entity_schema = get_entity_schema(entity_type=entity_type)
        list_date = next_date(start_timestamp, -list_days)
        ignore_entities = get_entity_ids(
            provider=entity_provider,
            entity_type=entity_type,
            filters=[entity_schema.list_date >= list_date],
        )
        if ignore_entities:
            logger.info(f"ignore size: {len(ignore_entities)}")
            logger.info(f"ignore entities: {ignore_entities}")
            exclusion = [data_schema.entity_id.notin_(ignore_entities)]
            filters = (filters + exclusion) if filters else exclusion
    return get_top_entities(
        data_schema=data_schema,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        column="close",
        pct=pct,
        method=WindowMethod.change,
        return_type=return_type,
        filters=filters,
        show_name=show_name,
        data_provider=data_provider,
    )
def get_entity_ids_by_filter(provider="em", ignore_st=True, ignore_new_stock=True, target_date=None, entity_schema=Stock, entity_ids=None):
    """Return entity ids, optionally excluding ST-flagged names and recent listings."""
    filters = []
    if ignore_new_stock:
        if not target_date:
            target_date = current_date()
        # Require at least one year of listing history before target_date.
        pre_year = next_date(target_date, -365)
        filters.append(entity_schema.timestamp <= pre_year)
    if ignore_st:
        # Exclude delisting (退) / ST / *ST names.
        filters.extend(
            [
                entity_schema.name.not_like("%退%"),
                entity_schema.name.not_like("%ST%"),
                entity_schema.name.not_like("%*ST%"),
            ]
        )
    return get_entity_ids(provider=provider, entity_schema=entity_schema, filters=filters, entity_ids=entity_ids)
def get_entity_list_by_cap(timestamp, cap_start, cap_end, entity_type="stock", provider=None, adjust_type=None, retry_times=20):
    """Return entity ids whose estimated cap on `timestamp` lies in [cap_start, cap_end].

    cap is estimated as turnover / turnover_rate.  When the date has no kdata
    (e.g. a holiday) the lookup moves to the next day, at most `retry_times`
    times; an exhausted retry budget yields [].
    """
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    kdata_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=adjust_type)
    df = kdata_schema.query_data(
        provider=provider,
        filters=[kdata_schema.timestamp == to_pd_timestamp(timestamp)],
        index="entity_id",
    )
    if not pd_is_not_null(df):
        # No data for this date: give up or retry with the following day.
        if retry_times == 0:
            return []
        return get_entity_list_by_cap(
            timestamp=next_date(timestamp, 1),
            cap_start=cap_start,
            cap_end=cap_end,
            entity_type=entity_type,
            provider=provider,
            adjust_type=adjust_type,
            retry_times=retry_times - 1,
        )
    # Estimated cap; masks built on the full frame align onto the
    # progressively filtered copy via .loc label alignment.
    df["cap"] = df["turnover"] / df["turnover_rate"]
    df_result = df.copy()
    if cap_start:
        df_result = df_result.loc[(df["cap"] >= cap_start)]
    if cap_end:
        df_result = df_result.loc[(df["cap"] <= cap_end)]
    return df_result.index.tolist()
def get_top_performance_entities(entity_type='stock', start_timestamp=None, end_timestamp=None, pct=0.1,
                                 return_type=None, adjust_type: Union[AdjustType, str] = None, filters=None,
                                 show_name=False, list_days=None):
    """Rank entities by close-price change over the window; stocks default to hfq kdata."""
    if not adjust_type and entity_type == 'stock':
        adjust_type = AdjustType.hfq
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    if list_days:
        # Skip entities that listed less than `list_days` days before the window.
        entity_schema = get_entity_schema(entity_type=entity_type)
        list_date = next_date(start_timestamp, -list_days)
        ignore_entities = get_entity_ids(entity_type=entity_type, filters=[entity_schema.list_date >= list_date])
        if ignore_entities:
            logger.info(f'ignore size: {len(ignore_entities)}')
            logger.info(f'ignore entities: {ignore_entities}')
            exclusion = [data_schema.entity_id.notin_(ignore_entities)]
            filters = (filters + exclusion) if filters else exclusion
    return get_top_entities(data_schema=data_schema, start_timestamp=start_timestamp,
                            end_timestamp=end_timestamp, column='close', pct=pct,
                            method=WindowMethod.change, return_type=return_type, filters=filters,
                            show_name=show_name)
def top_dragon_and_tiger(data_provider="em", start_timestamp="2021-01-01", end_timestamp="2022-01-01"):
    """Collect incoming dragon-and-tiger players for each month's top-30 performers.

    :return: concatenated player DataFrame sorted by its first two index levels.
    """
    dfs = []
    for start_date, end_date, df in get_top_performance_by_month(
        start_timestamp=start_timestamp, end_timestamp=end_timestamp, list_days=250, data_provider=data_provider
    ):
        # Removed dead code: `pre_month_start = pre_month_start_date(start_date)`
        # was computed but never used.
        # NOTE(review): end_timestamp below is the OVERALL end, not this
        # month's `end_date` — looks intentional (look-ahead allowed), verify.
        for entity_id in df.index[:30]:
            players = get_players(
                entity_id=entity_id,
                start_timestamp=next_date(start_date, 15),
                end_timestamp=end_timestamp,
                provider=data_provider,
                direction="in",
            )
            print(players)
            dfs.append(players)
    player_df = pd.concat(dfs, sort=True)
    return player_df.sort_index(level=[0, 1])
def record_block():
    """Record block (concept) metadata and quotes, then report new concepts.

    Retries forever with a 60s backoff until one full pass succeeds; every
    failure is logged and emailed.
    """
    while True:
        email_action = EmailInformer()
        try:
            Block.record_data(provider='eastmoney', force_update=False)
            # Only fetch quotes for concept blocks
            df = Block.query_data(filters=[Block.category == BlockCategory.concept.value], index='entity_id')
            entity_ids = df.index.tolist()
            Block1dKdata.record_data(provider='em', entity_ids=entity_ids)
            # Report concepts listed within the last 90 days
            list_date = next_date(current_date(), -90)
            df = Block.query_data(
                filters=[Block.category == BlockCategory.concept.value, Block.list_date >= list_date],
                index='entity_id')
            # add them to eastmoney
            try:
                try:
                    eastmoneypy.create_group('概念')
                except Exception:
                    # Bug fix: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit. Creation is best-effort —
                    # the group may already exist.
                    pass
                for code in df['code']:
                    eastmoneypy.add_to_group(code, group_name='概念', entity_type='block')
            except Exception as e:
                email_action.send_message(zvt_config['email_username'], f'report_concept error',
                                          'report_concept error:{}'.format(e))
            email_action.send_message(zvt_config['email_username'], 'record block finished', '')
            break
        except Exception as e:
            msg = f'record block error:{e}'
            logger.exception(msg)
            email_action.send_message(zvt_config['email_username'], 'record block error', msg)
            time.sleep(60)
def report_top_stats(
    entity_provider,
    data_provider,
    periods=[7, 30, 180, 365],
    ignore_new_stock=True,
    entity_type="stock",
    adjust_type=None,
    top_count=30,
    turnover_threshold=100000000,
    turnover_rate_threshold=0.02,
    em_group_over_write=True,
):
    """Email top/bottom performance tables over several look-back periods.

    For each period, the filtered universe (optionally listed >= 1 year, and
    liquid enough on the latest kdata day) is ranked; winners/losers are
    pushed to eastmoney groups and three summary emails are sent.

    NOTE(review): `periods` is a mutable default argument — it is never
    mutated here, but consider `periods=None` with a call-time default.
    """
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    kdata_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    entity_schema = get_entity_schema(entity_type=entity_type)
    # The most recent kdata day defines "current"
    latest_day = kdata_schema.query_data(
        provider=data_provider, order=kdata_schema.timestamp.desc(), limit=1, return_type="domain")
    current_timestamp = latest_day[0].timestamp
    email_action = EmailInformer()
    # Listed for at least one year
    filter_entity_ids = []
    if ignore_new_stock:
        pre_year = next_date(current_timestamp, -365)
        entity_ids = get_entity_ids(
            provider=entity_provider, entity_schema=entity_schema,
            filters=[entity_schema.timestamp <= pre_year])
        if not entity_ids:
            msg = f"{entity_type} no entity_ids listed one year"
            logger.error(msg)
            email_action.send_message(zvt_config["email_username"], "report_top_stats error", msg)
            return
        filter_entity_ids = entity_ids
    # Liquidity filter on the latest day
    filter_turnover_df = kdata_schema.query_data(
        filters=[
            kdata_schema.turnover >= turnover_threshold,
            kdata_schema.turnover_rate >= turnover_rate_threshold,
        ],
        provider=data_provider,
        start_timestamp=current_timestamp,
        index="entity_id",
        columns=["entity_id", "code"],
    )
    if filter_entity_ids:
        filter_entity_ids = set(filter_entity_ids) & set(filter_turnover_df.index.tolist())
    else:
        filter_entity_ids = filter_turnover_df.index.tolist()
    if not filter_entity_ids:
        msg = f"{entity_type} no entity_ids selected"
        logger.error(msg)
        email_action.send_message(zvt_config["email_username"], "report_top_stats error", msg)
        return
    logger.info(f"{entity_type} filter_entity_ids size: {len(filter_entity_ids)}")
    filters = [kdata_schema.entity_id.in_(filter_entity_ids)]
    stats = []
    ups = []
    downs = []
    for period in periods:
        start = next_date(current_timestamp, -period)
        df, _ = get_top_performance_entities(
            entity_type=entity_type,
            start_timestamp=start,
            filters=filters,
            pct=1,
            show_name=True,
            entity_provider=entity_provider,
            data_provider=data_provider,
        )
        df.rename(columns={"score": f"score_{period}"}, inplace=True)
        ups.append(tabulate(df.iloc[:top_count], headers="keys"))
        downs.append(tabulate(df.iloc[-top_count:], headers="keys"))
        stats.append(tabulate(df.describe(), headers="keys"))
        # Best performers of the last week / month
        if period == 7 or period == 30:
            try:
                codes = [decode_entity_id(entity_id)[2] for entity_id in df.index[:top_count]]
                add_to_eastmoney(codes=codes, entity_type=entity_type, group="最靓仔",
                                 over_write=em_group_over_write)
            except Exception as e:
                logger.exception(e)
                email_action.send_message(
                    zvt_config["email_username"], f"report_top_stats error",
                    "report_top_stats error:{}".format(e))
        # Biggest losers within a year
        if period == 365:
            try:
                codes = [decode_entity_id(entity_id)[2] for entity_id in df.index[-top_count:]]
                add_to_eastmoney(codes=codes, entity_type=entity_type, group="谁有我惨",
                                 over_write=em_group_over_write)
            except Exception as e:
                logger.exception(e)
                email_action.send_message(
                    zvt_config["email_username"], f"report_top_stats error",
                    "report_top_stats error:{}".format(e))
    # Three summary emails: describe() stats, gainers, losers
    msg = "\n"
    for s in stats:
        msg = msg + s + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}统计报告", msg)
    msg = "\n"
    for up in ups:
        msg = msg + up + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}涨幅统计报告", msg)
    msg = "\n"
    for down in downs:
        msg = msg + down + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}跌幅统计报告", msg)
def record_dragon_tiger(data_provider="em", entity_provider="em", sleeping_time=2):
    """Record dragon-and-tiger data, then report liquid stocks recently bought
    by historically strong trading departments (营业部)."""
    # Dragon-and-tiger board data
    run_data_recorder(
        domain=DragonAndTiger,
        data_provider=data_provider,
        entity_provider=entity_provider,
        day_data=True,
        sleeping_time=sleeping_time,
    )
    email_action = EmailInformer()
    # recent year
    start_timestamp = next_date(current_date(), -400)
    # Departments with strong records over the recent year
    players = get_big_players(start_timestamp=start_timestamp)
    # Stocks where those departments appeared in the last 30 days
    recent_date = next_date(current_date(), -30)
    selected = []
    for player in players:
        # A match: the player in any of dep1..dep5 with a buy rate >= 5
        filters = [
            or_(
                and_(DragonAndTiger.dep1 == player, DragonAndTiger.dep1_rate >= 5),
                and_(DragonAndTiger.dep2 == player, DragonAndTiger.dep2_rate >= 5),
                and_(DragonAndTiger.dep3 == player, DragonAndTiger.dep3_rate >= 5),
                and_(DragonAndTiger.dep4 == player, DragonAndTiger.dep4_rate >= 5),
                and_(DragonAndTiger.dep5 == player, DragonAndTiger.dep5_rate >= 5),
            )
        ]
        df = DragonAndTiger.query_data(
            start_timestamp=recent_date,
            filters=filters,
            columns=[
                DragonAndTiger.timestamp, DragonAndTiger.entity_id, DragonAndTiger.code, DragonAndTiger.name
            ],
            index="entity_id",
        )
        selected = selected + df.index.tolist()
    if selected:
        selected = list(set(selected))
        # Keep only stocks that are still liquid on the latest kdata day
        target_date = get_latest_kdata_date(provider=data_provider, entity_type="stock", adjust_type="hfq")
        df = Stock1dHfqKdata.query_data(
            provider=data_provider,
            entity_ids=selected,
            filters=[
                Stock1dHfqKdata.turnover_rate > 0.02,
                Stock1dHfqKdata.timestamp == to_pd_timestamp(target_date),
                Stock1dHfqKdata.turnover > 300000000,
            ],
            index=["entity_id"],
        )
        inform(
            action=email_action,
            entity_ids=df.index.tolist(),
            target_date=current_date(),
            title="report 龙虎榜",
            entity_provider=entity_provider,
            entity_type="stock",
            em_group="重要指数",
            em_group_over_write=False,
        )
def report_top_entities(
    entity_provider,
    data_provider,
    periods=None,
    ignore_new_stock=True,
    ignore_st=True,
    entity_ids=None,
    entity_type="stock",
    adjust_type=None,
    top_count=30,
    turnover_threshold=100000000,
    turnover_rate_threshold=0.02,
    informer: EmailInformer = None,
    em_group=None,
    em_group_over_write=True,
    return_type=TopType.positive,
):
    """Report the top (or bottom) performers over several periods via informer/eastmoney.

    Retries the whole flow up to 10 times with a 30s backoff on any exception.
    NOTE(review): `informer` defaults to None but is dereferenced on the
    early-return error paths — confirm callers always pass one.
    """
    error_count = 0
    while error_count <= 10:
        try:
            if periods is None:
                periods = [7, 30, 365]
            if not adjust_type:
                adjust_type = default_adjust_type(entity_type=entity_type)
            kdata_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
            entity_schema = get_entity_schema(entity_type=entity_type)
            target_date = get_latest_kdata_date(
                provider=data_provider, entity_type=entity_type, adjust_type=adjust_type)
            # Universe filter: optionally drop ST names and recent listings
            filter_entity_ids = get_entity_ids_by_filter(
                provider=entity_provider,
                ignore_st=ignore_st,
                ignore_new_stock=ignore_new_stock,
                entity_schema=entity_schema,
                target_date=target_date,
                entity_ids=entity_ids,
            )
            if not filter_entity_ids:
                msg = f"{entity_type} no entity_ids selected"
                logger.error(msg)
                informer.send_message(zvt_config["email_username"], "report_top_stats error", msg)
                return
            # Liquidity filter on the latest kdata day
            filter_turnover_df = kdata_schema.query_data(
                filters=[
                    kdata_schema.turnover >= turnover_threshold,
                    kdata_schema.turnover_rate >= turnover_rate_threshold,
                ],
                provider=data_provider,
                start_timestamp=target_date,
                index="entity_id",
                columns=["entity_id", "code"],
            )
            if filter_entity_ids:
                filter_entity_ids = set(filter_entity_ids) & set(filter_turnover_df.index.tolist())
            else:
                filter_entity_ids = filter_turnover_df.index.tolist()
            if not filter_entity_ids:
                msg = f"{entity_type} no entity_ids selected"
                logger.error(msg)
                informer.send_message(zvt_config["email_username"], "report_top_stats error", msg)
                return
            logger.info(f"{entity_type} filter_entity_ids size: {len(filter_entity_ids)}")
            filters = [kdata_schema.entity_id.in_(filter_entity_ids)]
            selected = []
            for i, period in enumerate(periods):
                interval = period
                # Stretch the window over a weekend so it still covers the
                # intended number of trading days
                if target_date.weekday() + 1 < interval:
                    interval = interval + 2
                start = next_date(target_date, -interval)
                positive_df, negative_df = get_top_performance_entities(
                    entity_type=entity_type,
                    start_timestamp=start,
                    kdata_filters=filters,
                    pct=1,
                    show_name=True,
                    entity_provider=entity_provider,
                    data_provider=data_provider,
                    return_type=return_type,
                )
                if return_type == TopType.positive:
                    df = positive_df
                else:
                    df = negative_df
                selected = selected + df.index[:top_count].tolist()
            # De-duplicate while preserving first-seen order
            selected = list(dict.fromkeys(selected))
            inform(
                informer,
                entity_ids=selected,
                target_date=target_date,
                title=f"{entity_type} {em_group}({len(selected)})",
                entity_provider=entity_provider,
                entity_type=entity_type,
                em_group=em_group,
                em_group_over_write=em_group_over_write,
            )
            break
        except Exception as e:
            logger.exception("report error:{}".format(e))
            time.sleep(30)
            error_count = error_count + 1
def report_vol_up():
    """Select stocks breaking yearly/half-yearly MAs on high volume and email them.

    Retries forever with a 3 min backoff; an alert email is sent once the
    error count reaches 10.
    """
    # Bug fix: error_count was reset to 0 at the top of EVERY loop iteration,
    # so the failure-alert email at error_count == 10 could never fire.
    error_count = 0
    while True:
        email_action = EmailInformer()
        try:
            # Fetch kdata
            # StockTradeDay.record_data(provider='joinquant')
            # Stock1dKdata.record_data(provider='joinquant')
            latest_day: Stock1dHfqKdata = Stock1dHfqKdata.query_data(
                order=Stock1dHfqKdata.timestamp.desc(), limit=1, return_type='domain')
            target_date = latest_day[0].timestamp
            start_timestamp = next_date(target_date, -50)
            # Volume: top 40% by recent volume
            vol_df = get_top_volume_entities(entity_type='stock', start_timestamp=start_timestamp,
                                             end_timestamp=target_date, pct=0.4)
            current_entity_pool = vol_df.index.tolist()
            # Moving averages
            start = '2019-01-01'
            my_selector = TargetSelector(start_timestamp=start, end_timestamp=target_date,
                                         select_mode=SelectMode.condition_or)
            # add the factors
            factor1 = VolumeUpMaFactor(entity_ids=current_entity_pool, start_timestamp=start,
                                       end_timestamp=target_date, windows=[120, 250], over_mode='or')
            my_selector.add_factor(factor1)
            my_selector.run()
            long_stocks = my_selector.get_open_long_targets(timestamp=target_date)
            msg = 'no targets'
            if long_stocks:
                stocks = get_entities(provider='joinquant', entity_schema=Stock, entity_ids=long_stocks,
                                      return_type='domain')
                # add them to eastmoney
                try:
                    try:
                        # Bug fix: was a bare `except:`, which also swallowed
                        # KeyboardInterrupt/SystemExit; deletion is best-effort.
                        eastmoneypy.del_group('tech')
                    except Exception:
                        pass
                    eastmoneypy.create_group('tech')
                    for stock in stocks:
                        eastmoneypy.add_to_group(stock.code, group_name='tech')
                except Exception as e:
                    email_action.send_message(
                        zvt_config['email_username'], f'report_vol_up error',
                        'report_vol_up error:{}'.format(e))
                infos = stocks_with_info(stocks)
                msg = '\n'.join(infos) + '\n'
            logger.info(msg)
            email_action.send_message(zvt_config['email_username'],
                                      f'{target_date} 改进版放量突破(半)年线选股结果', msg)
            break
        except Exception as e:
            logger.exception('report_vol_up error:{}'.format(e))
            time.sleep(60 * 3)
            error_count = error_count + 1
            if error_count == 10:
                email_action.send_message(zvt_config['email_username'], f'report_vol_up error',
                                          'report_vol_up error:{}'.format(e))
def report_targets(
    factor_cls: Type[Factor],
    entity_provider,
    data_provider,
    title,
    entity_type="stock",
    informer: EmailInformer = None,
    em_group=None,
    em_group_over_write=True,
    filter_by_volume=True,
    adjust_type=None,
    start_timestamp="2019-01-01",
    **factor_kv,
):
    """Run a factor-based selector on a filtered universe and report the targets.

    Extra keyword args in ``factor_kv`` are forwarded to ``factor_cls``,
    except the recognized pre-filters ``turnover_threshold``,
    ``turnover_rate_threshold`` and ``entity_ids``, which narrow the entity
    pool first (``entity_ids`` is popped so the factor does not also get it).
    Retries up to 10 times with a 3 min backoff.
    """
    logger.info(
        f"entity_provider: {entity_provider}, data_provider: {data_provider}, entity_type: {entity_type}, start_timestamp: {start_timestamp}"
    )
    error_count = 0
    while error_count <= 10:
        try:
            if not adjust_type:
                adjust_type = default_adjust_type(entity_type=entity_type)
            target_date = get_latest_kdata_date(
                provider=data_provider, entity_type=entity_type, adjust_type=adjust_type)
            logger.info(f"target_date :{target_date}")
            current_entity_pool = None
            if filter_by_volume:
                # Volume filter: keep the top 40% by recent volume
                vol_df = get_top_volume_entities(
                    entity_type=entity_type,
                    start_timestamp=next_date(target_date, -30),
                    end_timestamp=target_date,
                    adjust_type=adjust_type,
                    pct=0.4,
                    data_provider=data_provider,
                )
                current_entity_pool = vol_df.index.tolist()
                logger.info(f"current_entity_pool({len(current_entity_pool)}): {current_entity_pool}")
            kdata_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY,
                                            adjust_type=adjust_type)
            filters = []
            if "turnover_threshold" in factor_kv:
                filters = filters + [kdata_schema.turnover >= factor_kv.get("turnover_threshold")]
            if "turnover_rate_threshold" in factor_kv:
                filters = filters + [kdata_schema.turnover_rate >= factor_kv.get("turnover_rate_threshold")]
            if filters:
                # Liquidity thresholds are applied on the latest day only
                filters = filters + [kdata_schema.timestamp == target_date]
                kdata_df = kdata_schema.query_data(
                    provider=data_provider, filters=filters, columns=["entity_id", "timestamp"],
                    index="entity_id")
                if current_entity_pool:
                    current_entity_pool = set(current_entity_pool) & set(kdata_df.index.tolist())
                else:
                    current_entity_pool = kdata_df.index.tolist()
            if "entity_ids" in factor_kv:
                # Explicit entity_ids narrow (or seed) the pool
                if current_entity_pool:
                    current_entity_pool = set(current_entity_pool) & set(factor_kv.pop("entity_ids"))
                else:
                    current_entity_pool = set(factor_kv.pop("entity_ids"))
            # add the factor
            my_selector = TargetSelector(
                start_timestamp=start_timestamp, end_timestamp=target_date,
                select_mode=SelectMode.condition_or)
            entity_schema = get_entity_schema(entity_type=entity_type)
            tech_factor = factor_cls(
                entity_schema=entity_schema,
                entity_provider=entity_provider,
                provider=data_provider,
                entity_ids=current_entity_pool,
                start_timestamp=start_timestamp,
                end_timestamp=target_date,
                adjust_type=adjust_type,
                **factor_kv,
            )
            my_selector.add_factor(tech_factor)
            my_selector.run()
            long_stocks = my_selector.get_open_long_targets(timestamp=target_date)
            inform(
                informer,
                entity_ids=long_stocks,
                target_date=target_date,
                title=title,
                entity_provider=entity_provider,
                entity_type=entity_type,
                em_group=em_group,
                em_group_over_write=em_group_over_write,
            )
            break
        except Exception as e:
            logger.exception("report error:{}".format(e))
            time.sleep(60 * 3)
            error_count = error_count + 1
            if error_count == 10:
                # NOTE(review): informer defaults to None — confirm callers pass one
                informer.send_message(
                    zvt_config["email_username"],
                    f"report {entity_type}{factor_cls.__name__} error",
                    f"report {entity_type}{factor_cls.__name__} error: {e}",
                )
def report_targets(
    factor_cls: Type[Factor],
    entity_provider,
    data_provider,
    title,
    entity_type="stock",
    em_group=None,
    em_group_over_write=True,
    filter_by_volume=True,
    adjust_type=None,
    start_timestamp="2019-01-01",
    **factor_kv,
):
    """Run a factor-based selector and email/push the selected targets.

    Extra keyword args in ``factor_kv`` are forwarded to ``factor_cls``.
    Retries up to 10 times with a 3 min backoff; an error email is sent
    after the 10th failure.
    """
    logger.info(
        f"entity_provider: {entity_provider}, data_provider: {data_provider}, entity_type: {entity_type}, start_timestamp: {start_timestamp}"
    )
    error_count = 0
    while error_count <= 10:
        email_action = EmailInformer()
        try:
            if entity_type == "stock" and not adjust_type:
                adjust_type = AdjustType.hfq
            target_date = get_latest_kdata_date(
                provider=data_provider, entity_type=entity_type, adjust_type=adjust_type)
            logger.info(f"target_date :{target_date}")
            current_entity_pool = None
            if filter_by_volume:
                # Volume filter: keep the top 40% by recent volume
                vol_df = get_top_volume_entities(
                    entity_type=entity_type,
                    start_timestamp=next_date(target_date, -30),
                    end_timestamp=target_date,
                    adjust_type=adjust_type,
                    pct=0.4,
                )
                current_entity_pool = vol_df.index.tolist()
                logger.info(f"current_entity_pool({len(current_entity_pool)}): {current_entity_pool}")
            # add the factor
            my_selector = TargetSelector(
                start_timestamp=start_timestamp, end_timestamp=target_date,
                select_mode=SelectMode.condition_or)
            entity_schema = get_entity_schema(entity_type=entity_type)
            tech_factor = factor_cls(
                entity_schema=entity_schema,
                entity_provider=entity_provider,
                provider=data_provider,
                entity_ids=current_entity_pool,
                start_timestamp=start_timestamp,
                end_timestamp=target_date,
                adjust_type=adjust_type,
                **factor_kv,
            )
            my_selector.add_factor(tech_factor)
            my_selector.run()
            long_stocks = my_selector.get_open_long_targets(timestamp=target_date)
            msg = "no targets"
            if long_stocks:
                entities = get_entities(
                    provider=entity_provider, entity_type=entity_type, entity_ids=long_stocks,
                    return_type="domain")
                if em_group:
                    # Best-effort push to the eastmoney group; failure is emailed, not raised
                    try:
                        codes = [entity.code for entity in entities]
                        add_to_eastmoney(codes=codes, entity_type=entity_type, group=em_group,
                                         over_write=em_group_over_write)
                    except Exception as e:
                        email_action.send_message(
                            zvt_config["email_username"],
                            f"report {entity_type}{factor_cls.__name__} error",
                            f"report {entity_type}{factor_cls.__name__} error: {e}",
                        )
                infos = [f"{entity.name}({entity.code})" for entity in entities]
                msg = "\n".join(infos) + "\n"
            logger.info(msg)
            email_action.send_message(zvt_config["email_username"], f"{target_date} {title}", msg)
            break
        except Exception as e:
            logger.exception("report error:{}".format(e))
            time.sleep(60 * 3)
            error_count = error_count + 1
            if error_count == 10:
                email_action.send_message(
                    zvt_config["email_username"],
                    f"report {entity_type}{factor_cls.__name__} error",
                    f"report {entity_type}{factor_cls.__name__} error: {e}",
                )