def get_trading_signals_figure(order_reader: OrderReader, entity_id: str, start_timestamp=None, end_timestamp=None, adjust_type=None):
    """Draw a kline figure for ``entity_id`` annotated with the trader's buy/sell orders.

    Fix: removed a leftover debug ``print(df.tail())`` that wrote the annotation
    frame to stdout on every call.

    :param order_reader: reader over the trader's recorded orders
    :param entity_id: the single entity to chart
    :param start_timestamp: chart window start; defaults to the order reader's start
    :param end_timestamp: chart window end; defaults to the order reader's end
    :param adjust_type: price adjustment used to pick the kdata schema
    :return: a plotly kline figure (not shown, height 800)
    """
    entity_type, _, _ = decode_entity_id(entity_id)
    data_schema = get_kdata_schema(entity_type=entity_type, level=order_reader.level, adjust_type=adjust_type)
    # default the chart window to the order reader's own range
    if not start_timestamp:
        start_timestamp = order_reader.start_timestamp
    if not end_timestamp:
        end_timestamp = order_reader.end_timestamp
    kdata_reader = DataReader(
        entity_ids=[entity_id],
        data_schema=data_schema,
        entity_schema=zvt_context.tradable_schema_map.get(entity_type),
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        level=order_reader.level,
    )
    # generate the annotation df
    order_reader.move_on(timeout=0)
    df = order_reader.data_df.copy()
    df = df[df.entity_id == entity_id].copy()
    if pd_is_not_null(df):
        df["value"] = df["order_price"]
        df["flag"] = df["order_type"].apply(lambda x: order_type_flag(x))
        df["color"] = df["order_type"].apply(lambda x: order_type_color(x))
    drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=df)
    return drawer.draw_kline(show=False, height=800)
def get_performance(
    entity_ids,
    start_timestamp=None,
    end_timestamp=None,
    adjust_type: Union[AdjustType, str] = None,
    data_provider=None,
):
    """Rank the given entities by close-price change over the window.

    The entity type (and thus the kdata schema) is inferred from the first id;
    when no adjust type is given the default for that entity type is used.
    """
    entity_type = decode_entity_id(entity_ids[0])[0]
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    ranked, _ = get_top_entities(
        data_schema=schema,
        column="close",
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        pct=1,
        method=WindowMethod.change,
        return_type=TopType.positive,
        kdata_filters=[schema.entity_id.in_(entity_ids)],
        data_provider=data_provider,
    )
    return ranked
def to_em_sec_id(entity_id):
    """Translate a zvt entity_id into eastmoney's ``<market-flag>.<code>`` secid."""
    entity_type, exchange, code = decode_entity_id(entity_id)
    # dominant future contract: eastmoney expects an "m" suffix
    if entity_type == "future" and code[-1].isalpha():
        code += "m"
    # CNYC currency pairs live on a fixed market flag
    if entity_type == "currency" and "CNYC" in code:
        return f"120.{code}"
    return ".".join((to_em_entity_flag(exchange), code))
def get_performance(entity_ids, start_timestamp=None, end_timestamp=None, adjust_type: Union[AdjustType, str] = None):
    """Rank the given entities by close-price change over the window.

    Stocks default to backward adjustment (hfq) when no adjust type is given.
    """
    entity_type = decode_entity_id(entity_ids[0])[0]
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    ranked, _ = get_top_entities(
        data_schema=schema,
        column='close',
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        pct=1,
        method='change',
        return_type='positive',
        filters=[schema.entity_id.in_(entity_ids)],
    )
    return ranked
def to_em_fc(entity_id):
    """Translate a zvt entity_id into eastmoney's "fc" security code.

    HK stocks use the raw code; A-shares and US stocks get an exchange suffix.
    Unknown type/exchange combinations fall through and return None.
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    if entity_type == "stockhk":
        return code
    if entity_type == "stock":
        suffix = {"sh": "01", "sz": "02"}.get(exchange)
        if suffix is not None:
            return f"{code}{suffix}"
    elif entity_type == "stockus":
        suffix = {"nyse": ".N", "nasdaq": ".O"}.get(exchange)
        if suffix is not None:
            return f"{code}{suffix}"
def composite_all(data_schema, column, timestamp, entity_ids=None, filters=None):
    """Draw a pie chart of ``column`` across entities at one single timestamp.

    Fix: the timestamp filter was appended as a one-element *list*
    (``filters.append([expr])``), nesting a list inside the filters list instead
    of the SQLAlchemy expression itself; now the expression is appended directly,
    matching the `else` branch.

    :param data_schema: schema to query
    :param column: column name or schema column object
    :param timestamp: the single timestamp to slice at
    :param entity_ids: optional entity filter
    :param filters: optional extra query filters (mutated in place when given)
    """
    if type(column) is not str:
        column = column.name
    if filters:
        filters.append(data_schema.timestamp == to_pd_timestamp(timestamp))
    else:
        filters = [data_schema.timestamp == to_pd_timestamp(timestamp)]
    df = data_schema.query_data(entity_ids=entity_ids, columns=['entity_id', 'timestamp', column], filters=filters, index='entity_id')
    entity_type, exchange, _ = decode_entity_id(df['entity_id'].iloc[0])
    # one row, entities as columns -> pie slices
    pie_df = pd.DataFrame(columns=df.index, data=[df[column].tolist()])
    pie_df['entity_id'] = f'{entity_type}_{exchange}_{column}'
    pie_df['timestamp'] = timestamp
    drawer = Drawer(main_df=pie_df)
    drawer.draw_pie(show=True)
def to_em_fc(entity_id):
    """Translate a zvt entity_id into eastmoney's "fc" security code.

    Unknown type/exchange combinations fall through and return None.
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    if entity_type == 'stockhk':
        # HK stocks use the raw code as-is
        return code
    suffix_by_market = {
        ('stock', 'sh'): '01',
        ('stock', 'sz'): '02',
        ('stockus', 'nyse'): '.N',
        ('stockus', 'nasdaq'): '.O',
    }
    key = (entity_type, exchange)
    if key in suffix_by_market:
        return code + suffix_by_market[key]
def on_trading_signal(self, trading_signal: TradingSignal):
    """Handle one trading signal: resolve the order type, look up the bar at the
    signal's timestamp, and place an order at that bar's close price.

    :param trading_signal: the signal carrying entity, timestamp, level and type
    :raises WrongKdataError: when fetching kdata raises any exception
    """
    entity_id = trading_signal.entity_id
    happen_timestamp = trading_signal.happen_timestamp
    order_type = AccountService.trading_signal_to_order_type(trading_signal.trading_signal_type)
    trading_level = trading_signal.trading_level.value
    if order_type:
        try:
            # single bar exactly at the signal's timestamp
            kdata = get_kdata(
                provider=self.provider,
                entity_id=entity_id,
                level=trading_level,
                start_timestamp=happen_timestamp,
                end_timestamp=happen_timestamp,
                limit=1,
                adjust_type=self.adjust_type,
            )
        except Exception as e:
            self.logger.error(e)
            raise WrongKdataError("could not get kdata")
        if pd_is_not_null(kdata):
            entity_type, _, _ = decode_entity_id(kdata["entity_id"][0])
            the_price = kdata["close"][0]
            # a falsy close (0/NaN-like) means the bar is unusable -> skip
            if the_price:
                self.order(
                    entity_id=entity_id,
                    current_price=the_price,
                    current_timestamp=happen_timestamp,
                    order_pct=trading_signal.position_pct,
                    order_money=trading_signal.order_money,
                    order_type=order_type,
                )
            else:
                self.logger.warning(
                    "ignore trading signal,wrong kdata,entity_id:{},timestamp:{},kdata:{}".format(
                        entity_id, happen_timestamp, kdata.to_dict(orient="records")
                    )
                )
        else:
            self.logger.warning(
                "ignore trading signal,could not get kdata,entity_id:{},timestamp:{}".format(
                    entity_id, happen_timestamp
                )
            )
def get_kdata(region: Region, entity_id=None, entity_ids=None, level=IntervalLevel.LEVEL_1DAY.value, provider: Provider = Provider.Default, columns=None, return_type='df', start_timestamp=None, end_timestamp=None, filters=None, session=None, order=None, limit=None, index='timestamp', adjust_type: AdjustType = None):
    """Query kdata for one entity or a list of entities.

    Exactly one of ``entity_id`` / ``entity_ids`` may be given; the schema is
    resolved from the first entity's type plus ``level`` and ``adjust_type``,
    and all remaining arguments are forwarded to ``query_data``.
    """
    assert not entity_id or not entity_ids
    if entity_ids:
        entity_id = entity_ids[0]
    else:
        entity_ids = [entity_id]

    kind = decode_entity_id(entity_id)[0]
    schema: Mixin = get_kdata_schema(kind, level=level, adjust_type=adjust_type)
    return schema.query_data(
        region=region,
        entity_ids=entity_ids,
        level=level,
        provider=provider,
        columns=columns,
        return_type=return_type,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        filters=filters,
        session=session,
        order=order,
        limit=limit,
        index=index,
    )
def get_basic_info(entity_id):
    """Fetch basic security information from eastmoney's mobile (emh5) API.

    Fix: unsupported entity types previously hit ``assert False``, which is
    silently stripped under ``python -O``; raise an explicit error instead.

    :param entity_id: zvt entity id (stock / stockus / stockhk supported)
    :return: the result-field dict from the API response
    :raises ValueError: when the entity type is not supported
    :raises requests.HTTPError: when the HTTP request fails
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    if entity_type == "stock":
        url = "https://emh5.eastmoney.com/api/GongSiGaiKuang/GetJiBenZiLiao"
        result_field = "JiBenZiLiao"
    elif entity_type == "stockus":
        url = "https://emh5.eastmoney.com/api/MeiGu/GaiKuang/GetZhengQuanZiLiao"
        result_field = "ZhengQuanZiLiao"
    elif entity_type == "stockhk":
        url = "https://emh5.eastmoney.com/api/GangGu/GaiKuang/GetZhengQuanZiLiao"
        result_field = "ZhengQuanZiLiao"
    else:
        raise ValueError(f"unsupported entity type: {entity_type}")
    data = {"fc": to_em_fc(entity_id=entity_id), "color": "w"}
    resp = requests.post(url=url, json=data, headers=DEFAULT_HEADER)
    resp.raise_for_status()
    return resp.json()["Result"][result_field]
def get_kdata(entity_id, level=IntervalLevel.LEVEL_1DAY, adjust_type=AdjustType.qfq, limit=10000):
    """Fetch kline bars for one entity from eastmoney's push2his API.

    :param entity_id: zvt entity id
    :param level: kline interval, coerced to IntervalLevel
    :param adjust_type: price adjustment flag (qfq/hfq/...)
    :param limit: max number of bars to request
    :return: DataFrame of bars, or None when the API returned no data
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    level = IntervalLevel(level)
    sec_id = to_em_sec_id(entity_id)
    fq_flag = to_em_fq_flag(adjust_type)
    level_flag = to_em_level_flag(level)
    # f131 settlement price
    # f133 open interest
    # not fetched for now
    url = f"https://push2his.eastmoney.com/api/qt/stock/kline/get?secid={sec_id}&klt={level_flag}&fqt={fq_flag}&lmt={limit}&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1"
    resp = requests.get(url, headers=DEFAULT_HEADER)
    resp.raise_for_status()
    results = resp.json()
    data = results["data"]
    kdatas = []
    if data:
        klines = data["klines"]
        name = data["name"]
        for result in klines:
            # sample rows returned by the API (comma separated):
            # "2000-01-28,1005.26,1012.56,1173.12,982.13,3023326,3075552000.00"
            # "2021-08-27,19.39,20.30,20.30,19.25,1688497,3370240912.00,5.48,6.01,1.15,3.98,0,0,0"
            # time,open,close,high,low,volume,turnover
            # "2022-04-13,10708,10664,10790,10638,402712,43124771328,1.43,0.57,60,0.00,4667112399583576064,4690067230254170112,1169270784"
            fields = result.split(",")
            the_timestamp = to_pd_timestamp(fields[0])
            the_id = generate_kdata_id(entity_id=entity_id, timestamp=the_timestamp, level=level)
            # NOTE: `open` shadows the builtin; kept as-is to match the schema column name
            open = to_float(fields[1])
            close = to_float(fields[2])
            high = to_float(fields[3])
            low = to_float(fields[4])
            volume = to_float(fields[5])
            turnover = to_float(fields[6])
            # field 7: amplitude
            change_pct = value_to_pct(to_float(fields[8]))
            # field 9: change
            turnover_rate = value_to_pct(to_float(fields[10]))
            kdatas.append(
                dict(
                    id=the_id,
                    timestamp=the_timestamp,
                    entity_id=entity_id,
                    provider="em",
                    code=code,
                    name=name,
                    level=level.value,
                    open=open,
                    close=close,
                    high=high,
                    low=low,
                    volume=volume,
                    turnover=turnover,
                    turnover_rate=turnover_rate,
                    change_pct=change_pct,
                )
            )
    if kdatas:
        df = pd.DataFrame.from_records(kdatas)
        return df
def get_kdata(entity_id, level=IntervalLevel.LEVEL_1DAY, adjust_type=AdjustType.qfq, limit=10000):
    """Fetch kline bars for one entity from eastmoney's push2his API.

    Unlike the sibling variant, this one drops the last (possibly unfinished) bar.

    :param entity_id: zvt entity id
    :param level: kline interval, coerced to IntervalLevel
    :param adjust_type: price adjustment flag (qfq/hfq/...)
    :param limit: max number of bars to request
    :return: DataFrame of bars, or None when the API returned no data
    """
    entity_type, exchange, code = decode_entity_id(entity_id)
    level = IntervalLevel(level)
    sec_id = to_em_sec_id(entity_id)
    fq_flag = to_em_fq_flag(adjust_type)
    level_flag = to_em_level_flag(level)
    url = f'https://push2his.eastmoney.com/api/qt/stock/kline/get?secid={sec_id}&klt={level_flag}&fqt={fq_flag}&lmt={limit}&end=20500000&iscca=1&fields1=f1,f2,f3,f4,f5,f6,f7,f8&fields2=f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61,f62,f63,f64&ut=f057cbcbce2a86e2866ab8877db1d059&forcect=1'
    resp = requests.get(url, headers=DEFAULT_HEADER)
    resp.raise_for_status()
    results = resp.json()
    data = results['data']
    kdatas = []
    if data:
        klines = data['klines']
        name = data['name']
        # TODO: ignore the last unfinished kdata now,could control it better if need
        for result in klines[:-1]:
            # sample rows returned by the API (comma separated):
            # "2000-01-28,1005.26,1012.56,1173.12,982.13,3023326,3075552000.00"
            # "2021-08-27,19.39,20.30,20.30,19.25,1688497,3370240912.00,5.48,6.01,1.15,3.98,0,0,0"
            # time,open,close,high,low,volume,turnover
            fields = result.split(',')
            the_timestamp = to_pd_timestamp(fields[0])
            the_id = generate_kdata_id(entity_id=entity_id, timestamp=the_timestamp, level=level)
            # NOTE: `open` shadows the builtin; kept as-is to match the schema column name
            open = to_float(fields[1])
            close = to_float(fields[2])
            high = to_float(fields[3])
            low = to_float(fields[4])
            volume = to_float(fields[5])
            turnover = to_float(fields[6])
            # field 7: amplitude
            change_pct = value_to_pct(to_float(fields[8]))
            # field 9: change
            turnover_rate = value_to_pct(to_float(fields[10]))
            kdatas.append(
                dict(
                    id=the_id,
                    timestamp=the_timestamp,
                    entity_id=entity_id,
                    provider='em',
                    code=code,
                    name=name,
                    level=level.value,
                    open=open,
                    close=close,
                    high=high,
                    low=low,
                    volume=volume,
                    turnover=turnover,
                    turnover_rate=turnover_rate,
                    change_pct=change_pct,
                )
            )
    if kdatas:
        df = pd.DataFrame.from_records(kdatas)
        return df
def on_trading_close(self, timestamp):
    """End-of-day close: drop empty positions, mark the remaining positions to
    the day's closing price, refresh account value/profit, and persist the
    snapshot to the session.

    :param timestamp: the trading day being closed
    """
    self.logger.info("on_trading_close:{}".format(timestamp))
    # remove the empty position
    self.account.positions = [
        position
        for position in self.account.positions
        if position.long_amount > 0 or position.short_amount > 0
    ]
    # clear the data which need recomputing
    the_id = "{}_{}".format(self.trader_name, to_time_str(timestamp, TIME_FORMAT_ISO8601))
    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=self.adjust_type)
        # latest daily bar at or before `timestamp`
        kdata = get_kdata(
            provider=self.provider,
            level=IntervalLevel.LEVEL_1DAY,
            entity_id=position.entity_id,
            order=data_schema.timestamp.desc(),
            end_timestamp=timestamp,
            limit=1,
            adjust_type=self.adjust_type,
        )
        closing_price = kdata["close"][0]
        # the full holding becomes available again for the next session
        position.available_long = position.long_amount
        position.available_short = position.short_amount
        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                # short value: entry notional twice, minus buy-back cost at close
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value
            # refresh profit
            # NOTE(review): profit/profit_rate use the long-side fields even for
            # short-only positions — confirm this is intended
            position.profit = (closing_price - position.average_long_price) * position.long_amount
            position.profit_rate = position.profit / (position.average_long_price * position.long_amount)
        else:
            self.logger.warning(
                "could not refresh close value for position:{},timestamp:{}".format(position.entity_id, timestamp)
            )
        position.id = "{}_{}_{}".format(self.trader_name, position.entity_id, to_time_str(timestamp, TIME_FORMAT_ISO8601))
        position.timestamp = to_pd_timestamp(timestamp)
        position.account_stats_id = the_id
    self.account.id = the_id
    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)
    self.account.profit = (self.account.all_value - self.account.input_money) / self.account.input_money
    self.session.add(self.account)
    self.session.commit()
    account_info = (
        f"on_trading_close,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} "
        f"cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}"
    )
    self.logger.info(account_info)
def to_em_sec_id(entity_id):
    """Build eastmoney's secid ("<market-flag>.<code>") from a zvt entity_id."""
    _, exchange, code = decode_entity_id(entity_id)
    return '.'.join((to_em_entity_flag(exchange), code))
def draw(self, main_chart='kline', sub_chart='bar', mode='lines', width=None, height=None, title=None, keep_ui_state=True, show=False, **kwargs):
    """Render the drawer's main/factor/sub data as a single plotly figure.

    When sub data exists the figure is a 2-row subplot (main chart on top, sub
    chart below); otherwise a plain figure is built.

    :param main_chart: 'kline' (candlestick) or 'scatter' (one trace per column)
    :param sub_chart: 'line' or bar-style traces for the sub plot
    :param mode: scatter mode for line traces
    :param width: figure width in px
    :param height: figure height in px
    :param title: figure title
    :param keep_ui_state: keep zoom/pan state between redraws
    :param show: show the figure instead of returning it
    :return: the figure when ``show`` is False, otherwise None
    """
    if pd_is_not_null(self.sub_data):
        subplot = True
        fig = make_subplots(rows=2, cols=1, row_heights=[0.8, 0.2], vertical_spacing=0.08, shared_xaxes=True)
        sub_traces = []
    else:
        subplot = False
        fig = go.Figure()
    traces = []
    for entity_id, df in self.main_data.entity_map_df.items():
        # prefer the bare code for trace names; fall back to the full entity_id
        code = entity_id
        try:
            _, _, code = decode_entity_id(entity_id)
        except Exception:
            pass
        if main_chart == 'kline':
            trace_name = '{}_kdata'.format(code)
            trace = go.Candlestick(x=df.index, open=df['open'], close=df['close'], low=df['low'], high=df['high'], name=trace_name, **kwargs)
            traces.append(trace)
        elif main_chart == 'scatter':
            for col in df.columns:
                trace_name = '{}_{}'.format(code, col)
                ydata = df[col].values.tolist()
                traces.append(go.Scatter(x=df.index, y=ydata, mode=mode, name=trace_name, **kwargs))
        # draw the factor indicators on the main chart
        factor_df = self.factor_data.entity_map_df.get(entity_id)
        if pd_is_not_null(factor_df):
            for col in factor_df.columns:
                trace_name = '{}_{}'.format(code, col)
                ydata = factor_df[col].values.tolist()
                line = go.Scatter(x=df.index, y=ydata, mode=mode, name=trace_name, **kwargs)
                traces.append(line)
        if subplot:
            # draw the sub chart
            sub_df = self.sub_data.entity_map_df.get(entity_id)
            if pd_is_not_null(sub_df):
                for col in sub_df.columns:
                    trace_name = '{}_{}'.format(code, col)
                    ydata = sub_df[col].values.tolist()

                    # positive values red, non-positive green (CN market convention)
                    def color(i):
                        if i > 0:
                            return 'red'
                        else:
                            return 'green'

                    colors = [color(i) for i in ydata]
                    if sub_chart == 'line':
                        sub_trace = go.Scatter(x=sub_df.index, y=ydata, name=trace_name, yaxis='y2', marker_color=colors)
                    else:
                        sub_trace = go.Bar(x=sub_df.index, y=ydata, name=trace_name, yaxis='y2', marker_color=colors)
                    sub_traces.append(sub_trace)
    if subplot:
        fig.add_traces(traces, rows=[1] * len(traces), cols=[1] * len(traces))
        fig.add_traces(sub_traces, rows=[2] * len(sub_traces), cols=[1] * len(sub_traces))
    else:
        fig.add_traces(traces)
    fig.update_layout(self.gen_plotly_layout(width=width, height=height, title=title, keep_ui_state=keep_ui_state, subplot=subplot))
    if show:
        fig.show()
    else:
        return fig
def make_traces(self, main_chart=ChartType.kline, sub_chart="bar", yaxis="y", scale_value=None, **kwargs):
    """Build plotly traces for every entity: main-chart traces (bar/kline/
    scatter/line/area/histogram/pie), factor overlays, and sub-chart traces.

    :param main_chart: ChartType for the main plot
    :param sub_chart: fallback chart type for sub-plot columns
    :param yaxis: yaxis name for the main-chart traces
    :param scale_value: when set, rescale each column so its first non-zero
        value equals this number (for cross-entity comparison)
    :return: tuple (traces, sub_traces)
    """
    traces = []
    sub_traces = []
    for entity_id, df in self.main_data.entity_map_df.items():
        # keep numeric columns only; copy so rescaling never mutates the source
        df = df.select_dtypes(np.number)
        df = df.copy()
        if scale_value:
            for col in df.columns:
                first = None
                for i in range(0, len(df)):
                    first = df[col][i]
                    if first != 0:
                        break
                # all-zero column: nothing to scale against
                if first == 0:
                    continue
                scale = scale_value / first
                df[col] = df[col] * scale
        # prefer the bare code for trace names; fall back to the full entity_id
        code = entity_id
        try:
            _, _, code = decode_entity_id(entity_id)
        except Exception:
            pass
        # build the main chart
        if main_chart == ChartType.bar:
            for col in df.columns:
                trace_name = "{}_{}".format(code, col)
                ydata = df[col].values.tolist()
                traces.append(go.Bar(x=df.index, y=ydata, name=trace_name, yaxis=yaxis, **kwargs))
        elif main_chart == ChartType.kline:
            trace_name = "{}_kdata".format(code)
            trace = go.Candlestick(
                x=df.index,
                open=df["open"],
                close=df["close"],
                low=df["low"],
                high=df["high"],
                name=trace_name,
                yaxis=yaxis,
                **kwargs,
            )
            traces.append(trace)
        elif main_chart in [ChartType.scatter, ChartType.line, ChartType.area]:
            mode = _zvt_chart_type_map_scatter_mode.get(main_chart)
            for col in df.columns:
                trace_name = "{}_{}".format(code, col)
                ydata = df[col].values.tolist()
                traces.append(go.Scatter(x=df.index, y=ydata, mode=mode, name=trace_name, yaxis=yaxis, **kwargs))
        elif main_chart == ChartType.histogram:
            for col in df.columns:
                trace_name = "{}_{}".format(code, col)
                x = df[col].tolist()
                trace = go.Histogram(x=x, name=trace_name, **kwargs)
                traces.append(trace)
                # annotate the last sample value on the histogram
                annotation = [
                    dict(
                        entity_id=entity_id,
                        timestamp=x[-1],
                        value=0,
                        flag=f"{trace_name}:{x[-1]}",
                    )
                ]
                annotation_df = pd.DataFrame.from_records(annotation, index=["entity_id", "timestamp"])
                if pd_is_not_null(self.annotation_df):
                    self.annotation_df = pd.concat([self.annotation_df, annotation_df])
                else:
                    self.annotation_df = annotation_df
        elif main_chart == ChartType.pie:
            for _, row in df.iterrows():
                traces.append(go.Pie(name=entity_id, labels=df.columns.tolist(), values=row.tolist(), **kwargs))
        else:
            assert False
        # build the indicator overlays on the main chart
        if self.factor_data_list:
            for factor_data in self.factor_data_list:
                if not factor_data.empty():
                    factor_df = factor_data.entity_map_df.get(entity_id)
                    factor_df = factor_df.select_dtypes(np.number)
                    if pd_is_not_null(factor_df):
                        for col in factor_df.columns:
                            trace_name = "{}_{}".format(code, col)
                            ydata = factor_df[col].values.tolist()
                            line = go.Scatter(x=factor_df.index, y=ydata, mode="lines", name=trace_name, yaxis=yaxis, **kwargs)
                            traces.append(line)
        # build the sub chart
        if self.has_sub_plot():
            for sub_data in self.sub_data_list:
                sub_df = sub_data.entity_map_df.get(entity_id)
                if pd_is_not_null(sub_df):
                    sub_df = sub_df.select_dtypes(np.number)
                    for col in sub_df.columns:
                        trace_name = "{}_{}".format(code, col)
                        ydata = sub_df[col].values.tolist()

                        # positive values red, non-positive green (CN market convention)
                        def color(i):
                            if i > 0:
                                return "red"
                            else:
                                return "green"

                        colors = [color(i) for i in ydata]
                        # per-column chart type override, falling back to sub_chart
                        the_sub_chart = None
                        if self.sub_col_chart is not None:
                            the_sub_chart = self.sub_col_chart.get(col)
                        if not the_sub_chart:
                            the_sub_chart = sub_chart
                        if the_sub_chart == ChartType.line:
                            sub_trace = go.Scatter(x=sub_df.index, y=ydata, name=trace_name, yaxis="y2", marker=dict(color=colors))
                        else:
                            sub_trace = go.Bar(x=sub_df.index, y=ydata, name=trace_name, yaxis="y2", marker=dict(color=colors))
                        sub_traces.append(sub_trace)
    return traces, sub_traces
def report_top_stats(
    entity_provider,
    data_provider,
    periods=(7, 30, 180, 365),
    ignore_new_stock=True,
    entity_type="stock",
    adjust_type=None,
    top_count=30,
    turnover_threshold=100000000,
    turnover_rate_threshold=0.02,
    em_group_over_write=True,
):
    """Email top/bottom performance reports for several look-back periods and
    sync the best/worst performers to eastmoney groups.

    Fix: the ``periods`` default was a mutable list; it is now an immutable
    tuple (the function only iterates it, so callers are unaffected).

    :param entity_provider: provider for the entity metadata query
    :param data_provider: provider for the kdata queries
    :param periods: look-back windows in days
    :param ignore_new_stock: drop entities listed less than one year
    :param entity_type: entity type to report on
    :param adjust_type: price adjustment; defaults per entity type
    :param top_count: how many top/bottom entities to include
    :param turnover_threshold: minimum turnover on the latest day
    :param turnover_rate_threshold: minimum turnover rate on the latest day
    :param em_group_over_write: overwrite the eastmoney group on sync
    """
    if not adjust_type:
        adjust_type = default_adjust_type(entity_type=entity_type)
    kdata_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    entity_schema = get_entity_schema(entity_type=entity_type)
    # anchor all windows on the most recent kdata day
    latest_day = kdata_schema.query_data(provider=data_provider, order=kdata_schema.timestamp.desc(), limit=1, return_type="domain")
    current_timestamp = latest_day[0].timestamp
    email_action = EmailInformer()
    # require at least one year since listing
    filter_entity_ids = []
    if ignore_new_stock:
        pre_year = next_date(current_timestamp, -365)
        entity_ids = get_entity_ids(
            provider=entity_provider,
            entity_schema=entity_schema,
            filters=[entity_schema.timestamp <= pre_year],
        )
        if not entity_ids:
            msg = f"{entity_type} no entity_ids listed one year"
            logger.error(msg)
            email_action.send_message(zvt_config["email_username"], "report_top_stats error", msg)
            return
        filter_entity_ids = entity_ids
    # keep only entities liquid enough on the latest day
    filter_turnover_df = kdata_schema.query_data(
        filters=[
            kdata_schema.turnover >= turnover_threshold,
            kdata_schema.turnover_rate >= turnover_rate_threshold,
        ],
        provider=data_provider,
        start_timestamp=current_timestamp,
        index="entity_id",
        columns=["entity_id", "code"],
    )
    if filter_entity_ids:
        filter_entity_ids = set(filter_entity_ids) & set(filter_turnover_df.index.tolist())
    else:
        filter_entity_ids = filter_turnover_df.index.tolist()
    if not filter_entity_ids:
        msg = f"{entity_type} no entity_ids selected"
        logger.error(msg)
        email_action.send_message(zvt_config["email_username"], "report_top_stats error", msg)
        return
    logger.info(f"{entity_type} filter_entity_ids size: {len(filter_entity_ids)}")
    filters = [kdata_schema.entity_id.in_(filter_entity_ids)]
    stats = []
    ups = []
    downs = []
    for period in periods:
        start = next_date(current_timestamp, -period)
        df, _ = get_top_performance_entities(
            entity_type=entity_type,
            start_timestamp=start,
            filters=filters,
            pct=1,
            show_name=True,
            entity_provider=entity_provider,
            data_provider=data_provider,
        )
        df.rename(columns={"score": f"score_{period}"}, inplace=True)
        ups.append(tabulate(df.iloc[:top_count], headers="keys"))
        downs.append(tabulate(df.iloc[-top_count:], headers="keys"))
        stats.append(tabulate(df.describe(), headers="keys"))
        # best performers of the last week/month
        if period == 7 or period == 30:
            try:
                codes = [decode_entity_id(entity_id)[2] for entity_id in df.index[:top_count]]
                add_to_eastmoney(codes=codes, entity_type=entity_type, group="最靓仔", over_write=em_group_over_write)
            except Exception as e:
                logger.exception(e)
                email_action.send_message(
                    zvt_config["email_username"],
                    f"report_top_stats error",
                    "report_top_stats error:{}".format(e),
                )
        # biggest losers within one year
        if period == 365:
            try:
                codes = [decode_entity_id(entity_id)[2] for entity_id in df.index[-top_count:]]
                add_to_eastmoney(codes=codes, entity_type=entity_type, group="谁有我惨", over_write=em_group_over_write)
            except Exception as e:
                logger.exception(e)
                email_action.send_message(
                    zvt_config["email_username"],
                    f"report_top_stats error",
                    "report_top_stats error:{}".format(e),
                )
    msg = "\n"
    for s in stats:
        msg = msg + s + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}统计报告", msg)
    msg = "\n"
    for up in ups:
        msg = msg + up + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}涨幅统计报告", msg)
    msg = "\n"
    for down in downs:
        msg = msg + down + "\n"
    email_action.send_message(zvt_config["email_username"], f"{current_timestamp} {entity_type}跌幅统计报告", msg)