def on_finish(self, timestmap):
    """Hook called when trading ends; optionally plots the account value curve.

    NOTE(review): the parameter name `timestmap` is a pre-existing typo kept
    for backward compatibility with keyword callers.
    """
    self.on_trading_finish(timestmap)
    # When enabled, render the trader's account value history in the browser.
    if self.draw_result:
        import plotly.io as pio

        pio.renderers.default = "browser"
        stats_reader = AccountStatsReader(trader_names=[self.trader_name])
        stats_df = stats_reader.data_df
        plot = Drawer(
            main_data=NormalData(
                stats_df.copy()[["trader_name", "timestamp", "all_value"]],
                category_field="trader_name",
            )
        )
        plot.draw_line(show=True)
def show_industry_composition(entity_ids, timestamp):
    """Draw a pie chart of how the given stocks distribute over industry blocks."""
    industry_blocks = Block.query_data(
        provider='eastmoney', filters=[Block.category == 'industry'], index='entity_id'
    )
    industry_ids = industry_blocks.index.tolist()
    membership_df = BlockStock.query_data(
        entity_ids=industry_ids, filters=[BlockStock.stock_id.in_(entity_ids)]
    )
    # Count how many of the requested stocks fall into each industry.
    industry_counts = membership_df['name'].value_counts()
    pie_df = pd.DataFrame(data=[industry_counts.tolist()], columns=industry_counts.index)
    pie_df['entity_id'] = 'stock_cn_industry'
    pie_df['timestamp'] = timestamp
    Drawer(main_df=pie_df).draw_pie(show=True)
def show_month_performance():
    """Scatter-plot the monthly top-performance data accumulated since 2005."""
    frames = []
    for timestamp, df in got_top_performance_by_month(start_timestamp='2005-01-01', list_days=250):
        if pd_is_not_null(df):
            monthly = df.reset_index(drop=True)
            monthly['entity_id'] = 'stock_cn_performance'
            monthly['timestamp'] = timestamp
            frames.append(monthly)
    all_df = pd.concat(frames)
    print(all_df)
    Drawer(main_df=all_df).draw_scatter(show=True)
def composite_all(data_schema, column, timestamp, entity_ids=None, filters=None):
    """Draw a pie chart of *column* across all matching entities at *timestamp*.

    :param data_schema: schema to query
    :param column: column name (str) or a schema column object (``.name`` is used)
    :param timestamp: the single timestamp to slice the data at
    :param entity_ids: optional entity ids to restrict the query
    :param filters: optional extra query filters; not mutated by this call
    """
    if not isinstance(column, str):
        column = column.name
    ts_filter = data_schema.timestamp == to_pd_timestamp(timestamp)
    if filters:
        # BUG FIX: the original did filters.append([ts_filter]), nesting a list
        # inside the filter list — inconsistent with the else branch below.
        # Also copy instead of appending, so the caller's list is not mutated.
        filters = list(filters) + [ts_filter]
    else:
        filters = [ts_filter]
    df = data_schema.query_data(
        entity_ids=entity_ids,
        columns=['entity_id', 'timestamp', column],
        filters=filters,
        index='entity_id',
    )
    # Derive a synthetic entity id for the composite row from the first entity.
    entity_type, exchange, _ = decode_entity_id(df['entity_id'].iloc[0])
    pie_df = pd.DataFrame(columns=df.index, data=[df[column].tolist()])
    pie_df['entity_id'] = f'{entity_type}_{exchange}_{column}'
    pie_df['timestamp'] = timestamp
    drawer = Drawer(main_df=pie_df)
    drawer.draw_pie(show=True)
def show_industry_composition(entity_ids, timestamp):
    """Pie chart showing the industry composition of the given stocks."""
    blocks = Block.query_data(
        provider="eastmoney",
        filters=[Block.category == "industry"],
        index="entity_id",
    )
    stock_in_blocks = BlockStock.query_data(
        entity_ids=blocks.index.tolist(),
        filters=[BlockStock.stock_id.in_(entity_ids)],
    )
    # One column per industry name, a single row of membership counts.
    name_counts = stock_in_blocks["name"].value_counts()
    composition = pd.DataFrame(columns=name_counts.index, data=[name_counts.tolist()])
    composition["entity_id"] = "stock_cn_industry"
    composition["timestamp"] = timestamp
    Drawer(main_df=composition).draw_pie(show=True)
def draw(self, render='html', file_name=None, width=None, height=None, title=None, keep_ui_state=True,
         annotation_df=None, target_type: TargetType = TargetType.open_long):
    """Render the selected targets (long or short side) as a table.

    :param render: kept for interface compatibility (unused here)
    :param file_name: kept for interface compatibility (unused here)
    :param width: table width
    :param height: table height
    :param title: table title
    :param keep_ui_state: passed through to the drawer
    :param annotation_df: kept for interface compatibility (unused here)
    :param target_type: which side's targets to show
    :raises ValueError: if target_type is neither open_long nor open_short
    """
    if target_type == TargetType.open_long:
        df = self.open_long_df.copy()
    elif target_type == TargetType.open_short:
        df = self.open_short_df.copy()
    else:
        # BUG FIX: previously df was left unbound for any other target_type,
        # so the next statement raised an opaque NameError.
        raise ValueError(f'unsupported target_type: {target_type}')
    df['target_type'] = target_type.value
    if pd_is_not_null(df):
        df = df.reset_index(drop=False)
        drawer = Drawer(df)
        drawer.draw_table(width=width, height=height, title=title, keep_ui_state=keep_ui_state)
def draw_result(self, entity_id):
    """Visualize prediction output for one entity.

    For the "raw" label method, overlays predicted close (shifted forward by
    ``predict_steps``) on the actual close as a line chart; otherwise shows a
    table joining real and predicted classification results.
    """
    if self.label_method == "raw":
        actual = self.kdata_df.loc[[entity_id], ["close"]].copy()
        predicted = self.pred_y.to_frame(name="pred_close")
        # Align predictions with the future bars they refer to.
        predicted = predicted.loc[[entity_id], :].shift(self.predict_steps)
        Drawer(main_df=actual, factor_df_list=[predicted]).draw_line(show=True)
    else:
        predicted = self.pred_y.to_frame(name="pred_result").loc[[entity_id], :]
        comparison = (
            self.testing_y.to_frame(name="real_result")
            .loc[[entity_id], :]
            .join(predicted, how="outer")
        )
        Drawer(main_df=comparison).draw_table()
def compare(entity_ids, schema_map_columns: dict = None, chart_type: ChartType = ChartType.line):
    """Compare entities on the given schema columns, or on their kdata by default.

    :param entity_ids: entity ids to compare
    :param schema_map_columns: key represents schema, value represents columns
    :param chart_type: chart type for the column comparison, default line
    """
    grouped_ids = _group_entity_ids(entity_ids=entity_ids)
    frames = []
    for entity_type in grouped_ids:
        ids_of_type = grouped_ids.get(entity_type)
        if schema_map_columns:
            for schema in schema_map_columns:
                query_columns = ["entity_id", "timestamp"] + schema_map_columns.get(schema)
                frames.append(schema.query_data(entity_ids=ids_of_type, columns=query_columns))
        else:
            # No explicit columns: fall back to each type's kdata schema.
            kdata_schema = get_kdata_schema(entity_type=entity_type)
            frames.append(kdata_schema.query_data(entity_ids=ids_of_type))
    all_df = pd.concat(frames)
    if schema_map_columns:
        Drawer(main_df=all_df).draw(main_chart=chart_type, show=True)
    else:
        turnover_df = all_df[["entity_id", "timestamp", "turnover"]].copy()
        Drawer(main_df=all_df, sub_df_list=[turnover_df]).draw_kline(show=True)
def composite(entity_id, data_schema, columns, filters=None):
    """Draw a pie chart of the given columns for a single entity."""
    query_columns = ["entity_id", "timestamp"] + columns
    df = data_schema.query_data(entity_id=entity_id, columns=query_columns, filters=filters)
    Drawer(main_df=df).draw_pie(show=True)
the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'], filters: List = None, order: object = None, limit: int = None, level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = None, accumulator: Accumulator = None, need_persist: bool = False, dry_run: bool = False, adjust_type: Union[AdjustType, str] = None, window=30) -> None: self.adjust_type = adjust_type transformer = TopBottomTransformer(window=window) super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, need_persist, dry_run, adjust_type) if __name__ == '__main__': factor = TopBottomFactor(codes=['601318'], start_timestamp='2005-01-01', end_timestamp=now_pd_timestamp(), level=IntervalLevel.LEVEL_1DAY, window=120) print(factor.factor_df) data_reader1 = DataReader(codes=['601318'], data_schema=Stock1dKdata, entity_schema=Stock) drawer = Drawer(main_df=data_reader1.data_df, factor_df_list=[factor.factor_df[['top', 'bottom']]]) drawer.draw_kline() # the __all__ is generated __all__ = ['TopBottomTransformer', 'TopBottomFactor']
def compare(
    entity_ids=None,
    codes=None,
    schema=None,
    columns=None,
    schema_map_columns: dict = None,
    chart_type: ChartType = ChartType.line,
    start_timestamp=None,
    scale_value: int = None,
):
    """
    Compare indicators (columns) of entities.

    :param entity_ids: entity ids to compare
    :param codes: entity codes to compare
    :param schema: single schema to query when schema_map_columns is not given
    :param columns: columns of ``schema`` to compare
    :param schema_map_columns: key represents schema, value represents columns
    :param chart_type: "line", "area", "scatter", default "line"
    :param start_timestamp: start timestamp of the queried data
    :param scale_value: compare with same value which scaled to scale_value
    """
    dfs = []
    # default: compare kdata (candlesticks) when neither schema nor mapping given
    if schema_map_columns is None and schema is None:
        entity_type_map_ids = _group_entity_ids(entity_ids=entity_ids)
        for entity_type in entity_type_map_ids:
            schema = get_kdata_schema(entity_type=entity_type)
            df = schema.query_data(
                entity_ids=entity_type_map_ids.get(entity_type), start_timestamp=start_timestamp
            )
            dfs.append(df)
        all_df = pd.concat(dfs)
        drawer = Drawer(
            main_df=all_df,
            sub_df_list=[all_df[["entity_id", "timestamp", "turnover"]].copy()],
        )
        drawer.draw_kline(show=True, scale_value=scale_value)
    else:
        if schema_map_columns:
            for schema in schema_map_columns:
                columns = ["entity_id", "timestamp"] + schema_map_columns.get(schema)
                df = schema.query_data(
                    entity_ids=entity_ids, codes=codes, columns=columns, start_timestamp=start_timestamp
                )
                dfs.append(df)
        elif schema:
            columns = ["entity_id", "timestamp"] + columns
            df = schema.query_data(
                entity_ids=entity_ids, codes=codes, columns=columns, start_timestamp=start_timestamp
            )
            dfs.append(df)
        all_df = pd.concat(dfs)
        drawer = Drawer(main_df=all_df)
        drawer.draw(main_chart=chart_type, show=True, scale_value=scale_value)
transformer, accumulator, need_persist, only_compute_factor, factor_name, clear_state, only_load_factor, adjust_type, ) if __name__ == "__main__": factor = TopBottomFactor( codes=["601318"], start_timestamp="2005-01-01", end_timestamp=now_pd_timestamp(), level=IntervalLevel.LEVEL_1DAY, window=120, ) print(factor.factor_df) data_reader1 = DataReader(codes=["601318"], data_schema=Stock1dKdata, entity_schema=Stock) drawer = Drawer(main_df=data_reader1.data_df, factor_df_list=[factor.factor_df[["top", "bottom"]]]) drawer.draw_kline(show=True) # the __all__ is generated __all__ = ["TopBottomTransformer", "TopBottomFactor"]
def draw_line(self, show=True):
    """Plot each trader's account value ('all_value') over time as a line chart."""
    plot_data = NormalData(
        self.data_df.copy()[['trader_name', 'timestamp', 'all_value']],
        category_field='trader_name',
    )
    return Drawer(main_data=plot_data).draw_line(show=show)
self.filters = filter super().__init__(Order, None, None, None, None, None, None, the_timestamp, start_timestamp, end_timestamp, columns, self.filters, order, None, level, category_field='trader_name', time_field='timestamp', computing_window=None) if __name__ == '__main__': reader = AccountStatsReader(trader_names=['000338_ma_trader']) drawer = Drawer(main_data=NormalData( reader.data_df.copy()[['trader_name', 'timestamp', 'all_value']], category_field='trader_name')) drawer.draw_line() # the __all__ is generated __all__ = ['AccountStatsReader', 'OrderReader']
Order, None, None, None, None, None, None, start_timestamp, end_timestamp, columns, self.filters, order, None, level, category_field="trader_name", time_field="timestamp", computing_window=None, ) if __name__ == "__main__": reader = AccountStatsReader(trader_names=["000338_ma_trader"]) drawer = Drawer( main_data=NormalData( reader.data_df.copy()[["trader_name", "timestamp", "all_value"]], category_field="trader_name" ) ) drawer.draw_line() # the __all__ is generated __all__ = ["clear_trader", "get_trader_info", "get_order_securities", "AccountStatsReader", "OrderReader"]