Example #1
def get_trading_signals_figure(order_reader: OrderReader,
                               entity_id: str,
                               start_timestamp=None,
                               end_timestamp=None,
                               adjust_type=None):
    entity_type, _, _ = decode_entity_id(entity_id)

    data_schema = get_kdata_schema(entity_type=entity_type,
                                   level=order_reader.level,
                                   adjust_type=adjust_type)
    if not start_timestamp:
        start_timestamp = order_reader.start_timestamp
    if not end_timestamp:
        end_timestamp = order_reader.end_timestamp
    kdata_reader = DataReader(
        entity_ids=[entity_id],
        data_schema=data_schema,
        entity_schema=zvt_context.tradable_schema_map.get(entity_type),
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        level=order_reader.level,
    )

    # generate the annotation df
    order_reader.move_on(timeout=0)
    df = order_reader.data_df.copy()
    df = df[df.entity_id == entity_id].copy()
    if pd_is_not_null(df):
        df["value"] = df["order_price"]
        df["flag"] = df["order_type"].apply(lambda x: order_type_flag(x))
        df["color"] = df["order_type"].apply(lambda x: order_type_color(x))
    print(df.tail())

    drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=df)
    return drawer.draw_kline(show=False, height=800)
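
The annotation_df consumed by Drawer above only needs value, flag and color columns on top of the usual entity_id/timestamp keys. A minimal hand-built sketch of that shape (the entity id, prices, colors and the (entity_id, timestamp) index convention are illustrative assumptions, not taken from the example):

import pandas as pd

# Hypothetical annotation frame; the real code above derives it from OrderReader.data_df.
annotation_df = pd.DataFrame(
    {
        "entity_id": ["stock_sz_000338", "stock_sz_000338"],
        "timestamp": pd.to_datetime(["2021-01-04", "2021-02-01"]),
        "value": [18.5, 21.2],            # y position of the marker (the order price)
        "flag": ["B", "S"],               # text drawn at the marker
        "color": ["#ec0000", "#00da3c"],  # marker color
    }
).set_index(["entity_id", "timestamp"], drop=False)

# drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=annotation_df)
# drawer.draw_kline(show=False, height=800)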
Example #2
def compare(entity_ids, schema_map_columns: dict = None, chart_type: ChartType = ChartType.line):
    entity_type_map_ids = _group_entity_ids(entity_ids=entity_ids)
    dfs = []
    for entity_type in entity_type_map_ids:
        if schema_map_columns:
            for schema in schema_map_columns:
                columns = ["entity_id", "timestamp"] + schema_map_columns.get(schema)
                df = schema.query_data(entity_ids=entity_type_map_ids.get(entity_type), columns=columns)
                dfs.append(df)
        else:
            schema = get_kdata_schema(entity_type=entity_type)
            df = schema.query_data(entity_ids=entity_type_map_ids.get(entity_type))
            dfs.append(df)
    all_df = pd.concat(dfs)

    if schema_map_columns:
        drawer = Drawer(main_df=all_df)
        drawer.draw(main_chart=chart_type, show=True)
    else:
        drawer = Drawer(main_df=all_df, sub_df_list=[all_df[["entity_id", "timestamp", "turnover"]].copy()])
        drawer.draw_kline(show=True)
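
A hedged usage sketch for this compare variant, putting one indicator of two entities on the same chart; FinanceFactor, its roe column and the entity ids are illustrative choices, not part of the example:

from zvt.domain import FinanceFactor

compare(
    entity_ids=["stock_sz_000338", "stock_sh_601318"],
    schema_map_columns={FinanceFactor: ["roe"]},
)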
Example #3
class TopBottomFactor(TechnicalFactor):
    # NOTE: this class/def header is reconstructed from the super().__init__() call
    # below; the TechnicalFactor base class and the None defaults are assumptions.
    def __init__(self, entity_schema=None, provider=None, entity_provider=None,
                 entity_ids=None, exchanges=None, codes=None,
                 the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None,
                 end_timestamp: Union[str, pd.Timestamp] = None,
                 columns: List = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'],
                 filters: List = None, order: object = None, limit: int = None,
                 level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id',
                 time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False,
                 fill_method: str = 'ffill', effective_number: int = None,
                 accumulator: Accumulator = None, need_persist: bool = False, dry_run: bool = False,
                 adjust_type: Union[AdjustType, str] = None, window=30) -> None:
        self.adjust_type = adjust_type

        transformer = TopBottomTransformer(window=window)

        super().__init__(entity_schema, provider, entity_provider, entity_ids, exchanges, codes, the_timestamp,
                         start_timestamp, end_timestamp, columns, filters, order, limit, level, category_field,
                         time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer,
                         accumulator, need_persist, dry_run, adjust_type)


if __name__ == '__main__':
    factor = TopBottomFactor(codes=['601318'], start_timestamp='2005-01-01',
                             end_timestamp=now_pd_timestamp(),
                             level=IntervalLevel.LEVEL_1DAY, window=120)
    print(factor.factor_df)

    data_reader1 = DataReader(codes=['601318'], data_schema=Stock1dKdata, entity_schema=Stock)

    drawer = Drawer(main_df=data_reader1.data_df, factor_df_list=[factor.factor_df[['top', 'bottom']]])
    drawer.draw_kline()
# the __all__ is generated
__all__ = ['TopBottomTransformer', 'TopBottomFactor']
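
TopBottomTransformer itself is not shown in the snippet. As a rough illustration of what the 'top'/'bottom' columns plotted above represent, here is an assumed rolling-extremes transform, not zvt's actual implementation:

import pandas as pd


def rolling_top_bottom(kdata: pd.DataFrame, window: int = 120) -> pd.DataFrame:
    # highest high ("top") and lowest low ("bottom") over a rolling window
    out = pd.DataFrame(index=kdata.index)
    out["top"] = kdata["high"].rolling(window=window, min_periods=1).max()
    out["bottom"] = kdata["low"].rolling(window=window, min_periods=1).min()
    return out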
Example #4
class TopBottomFactor(TechnicalFactor):
    # NOTE: this class/def header is reconstructed from the super().__init__() call
    # below; the TechnicalFactor base class and the defaults are assumptions.
    def __init__(self, entity_schema=None, provider=None, entity_provider=None,
                 entity_ids=None, exchanges=None, codes=None,
                 start_timestamp=None, end_timestamp=None,
                 columns=None, filters=None, order=None, limit=None,
                 level=IntervalLevel.LEVEL_1DAY, category_field='entity_id',
                 time_field='timestamp', computing_window=None,
                 keep_all_timestamp=False, fill_method='ffill',
                 effective_number=None, accumulator=None, need_persist=False,
                 only_compute_factor=False, factor_name=None, clear_state=False,
                 only_load_factor=False, adjust_type=None, window=30) -> None:
        self.adjust_type = adjust_type

        transformer = TopBottomTransformer(window=window)

        super().__init__(entity_schema, provider, entity_provider, entity_ids,
                         exchanges, codes, start_timestamp, end_timestamp,
                         columns, filters, order, limit, level, category_field,
                         time_field, computing_window, keep_all_timestamp,
                         fill_method, effective_number, transformer,
                         accumulator, need_persist, only_compute_factor,
                         factor_name, clear_state, only_load_factor,
                         adjust_type)


if __name__ == '__main__':
    factor = TopBottomFactor(codes=['601318'],
                             start_timestamp='2005-01-01',
                             end_timestamp=now_pd_timestamp(),
                             level=IntervalLevel.LEVEL_1DAY,
                             window=120)
    print(factor.factor_df)

    data_reader1 = DataReader(codes=['601318'],
                              data_schema=Stock1dKdata,
                              entity_schema=Stock)

    drawer = Drawer(main_df=data_reader1.data_df,
                    factor_df_list=[factor.factor_df[['top', 'bottom']]])
    drawer.draw_kline(show=True)
# the __all__ is generated
__all__ = ['TopBottomTransformer', 'TopBottomFactor']
Example #5
def compare(
    entity_ids=None,
    codes=None,
    schema=None,
    columns=None,
    schema_map_columns: dict = None,
    chart_type: ChartType = ChartType.line,
    start_timestamp=None,
    scale_value: int = None,
):
    """
    compare indicators(columns) of entities

    :param entity_ids: entity ids to compare
    :param codes: entity codes to compare
    :param schema: schema to query when schema_map_columns is not provided
    :param columns: columns of the schema to compare
    :param schema_map_columns: key is the schema, value is the list of its columns
    :param chart_type: "line", "area", "scatter", default "line"
    :param start_timestamp: query data starting from this timestamp
    :param scale_value: rescale each series to start from this value so they can be compared directly
    """

    dfs = []
    # default compare kdata
    if schema_map_columns is None and schema is None:
        entity_type_map_ids = _group_entity_ids(entity_ids=entity_ids)
        for entity_type in entity_type_map_ids:
            schema = get_kdata_schema(entity_type=entity_type)
            df = schema.query_data(
                entity_ids=entity_type_map_ids.get(entity_type),
                start_timestamp=start_timestamp)
            dfs.append(df)
        all_df = pd.concat(dfs)
        drawer = Drawer(
            main_df=all_df,
            sub_df_list=[all_df[["entity_id", "timestamp", "turnover"]].copy()],
        )
        drawer.draw_kline(show=True, scale_value=scale_value)
    else:
        if schema_map_columns:
            for schema in schema_map_columns:
                columns = ["entity_id", "timestamp"
                           ] + schema_map_columns.get(schema)
                df = schema.query_data(entity_ids=entity_ids,
                                       codes=codes,
                                       columns=columns,
                                       start_timestamp=start_timestamp)
                dfs.append(df)
        elif schema:
            columns = ["entity_id", "timestamp"] + columns
            df = schema.query_data(entity_ids=entity_ids,
                                   codes=codes,
                                   columns=columns,
                                   start_timestamp=start_timestamp)
            dfs.append(df)

        all_df = pd.concat(dfs)
        drawer = Drawer(main_df=all_df)
        drawer.draw(main_chart=chart_type, show=True, scale_value=scale_value)
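
A short usage sketch of this final compare variant, rescaling two instruments to a common starting value (the entity ids and date are illustrative assumptions):

compare(
    entity_ids=["stock_sz_000338", "stock_sh_601318"],
    start_timestamp="2020-01-01",
    scale_value=100,
)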