def __init__(self,
             region: Region,
             entity_id,
             timestamp,
             window=100,
             level=IntervalLevel.LEVEL_1DAY,
             entity_schema=Stock,
             range=0.3,
             std=1) -> None:
    self.entity_id = entity_id
    self.window = window

    data_schema = get_kdata_schema(EntityType(entity_schema.__name__.lower()),
                                   level=level)

    self.df = get_kdata(region=region,
                        entity_id=entity_id,
                        level=level,
                        end_timestamp=timestamp,
                        order=data_schema.timestamp.desc(),
                        limit=window,
                        columns=['volume', 'open', 'close', 'high', 'low'])

    self.range = range
    self.std = std
def __init__(self,
             region: Region,
             entity_schema: EntityMixin = Stock,
             provider: Provider = Provider.Default,
             entity_provider: Provider = Provider.Default,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             the_timestamp: Union[str, pd.Timestamp] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             columns: List = [
                 'id', 'entity_id', 'timestamp', 'level',
                 'open', 'close', 'high', 'low'
             ],
             filters: List = None,
             order: object = None,
             limit: int = None,
             level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
             category_field: str = 'entity_id',
             time_field: str = 'timestamp',
             computing_window: int = 10,
             keep_all_timestamp: bool = False,
             fill_method: str = 'ffill',
             effective_number: int = None,
             need_persist: bool = True,
             dry_run: bool = True,
             # added fields
             short_window: int = 5,
             long_window: int = 10) -> None:
    # schema that persists the ma state stats for this entity type and level
    self.factor_schema = get_ma_state_stats_schema(
        entity_type=EntityType(entity_schema.__name__.lower()), level=level)
    self.short_window = short_window
    self.long_window = long_window

    # compute the short/long moving averages plus their change percentage
    transformer: Transformer = MaTransformer(
        windows=[short_window, long_window], cal_change_pct=True)
    # carry the ma state across incremental computations
    accumulator = MaAccumulator(short_window=short_window,
                                long_window=long_window)

    super().__init__(region, entity_schema, provider, entity_provider,
                     entity_ids, exchanges, codes, the_timestamp,
                     start_timestamp, end_timestamp, columns, filters, order,
                     limit, level, category_field, time_field,
                     computing_window, keep_all_timestamp, fill_method,
                     effective_number, transformer, accumulator, need_persist,
                     dry_run)
def register(cls):
    # register the entity schema under its entity type
    if issubclass(cls, EntityMixin):
        entity_type_ = entity_type
        if not entity_type:
            # default to the lower-cased class name, e.g. Stock -> 'stock'
            entity_type_ = EntityType(cls.__name__.lower())

        if entity_type_ not in zvt_context.entity_types:
            zvt_context.entity_types.append(entity_type_)
        zvt_context.entity_schema_map[entity_type_] = cls

        add_to_map_list(the_map=zvt_context.entity_map_schemas,
                        key=entity_type_,
                        value=cls)
    return cls
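# Usage sketch (an assumption, not taken from the source): `register` reads
# `entity_type` from an enclosing scope, so it is presumably the inner
# function of a decorator factory such as `register_entity(entity_type=...)`.
# A schema class would then be registered simply by decorating it; the names
# below are illustrative only.
#
# @register_entity(entity_type=EntityType('stock'))
# class Stock(EntityMixin, ...):
#     ...
#
# # afterwards the class is discoverable through the registry:
# # zvt_context.entity_schema_map[EntityType('stock')] is Stock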
def __init__(self,
             region: Region,
             entity_schema: EntityMixin = Stock,
             provider: Provider = Provider.Default,
             entity_provider: Provider = Provider.Default,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             the_timestamp: Union[str, pd.Timestamp] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             columns: List = [
                 'id', 'entity_id', 'timestamp', 'level',
                 'open', 'close', 'high', 'low'
             ],
             filters: List = None,
             order: object = None,
             limit: int = None,
             level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
             category_field: str = 'entity_id',
             time_field: str = 'timestamp',
             computing_window: int = None,
             keep_all_timestamp: bool = False,
             fill_method: str = 'ffill',
             effective_number: int = None,
             transformer: Transformer = MacdTransformer(),
             accumulator: Accumulator = None,
             need_persist: bool = False,
             dry_run: bool = False,
             adjust_type: Union[AdjustType, str] = None) -> None:
    self.adjust_type = adjust_type
    # resolve the kdata schema for this entity type, level and adjust type
    self.data_schema = get_kdata_schema(
        EntityType(entity_schema.__name__.lower()),
        level=level,
        adjust_type=adjust_type)

    # the transformer decides which indicator columns get computed
    if transformer:
        self.indicator_cols = transformer.indicators

    super().__init__(self.data_schema, region, entity_schema, provider,
                     entity_provider, entity_ids, exchanges, codes,
                     the_timestamp, start_timestamp, end_timestamp, columns,
                     filters, order, limit, level, category_field, time_field,
                     computing_window, keep_all_timestamp, fill_method,
                     effective_number, transformer, accumulator, need_persist,
                     dry_run)
def get_ref_vector(region: Region,
                   entity_id,
                   end,
                   window=100,
                   level=IntervalLevel.LEVEL_1DAY,
                   entity_schema=Stock):
    data_schema = get_kdata_schema(EntityType(entity_schema.__name__.lower()),
                                   level=level)

    # last `window` bars ending at `end`, newest first
    df = get_kdata(region=region,
                   entity_id=entity_id,
                   level=level,
                   end_timestamp=end,
                   order=data_schema.timestamp.desc(),
                   limit=window,
                   columns=['close', 'volume'])

    # pack close and volume into a (window, 2) reference matrix
    exp_data = np.zeros((window, 2))
    exp_data[:, 0] = df['close']
    exp_data[:, 1] = df['volume']
    return exp_data
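# Illustrative call of get_ref_vector; the region value and entity_id below
# are assumptions for the sketch, not taken from the source. It pulls the
# last 100 daily bars for one instrument and returns the (close, volume)
# matrix used as the reference pattern.
#
# ref = get_ref_vector(region=Region.CHN,
#                      entity_id='stock_sz_000338',
#                      end='2021-01-01')
# assert ref.shape == (100, 2)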
def decode_entity_id(entity_id: str):
    # entity_id layout: {entity_type}_{exchange}_{code}
    result = entity_id.split('_')
    entity_type = EntityType(result[0].lower())
    exchange = result[1]
    # join any remaining segments to form the code
    code = ''.join(result[2:])
    return entity_type, exchange, code
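# Quick usage sketch for decode_entity_id. The example id follows the
# "{entity_type}_{exchange}_{code}" layout implied by the split above; the
# concrete value is an assumption for illustration.
#
# entity_type, exchange, code = decode_entity_id('stock_sz_000338')
# # entity_type -> EntityType('stock'), exchange -> 'sz', code -> '000338'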