def __init__(self,
             entity_schema: Type[EntityMixin] = Stock,
             provider: str = None,
             entity_provider: str = None,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             the_timestamp: Union[str, pd.Timestamp] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             columns: List = None,
             filters: List = None,
             order: object = None,
             limit: int = None,
             level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
             category_field: str = 'entity_id',
             time_field: str = 'timestamp',
             computing_window: int = None,
             keep_all_timestamp: bool = False,
             fill_method: str = 'ffill',
             effective_number: int = None,
             transformer: Transformer = None,
             accumulator: Accumulator = None,
             need_persist: bool = False,
             dry_run: bool = False,
             factor_name: str = None,
             clear_state: bool = False,
             adjust_type: Union[AdjustType, str] = None) -> None:
    if columns is None:
        columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low']

    # stocks default to hfq (post-adjusted) prices
    if entity_schema == Stock and not adjust_type:
        adjust_type = AdjustType.hfq

    self.adjust_type = adjust_type
    self.data_schema = get_kdata_schema(entity_schema.__name__, level=level, adjust_type=adjust_type)

    if transformer:
        self.indicator_cols = transformer.indicators

    if not factor_name:
        if type(level) == str:
            factor_name = f'{type(self).__name__.lower()}_{level}'
        else:
            factor_name = f'{type(self).__name__.lower()}_{level.value}'

    super().__init__(self.data_schema, entity_schema, provider, entity_provider, entity_ids, exchanges, codes,
                     the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level,
                     category_field, time_field, computing_window, keep_all_timestamp, fill_method,
                     effective_number, transformer, accumulator, need_persist, dry_run, factor_name, clear_state)
def get_top_volume_entities(region: Region,
                            entity_type=EntityType.Stock,
                            entity_ids=None,
                            start_timestamp=None,
                            end_timestamp=None,
                            pct=0.1,
                            return_type=TopType.positive,
                            adjust_type: Union[AdjustType, str] = None,
                            method=WindowMethod.avg):
    if not adjust_type and entity_type == EntityType.Stock:
        adjust_type = AdjustType.hfq
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)

    filters = None
    if entity_ids:
        filters = [data_schema.entity_id.in_(entity_ids)]

    result, _ = get_top_entities(region=region,
                                 data_schema=data_schema,
                                 start_timestamp=start_timestamp,
                                 end_timestamp=end_timestamp,
                                 column='turnover',
                                 pct=pct,
                                 method=method,
                                 return_type=return_type,
                                 filters=filters)
    return result
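# Usage sketch for get_top_volume_entities (assumptions: Region.CHN exists in this zvt
# fork, and the entity ids below are hypothetical examples of zvt's
# "<entity_type>_<exchange>_<code>" id format). Ranks the given stocks by average
# turnover over 2020 and keeps the top 10% (pct=0.1).
top = get_top_volume_entities(region=Region.CHN,
                              entity_type=EntityType.Stock,
                              entity_ids=['stock_sz_000338', 'stock_sh_601318'],
                              start_timestamp='2020-01-01',
                              end_timestamp='2020-12-31',
                              pct=0.1,
                              method=WindowMethod.avg)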
def __init__(self,
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=zvt_config['batch_size'],
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq,
             share_para=None) -> None:
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type=EntityType.Stock, level=level, adjust_type=adjust_type)
    self.bao_trading_level = to_bao_trading_level(level)

    super().__init__(EntityType.Stock, exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                     close_minute, level, kdata_use_begin_time, one_day_trading_minutes, share_para=share_para)
    self.adjust_type = adjust_type
def __init__(self,
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             day_data=True,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60) -> None:
    level = IntervalLevel(level)
    self.data_schema = get_kdata_schema(entity_type='index', level=level)
    self.jq_trading_level = to_jq_trading_level(level)

    super().__init__('index', exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                     close_minute, level, kdata_use_begin_time, one_day_trading_minutes)

    get_token(zvt_config['jq_username'], zvt_config['jq_password'], force=True)
def __init__(self,
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.tl_trading_level = to_jq_trading_level(level)
    if self.tl_trading_level != "1d":
        # tonglian currently only provides daily klines, so only level=1d is supported
        msg = 'tonglian only supports daily kdata (level=1d), got level: {}'.format(self.tl_trading_level)
        self.logger.info(msg)
        raise Exception(msg)

    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time, default_size,
                     real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, close_minute, level,
                     kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    self.tonglian_conn = create_engine(
        f"mysql://{zvt_env['tl_username']}:{zvt_env['tl_password']}@{zvt_env['tl_server_address']}:"
        f"{zvt_env['tl_server_port']}/{zvt_env['tl_db_name']}?charset=utf8mb4",
        pool_recycle=3600, echo=False).connect()
def __init__(self,
             # exchanges=['hk', 'sh', 'sz', 'o', 'a', 'n'],
             exchanges=['o', 'a', 'n'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.jq_trading_level = to_jq_trading_level(level)

    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time, default_size,
                     real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, close_minute, level,
                     kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    # call the login function (works after activation, no username/password needed)
    loginResult = c.start("ForceLogin=1", '')
    if loginResult.ErrorCode != 0:
        print("login failed")
        exit()
def get_trading_signals_figure(order_reader: OrderReader,
                               entity_id: str,
                               start_timestamp=None,
                               end_timestamp=None):
    entity_type, _, _ = decode_entity_id(entity_id)
    data_schema = get_kdata_schema(entity_type=entity_type, level=order_reader.level)

    if not start_timestamp:
        start_timestamp = order_reader.start_timestamp
    if not end_timestamp:
        end_timestamp = order_reader.end_timestamp

    kdata_reader = DataReader(entity_ids=[entity_id],
                              data_schema=data_schema,
                              entity_schema=entity_schema_map.get(entity_type),
                              start_timestamp=start_timestamp,
                              end_timestamp=end_timestamp,
                              level=order_reader.level)

    # generate the annotation df
    order_reader.move_on(timeout=0)
    df = order_reader.data_df.copy()
    df = df[df.entity_id == entity_id].copy()
    if pd_is_not_null(df):
        df['value'] = df['order_price']
        df['flag'] = df['order_type'].apply(lambda x: order_type_flag(x))
        df['color'] = df['order_type'].apply(lambda x: order_type_color(x))
        print(df.tail())

    drawer = Drawer(main_df=kdata_reader.data_df, annotation_df=df)
    return drawer.draw_kline(show=False)
def __init__(self,
             entity_type='index',
             exchanges=None,
             entity_ids=None,
             codes=None,
             day_data=False,
             batch_size=10,
             force_update=False,
             sleeping_time=10,
             default_size=10000,
             real_time=True,
             fix_duplicate_way='add',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=0,
             close_minute=0,
             one_day_trading_minutes=24 * 60) -> None:
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level)

    super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                     close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
def __init__(self,
             exchanges=['huobipro', 'binance'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='coin', level=level, adjust_type=adjust_type)

    super().__init__('coin', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time, default_size,
                     real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, close_minute, level,
                     kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type
def on_trading_close(self, timestamp):
    self.logger.debug('on_trading_close:{}'.format(timestamp))

    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=self.adjust_type)

        kdata = get_kdata(provider=self.provider,
                          level=IntervalLevel.LEVEL_1DAY,
                          entity_id=position.entity_id,
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1,
                          adjust_type=self.adjust_type)

        closing_price = kdata['close'][0]

        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value
        else:
            self.logger.warning('could not refresh close value for position:{},timestamp:{}'.format(
                position.entity_id, timestamp))

    # remove the empty positions
    self.account.positions = [
        position for position in self.account.positions
        if position.long_amount > 0 or position.short_amount > 0
    ]

    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)

    self.logger.debug('on_trading_close:{},latest_account:{}'.format(timestamp, self.account))
    self.persist_account(timestamp)
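# Worked illustration of the short-position valuation used above (numbers are
# hypothetical). A short position is marked as
#   value = 2 * short_amount * average_short_price - short_amount * closing_price
# which equals the original short notional plus the unrealised gain as the price falls.
short_amount = 100
average_short_price = 10.0
closing_price = 9.0
value = 2 * (short_amount * average_short_price) - short_amount * closing_price
assert value == 1100.0  # 1000 notional + 100 gain from the move 10.0 -> 9.0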
def on_trading_close(self, timestamp):
    self.logger.info('on_trading_close:{}'.format(timestamp))

    self.latest_account['value'] = 0
    self.latest_account['all_value'] = 0
    for position in self.latest_account['positions']:
        entity_type, _, _ = decode_entity_id(position['entity_id'])
        data_schema = get_kdata_schema(entity_type, level=self.level)

        kdata = get_kdata(provider=self.provider,
                          level=IntervalLevel.LEVEL_1DAY,
                          entity_id=position['entity_id'],
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1)

        closing_price = kdata['close'][0]

        position['available_long'] = position['long_amount']
        position['available_short'] = position['short_amount']

        if closing_price:
            if (position['long_amount'] is not None) and position['long_amount'] > 0:
                position['value'] = position['long_amount'] * closing_price
                self.latest_account['value'] += position['value']
            elif (position['short_amount'] is not None) and position['short_amount'] > 0:
                position['value'] = 2 * (position['short_amount'] * position['average_short_price'])
                position['value'] -= position['short_amount'] * closing_price
                self.latest_account['value'] += position['value']
        else:
            self.logger.warning('could not refresh close value for position:{},timestamp:{}'.format(
                position['entity_id'], timestamp))

    # remove the empty positions
    self.latest_account['positions'] = [
        position for position in self.latest_account['positions']
        if position['long_amount'] > 0 or position['short_amount'] > 0
    ]

    self.latest_account['all_value'] = self.latest_account['value'] + self.latest_account['cash']
    self.latest_account['closing'] = True
    self.latest_account['timestamp'] = to_pd_timestamp(timestamp)

    self.logger.info('on_trading_close:{},latest_account:{}'.format(timestamp, self.latest_account))
    self.persist_account(timestamp)
def __init__(self,
             region: Region,
             entity_schema: EntityMixin = Stock,
             provider: Provider = Provider.Default,
             entity_provider: Provider = Provider.Default,
             entity_ids: List[str] = None,
             exchanges: List[str] = None,
             codes: List[str] = None,
             the_timestamp: Union[str, pd.Timestamp] = None,
             start_timestamp: Union[str, pd.Timestamp] = None,
             end_timestamp: Union[str, pd.Timestamp] = None,
             columns: List = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'],
             filters: List = None,
             order: object = None,
             limit: int = None,
             level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
             category_field: str = 'entity_id',
             time_field: str = 'timestamp',
             computing_window: int = None,
             keep_all_timestamp: bool = False,
             fill_method: str = 'ffill',
             effective_number: int = None,
             transformer: Transformer = MacdTransformer(),
             accumulator: Accumulator = None,
             need_persist: bool = False,
             dry_run: bool = False,
             adjust_type: Union[AdjustType, str] = None) -> None:
    self.adjust_type = adjust_type
    self.data_schema = get_kdata_schema(EntityType(entity_schema.__name__.lower()),
                                        level=level,
                                        adjust_type=adjust_type)

    if transformer:
        self.indicator_cols = transformer.indicators

    super().__init__(self.data_schema, region, entity_schema, provider, entity_provider, entity_ids, exchanges,
                     codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, level,
                     category_field, time_field, computing_window, keep_all_timestamp, fill_method,
                     effective_number, transformer, accumulator, need_persist, dry_run)
def get_top_performance_entities(entity_type=EntityType.Stock,
                                 start_timestamp=None,
                                 end_timestamp=None,
                                 pct=0.1,
                                 return_type=None,
                                 adjust_type: Union[AdjustType, str] = None):
    if not adjust_type and entity_type == EntityType.Stock:
        adjust_type = AdjustType.hfq
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)

    return get_top_entities(data_schema=data_schema,
                            start_timestamp=start_timestamp,
                            end_timestamp=end_timestamp,
                            column='close',
                            pct=pct,
                            method=WindowMethod.change,
                            return_type=return_type)
def get_performance(region: Region,
                    entity_ids,
                    start_timestamp=None,
                    end_timestamp=None,
                    adjust_type: Union[AdjustType, str] = None):
    entity_type, _, _ = decode_entity_id(entity_ids[0])
    if not adjust_type and entity_type == EntityType.Stock:
        adjust_type = AdjustType.hfq
    data_schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)

    result, _ = get_top_entities(region=region,
                                 data_schema=data_schema,
                                 column='close',
                                 start_timestamp=start_timestamp,
                                 end_timestamp=end_timestamp,
                                 pct=1,
                                 method=WindowMethod.change,
                                 return_type=TopType.positive,
                                 filters=[data_schema.entity_id.in_(entity_ids)])
    return result
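# Usage sketch for get_performance (assumptions: Region.CHN exists in this zvt fork and
# the entity ids below are hypothetical). Because the ids are stocks and adjust_type is
# left unset, hfq-adjusted kdata is used; each entity is measured by the change of its
# 'close' column over the window.
perf = get_performance(region=Region.CHN,
                       entity_ids=['stock_sz_000338', 'stock_sh_601318'],
                       start_timestamp='2020-01-01',
                       end_timestamp='2020-12-31')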
def __init__(self,
             exchanges=['sh', 'sz'],
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=True,
             sleeping_time=0,
             default_size=2000,
             real_time=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60,
             adjust_type=AdjustType.qfq) -> None:
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.bs_trading_level = to_bs_trading_level(level)

    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time, default_size,
                     real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour, close_minute, level,
                     kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type

    print("trying to log in to baostock")
    ##### login #####
    lg = bs.login(user_id="anonymous", password="******")
    if lg.error_code == '0':
        print("baostock login succeeded")
    else:
        print("baostock login failed")
def on_trading_close(self, timestamp):
    self.logger.info('on_trading_close:{}'.format(timestamp))

    # remove the empty positions
    self.account.positions = [
        position for position in self.account.positions
        if position.long_amount > 0 or position.short_amount > 0
    ]

    # clear the data which need recomputing
    the_id = '{}_{}'.format(self.trader_name, to_time_str(timestamp, TIME_FORMAT_ISO8601))

    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY, adjust_type=self.adjust_type)

        kdata = get_kdata(provider=self.provider,
                          level=IntervalLevel.LEVEL_1DAY,
                          entity_id=position.entity_id,
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1,
                          adjust_type=self.adjust_type)

        closing_price = kdata['close'][0]

        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value

            # refresh profit
            position.profit = (closing_price - position.average_long_price) * position.long_amount
            position.profit_rate = position.profit / (position.average_long_price * position.long_amount)
        else:
            self.logger.warning('could not refresh close value for position:{},timestamp:{}'.format(
                position.entity_id, timestamp))

        position.id = '{}_{}_{}'.format(self.trader_name, position.entity_id,
                                        to_time_str(timestamp, TIME_FORMAT_ISO8601))
        position.timestamp = to_pd_timestamp(timestamp)
        position.account_stats_id = the_id

    self.account.id = the_id
    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)
    self.account.profit = (self.account.all_value - self.account.input_money) / self.account.input_money

    self.session.add(self.account)
    self.session.commit()

    account_info = f'on_trading_close,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} ' \
                   f'cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}'
    self.logger.info(account_info)