def __init__(self, entity_type='etf', exchanges=None, entity_ids=None, codes=None, batch_size=10,
             force_update=True, sleeping_time=0, default_size=2000, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK, kdata_use_begin_time=False, close_hour=15,
             close_minute=0, one_day_trading_minutes=4 * 60, adjust_type=AdjustType.qfq) -> None:
    """Set up a joinquant kdata recorder: resolve the kdata schema, init the parent
    recorder, then authenticate against the jqdatasdk service.

    :param entity_type: entity kind used to resolve the kdata schema (default 'etf')
    :param exchanges: exchanges to record; defaults to ['sh', 'sz'] when None
    :param level: bar interval; coerced to IntervalLevel
    :param adjust_type: price adjustment (qfq/hfq/bfq); coerced to AdjustType
    """
    # FIX: avoid the shared mutable-default-argument pitfall — default per call
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level, adjust_type=adjust_type)
    self.jq_trading_level = to_jq_trading_level(level)
    # NOTE(review): entity_type defaults to 'etf' but the parent recorder is initialized
    # with a hard-coded 'stock' — looks suspicious; confirm before changing.
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type
    auth(zvt_env['jq_username'], zvt_env['jq_password'])
    # report the remaining jqdata query quota (in units of 10k)
    print(f"剩余{get_query_count()['spare']/10000}万")
def get_top_volume_entities(entity_type='stock', entity_ids=None, start_timestamp=None,
                            end_timestamp=None, pct=0.1, return_type='positive',
                            adjust_type: Union[AdjustType, str] = None, method='avg'):
    """Rank entities by turnover over a window and return the top slice.

    :param entity_ids: optional subset of entities to restrict the ranking to
    :param pct: fraction of the ranked universe to return
    :param method: aggregation method passed through to get_top_entities (e.g. 'avg')
    :return: the positive (top) ranking result
    """
    # stocks default to post-adjusted (hfq) prices for comparable turnover
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    entity_filters = [schema.entity_id.in_(entity_ids)] if entity_ids else None
    top, _ = get_top_entities(data_schema=schema,
                              start_timestamp=start_timestamp,
                              end_timestamp=end_timestamp,
                              column='turnover',
                              pct=pct,
                              method=method,
                              return_type=return_type,
                              filters=entity_filters)
    return top
def __init__(self, region: Region, entity_id, timestamp, window=100, level=IntervalLevel.LEVEL_1DAY,
             entity_schema=Stock, range=0.3, std=1) -> None:
    """Load the most recent `window` kdata bars for one entity ending at `timestamp`.

    :param window: number of bars to fetch (newest first)
    :param range: tolerance/range threshold kept for later computations
    :param std: std-deviation multiplier kept for later computations
    """
    self.entity_id = entity_id
    self.window = window
    schema = get_kdata_schema(EntityType(entity_schema.__name__.lower()), level=level)
    # newest-first slice of OHLCV ending at `timestamp`
    self.df = get_kdata(region=region,
                        entity_id=entity_id,
                        level=level,
                        end_timestamp=timestamp,
                        order=schema.timestamp.desc(),
                        limit=window,
                        columns=['volume', 'open', 'close', 'high', 'low'])
    self.range = range
    self.std = std
def __init__(self, entity_type='index', exchanges=None, entity_ids=None, codes=None, batch_size=10,
             force_update=True, sleeping_time=0, default_size=2000, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None,
             level=IntervalLevel.LEVEL_1WEEK, kdata_use_begin_time=False, close_hour=15,
             close_minute=0, one_day_trading_minutes=4 * 60) -> None:
    """Set up a joinquant index kdata recorder and authenticate against jqdatasdk.

    :param entity_type: entity kind used to resolve the kdata schema (default 'index')
    :param exchanges: exchanges to record; defaults to ['sh', 'sz'] when None
    :param level: bar interval; coerced to IntervalLevel
    """
    # FIX: avoid the shared mutable-default-argument pitfall — default per call
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    self.jq_trading_level = to_jq_trading_level(level)
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level)
    super().__init__('index', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    auth(zvt_env['jq_username'], zvt_env['jq_password'])
def __init__(self, entity_type='coin', exchanges=None, entity_ids=None, codes=None, day_data=False,
             batch_size=10, force_update=True, sleeping_time=1, default_size=2000, real_time=False,
             fix_duplicate_way='ignore', start_timestamp=None, end_timestamp=None, close_hour=0,
             close_minute=0, level=IntervalLevel.LEVEL_1DAY, kdata_use_begin_time=False,
             one_day_trading_minutes=24 * 60, entity_filters=None) -> None:
    """Set up a ccxt-based coin kdata recorder.

    :param entity_filters: SQLAlchemy filters selecting which coins to record;
                           defaults to USDT-quoted pairs when None
    :param level: bar interval; coerced to IntervalLevel and mirrored to ccxt
    """
    # default universe: USDT-quoted trading pairs
    if entity_filters is None:
        entity_filters = [Coin.code.contains('/USDT')]
    level = IntervalLevel(level)
    # coins have no price adjustment concept
    self.data_schema = get_kdata_schema(entity_type=entity_type, level=level, adjust_type=None)
    self.ccxt_trading_level = level.value
    super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update,
                     sleeping_time, default_size, real_time, fix_duplicate_way, start_timestamp,
                     end_timestamp, close_hour, close_minute, level, kdata_use_begin_time,
                     one_day_trading_minutes, entity_filters)
def __init__(self, exchanges=None, entity_ids=None, codes=None, batch_size=10, force_update=True,
             sleeping_time=10, default_size=2000, real_time=True, fix_duplicate_way='ignore',
             start_timestamp=None, end_timestamp=None, kdata_use_begin_time=False, close_hour=None,
             close_minute=None, level=IntervalLevel.LEVEL_TICK, one_day_trading_minutes=24 * 60) -> None:
    """Set up a coin tick recorder (real-time by default).

    :param exchanges: exchanges to record; defaults to ['binance'] when None
    :param level: used only to resolve the kdata schema; the parent recorder is
                  always initialized at LEVEL_TICK
    """
    # FIX: avoid the shared mutable-default-argument pitfall — default per call
    if exchanges is None:
        exchanges = ['binance']
    self.data_schema = get_kdata_schema(entity_type='coin', level=level)
    super().__init__('coin', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, IntervalLevel.LEVEL_TICK, kdata_use_begin_time,
                     one_day_trading_minutes)
def get_top_performance_entities(entity_type='stock', start_timestamp=None, end_timestamp=None,
                                 pct=0.1, return_type='both',
                                 adjust_type: Union[AdjustType, str] = None):
    """Rank entities by close-price change over a window and return the top slice.

    :param pct: fraction of the ranked universe to return
    :param return_type: 'positive', 'negative' or 'both' rankings
    :return: whatever get_top_entities yields for the requested return_type
    """
    # stocks default to post-adjusted (hfq) prices for comparable performance
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    return get_top_entities(data_schema=schema,
                            start_timestamp=start_timestamp,
                            end_timestamp=end_timestamp,
                            column='close',
                            pct=pct,
                            method='change',
                            return_type=return_type)
def get_performance(entity_ids, start_timestamp=None, end_timestamp=None,
                    adjust_type: Union[AdjustType, str] = None):
    """Compute close-price change over a window for the given entities.

    The entity type is decoded from the first id; all ids are assumed to share it.

    :param entity_ids: non-empty list of entity ids
    :return: the positive ranking result covering all requested entities (pct=1)
    """
    entity_type, _, _ = decode_entity_id(entity_ids[0])
    # stocks default to post-adjusted (hfq) prices for comparable performance
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    performance, _ = get_top_entities(data_schema=schema,
                                      column='close',
                                      start_timestamp=start_timestamp,
                                      end_timestamp=end_timestamp,
                                      pct=1,
                                      method='change',
                                      return_type='positive',
                                      filters=[schema.entity_id.in_(entity_ids)])
    return performance
def get_ref_vector(entity_id, end, window=100, level=IntervalLevel.LEVEL_1DAY, entity_schema=Stock):
    """Build a (window, 2) float array of [close, volume] for the last `window` bars.

    Rows are in descending time order (newest first), matching the query order.

    :param end: end timestamp of the window (inclusive)
    :return: numpy array of shape (window, 2); column 0 = close, column 1 = volume
    """
    # FIX(consistency): sibling helpers pass a lower-cased entity type to
    # get_kdata_schema (e.g. 'stock'); the raw class name 'Stock' would not match.
    # TODO(review): confirm get_kdata_schema does not normalize case itself.
    data_schema = get_kdata_schema(entity_schema.__name__.lower(), level=level)
    df = get_kdata(entity_id=entity_id,
                   level=level,
                   end_timestamp=end,
                   order=data_schema.timestamp.desc(),
                   limit=window,
                   columns=['close', 'volume'])
    # NOTE(review): assumes exactly `window` rows come back; fewer rows would
    # raise on the broadcast below — confirm callers guarantee enough history.
    exp_data = np.zeros((window, 2))
    exp_data[:, 0] = df['close']
    exp_data[:, 1] = df['volume']
    return exp_data
def get_top_volume_entities(entity_type='stock', start_timestamp=None, end_timestamp=None, pct=0.1,
                            return_type='both', adjust_type: Union[AdjustType, str] = None,
                            method='avg'):
    """Rank all entities by turnover over a window and return the top slice.

    :param pct: fraction of the ranked universe to return
    :param method: aggregation method passed through to get_top_entities (e.g. 'avg')
    :return: the positive-side ranking from get_top_entities
    """
    # stocks default to post-adjusted (hfq) prices for comparable turnover
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    top, _ = get_top_entities(data_schema=schema,
                              start_timestamp=start_timestamp,
                              end_timestamp=end_timestamp,
                              column='turnover',
                              pct=pct,
                              method=method,
                              return_type=return_type)
    return top
def __init__(self, exchanges=None, entity_ids=None, codes=None, batch_size=10, force_update=True,
             sleeping_time=0, default_size=2000, real_time=False, fix_duplicate_way='ignore',
             start_timestamp=None, end_timestamp=None, level=IntervalLevel.LEVEL_1WEEK,
             kdata_use_begin_time=False, close_hour=15, close_minute=0,
             one_day_trading_minutes=4 * 60, adjust_type=AdjustType.qfq) -> None:
    """Set up a baostock-backed stock kdata recorder and log in to baostock.

    :param exchanges: exchanges to record; defaults to ['sh', 'sz'] when None
    :param level: bar interval; coerced to IntervalLevel and mirrored to baostock
    :param adjust_type: price adjustment (qfq/hfq/bfq); coerced to AdjustType
    """
    # FIX: avoid the shared mutable-default-argument pitfall — default per call
    if exchanges is None:
        exchanges = ['sh', 'sz']
    level = IntervalLevel(level)
    adjust_type = AdjustType(adjust_type)
    self.data_schema = get_kdata_schema(entity_type='stock', level=level, adjust_type=adjust_type)
    self.bs_trading_level = to_bs_trading_level(level)
    super().__init__('stock', exchanges, entity_ids, codes, batch_size, force_update, sleeping_time,
                     default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time, one_day_trading_minutes)
    self.adjust_type = adjust_type
    print("尝试登陆baostock")
    # log in with baostock's public anonymous account; error_code '0' means success
    lg = bs.login(user_id="anonymous", password="******")
    if lg.error_code == '0':
        print("登陆成功")
    else:
        print("登录失败")
def get_top_performance_entities(entity_type='stock', start_timestamp=None, end_timestamp=None,
                                 pct=0.1, return_type=None,
                                 adjust_type: Union[AdjustType, str] = None, filters=None,
                                 show_name=False):
    """Rank entities by close-price change over a window and return the top slice.

    :param filters: extra SQLAlchemy filters restricting the universe
    :param show_name: whether get_top_entities should attach display names
    :return: whatever get_top_entities yields for the requested return_type
    """
    # stocks default to post-adjusted (hfq) prices for comparable performance
    if entity_type == 'stock' and not adjust_type:
        adjust_type = AdjustType.hfq
    schema = get_kdata_schema(entity_type=entity_type, adjust_type=adjust_type)
    return get_top_entities(data_schema=schema,
                            start_timestamp=start_timestamp,
                            end_timestamp=end_timestamp,
                            column='close',
                            pct=pct,
                            method=WindowMethod.change,
                            return_type=return_type,
                            filters=filters,
                            show_name=show_name)
def get_labels(self, entity_ids, x_timestamps, y_timestamps):
    """Build the label frame: per-entity close-price change (or its rank score)
    over each [x_timestamp, y_timestamp] window, stacked along the timestamp axis.

    :param entity_ids: entities to label
    :param x_timestamps: window start timestamps
    :param y_timestamps: window end timestamps, aligned 1:1 with x_timestamps
    :return: DataFrame indexed by (entity_id, timestamp) with the score column
    """
    # the schema does not depend on the loop variable — resolve it once
    kdata_schema = get_kdata_schema(entity_type=self.entity_schema.__name__.lower(),
                                    level=self.level,
                                    adjust_type=self.adjust_type)
    dfs = []
    for idx, timestamp in enumerate(x_timestamps):
        y_df = kdata_schema.query_data(start_timestamp=timestamp,
                                       end_timestamp=y_timestamps[idx],
                                       entity_ids=entity_ids,
                                       columns=['entity_id', 'timestamp', 'close'],
                                       index=['entity_id', 'timestamp'])
        y_df = y_df.dropna()
        # per-entity change of close over the window
        y_change = y_df.groupby(level=0)['close'].apply(
            lambda x: cal_change(x)).rename('y_change')
        if self.relative_performance:
            # convert the change to a cross-sectional percentile score
            y_score = y_change.rank(pct=True).apply(
                lambda x: cal_performance(x)).rename('y_score')
        else:
            y_score = y_change
        df = y_score.to_frame()
        df['timestamp'] = timestamp
        # FIX: set_index returns a new frame; the original discarded the result,
        # leaving 'timestamp' as a plain column instead of an index level
        df = df.set_index('timestamp', append=True)
        dfs.append(df)
    return pd.concat(dfs)
def on_trading_close(self, timestamp):
    """Close out the trading day: drop empty positions, mark every remaining
    position to the day's closing price, refresh account value/profit, and
    persist the account snapshot.

    :param timestamp: the trading day being closed
    """
    self.logger.info("on_trading_close:{}".format(timestamp))
    # remove the empty position
    self.account.positions = [
        position for position in self.account.positions
        if position.long_amount > 0 or position.short_amount > 0
    ]

    # clear the data which need recomputing
    the_id = "{}_{}".format(self.trader_name, to_time_str(timestamp, TIME_FORMAT_ISO8601))

    self.account.value = 0
    self.account.all_value = 0
    for position in self.account.positions:
        entity_type, _, _ = decode_entity_id(position.entity_id)
        data_schema = get_kdata_schema(entity_type, level=IntervalLevel.LEVEL_1DAY,
                                       adjust_type=self.adjust_type)

        # latest daily bar at or before `timestamp` (limit=1, descending)
        kdata = get_kdata(
            provider=self.provider,
            level=IntervalLevel.LEVEL_1DAY,
            entity_id=position.entity_id,
            order=data_schema.timestamp.desc(),
            end_timestamp=timestamp,
            limit=1,
            adjust_type=self.adjust_type,
        )

        closing_price = kdata["close"][0]

        # the whole amount becomes tradable again after the close
        position.available_long = position.long_amount
        position.available_short = position.short_amount

        if closing_price:
            if (position.long_amount is not None) and position.long_amount > 0:
                # long: value is simply amount * close
                position.value = position.long_amount * closing_price
                self.account.value += position.value
            elif (position.short_amount is not None) and position.short_amount > 0:
                # short: margin (2x entry value) minus the cost to buy back at close
                position.value = 2 * (position.short_amount * position.average_short_price)
                position.value -= position.short_amount * closing_price
                self.account.value += position.value

            # refresh profit
            # NOTE(review): profit uses long_amount/average_long_price even when the
            # position is short — a zero long_amount would also divide by zero in
            # profit_rate; confirm only long positions reach this recomputation.
            position.profit = (closing_price - position.average_long_price) * position.long_amount
            position.profit_rate = position.profit / (
                position.average_long_price * position.long_amount)
        else:
            # no closing price available: keep the stale value and flag it
            self.logger.warning(
                "could not refresh close value for position:{},timestamp:{}".format(
                    position.entity_id, timestamp))

        # re-key the position record to this trading day's snapshot
        position.id = "{}_{}_{}".format(
            self.trader_name, position.entity_id, to_time_str(timestamp, TIME_FORMAT_ISO8601))
        position.timestamp = to_pd_timestamp(timestamp)
        position.account_stats_id = the_id

    self.account.id = the_id
    # total equity = marked position value + free cash
    self.account.all_value = self.account.value + self.account.cash
    self.account.closing = True
    self.account.timestamp = to_pd_timestamp(timestamp)
    # overall return relative to the initial capital
    self.account.profit = (
        self.account.all_value - self.account.input_money) / self.account.input_money

    self.session.add(self.account)
    self.session.commit()
    account_info = (
        f"on_trading_close,holding size:{len(self.account.positions)} profit:{self.account.profit} input_money:{self.account.input_money} "
        f"cash:{self.account.cash} value:{self.account.value} all_value:{self.account.all_value}"
    )
    self.logger.info(account_info)