def test_single_trader():
    """End-to-end check of SingleTrader on 000338 over 2019.

    Runs the trader, then verifies the account profit rate matches the
    theoretical return computed from the buy/sell close prices after
    slippage and trading costs.
    """
    trader = SingleTrader(codes=['000338'],
                          level=IntervalLevel.LEVEL_1DAY,
                          start_timestamp='2019-01-01',
                          end_timestamp='2020-01-10',
                          trader_name='000338_single_trader',
                          draw_result=False,
                          adjust_type=AdjustType.qfq)
    trader.run()

    positions = trader.get_current_account().positions
    print(positions)

    account = trader.get_current_account()
    print(account)

    # buy_timestamp / sell_timestamp are module-level fixtures — TODO confirm
    buy_price = get_kdata(region=Region.CHN, entity_id='stock_sz_000338',
                          start_timestamp=buy_timestamp, end_timestamp=buy_timestamp,
                          return_type='domain')[0]
    sell_price = get_kdata(region=Region.CHN, entity_id='stock_sz_000338',
                           start_timestamp=sell_timestamp, end_timestamp=sell_timestamp,
                           return_type='domain')[0]

    sell_lost = trader.account_service.slippage + trader.account_service.sell_cost
    buy_lost = trader.account_service.slippage + trader.account_service.buy_cost

    # FIX: the effective buy cost is close * (1 + buy_lost); the original
    # expression divided by close and then *multiplied* by (1 + buy_lost)
    # because of operator precedence, inflating the expected return.
    pct = (sell_price.close * (1 - sell_lost) - buy_price.close * (1 + buy_lost)) / \
          (buy_price.close * (1 + buy_lost))

    profit_rate = (account.all_value - account.input_money) / account.input_money
    assert round(profit_rate, 2) == round(pct, 2)
def test_jq_603220_kdata():
    """Fetch and print joinquant kdata for 603220 at day and hour levels."""
    cases = ((day_k_session, IntervalLevel.LEVEL_1DAY),
             (day_1h_session, IntervalLevel.LEVEL_1HOUR))
    for kdata_session, kdata_level in cases:
        kdata_df = quote.get_kdata(entity_id='stock_sh_603220',
                                   session=kdata_session,
                                   level=kdata_level,
                                   provider='joinquant')
        print(kdata_df)
def test_jq_603220_kdata():
    """Fetch and print CHN joinquant kdata for 603220 at day and hour levels."""
    for kdata_session, kdata_level in ((day_k_session, IntervalLevel.LEVEL_1DAY),
                                       (day_1h_session, IntervalLevel.LEVEL_1HOUR)):
        kdata_df = get_kdata(region=Region.CHN,
                             entity_id='stock_sh_603220',
                             session=kdata_session,
                             level=kdata_level,
                             provider=Provider.JoinQuant)
        print(kdata_df)
def filter_selector_long_targets(self, timestamp, selector: TargetSelector, long_targets: List[str]) -> List[str]:
    """Post-filter the daily-level selector's picks.

    Keeps only entities whose last 3 daily bars all closed up
    (close > open) with strictly rising highs. Non-daily selectors
    pass through unchanged.
    """
    if selector.level != IntervalLevel.LEVEL_1DAY:
        return long_targets
    if not long_targets:
        return None
    entity_ids = []
    for entity_id in long_targets:
        # Fetch ~3 most recent bars (a 20-day window covers holidays/suspensions).
        df = get_kdata(region=self.region,
                       entity_id=entity_id,
                       start_timestamp=timestamp - datetime.timedelta(20),
                       end_timestamp=timestamp,
                       columns=['entity_id', 'close', 'open', 'high', 'low'])
        if pd_is_not_null(df) and len(df) >= 3:
            df = df.iloc[-3:]
            # all three bars positive (close above open)
            se = df['close'] > df['open']
            positive = np.all(se)
            # FIX: use positional .iloc — df['high'][0] relied on the
            # deprecated integer-key fallback on a non-integer index.
            trending = df['high'].iloc[0] < df['high'].iloc[1] < df['high'].iloc[2]
            if positive and trending:
                entity_ids.append(entity_id)
    return entity_ids
def test_to_high_level_kdata():
    """Compose weekly kdata from daily bars and print both frames."""
    daily_df = get_kdata(provider='joinquant',
                         level=IntervalLevel.LEVEL_1DAY,
                         entity_id='stock_sz_000338')
    print(daily_df)
    weekly_df = to_high_level_kdata(kdata_df=daily_df.loc[:'2019-09-01', :],
                                    to_level=IntervalLevel.LEVEL_1WEEK)
    print(weekly_df)
def on_finish_entity(self, entity, http_session):
    """Backfill missing cumulative net values for an ETF's daily kdata.

    Loads rows whose cumulative_net_value is still NULL, fetches the fund's
    cumulative net value from eastmoney for that date span, and commits the
    filled-in records.
    """
    records = get_kdata(region=self.region,
                        provider=self.provider,
                        entity_id=entity.id,
                        level=IntervalLevel.LEVEL_1DAY.value,
                        order=Etf1dKdata.timestamp.asc(),
                        return_type='domain',
                        filters=[Etf1dKdata.cumulative_net_value.is_(None)])
    if not records:
        return
    start, end = records[0].timestamp, records[-1].timestamp
    # cumulative net value comes from eastmoney
    df = self.fetch_cumulative_net_value(entity, start, end, http_session)
    if pd_is_not_null(df):
        for record in records:
            if record.timestamp in df.index:
                record.cumulative_net_value = df.loc[record.timestamp, 'LJJZ']
                record.change_pct = df.loc[record.timestamp, 'JZZZL']
        session = get_db_session(region=self.region,
                                 provider=self.provider,
                                 data_schema=self.data_schema)
        session.commit()
        self.logger.info(f'{entity.code} - {entity.name}累计净值更新完成...')
def __init__(self,
             entity_type='stock',
             exchanges=None,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=10,
             default_size=2000,
             real_time=True,
             fix_duplicate_way='add',
             start_timestamp=None,
             end_timestamp=None,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=0,
             close_minute=0,
             one_day_trading_minutes=24 * 60) -> None:
    """Initialize the recorder and cache each entity's latest adjust factor.

    The factor cache lets later updates detect when qfq prices must be
    recomputed.
    """
    # FIX: avoid a mutable default argument; the effective default is
    # still ['sh', 'sz'], so callers are unaffected.
    if exchanges is None:
        exchanges = ['sh', 'sz']
    super().__init__(entity_type, exchanges, entity_ids, codes, batch_size,
                     force_update, sleeping_time, default_size, real_time,
                     fix_duplicate_way, start_timestamp, end_timestamp,
                     close_hour, close_minute, level, kdata_use_begin_time,
                     one_day_trading_minutes)
    # entity_id -> latest stored factor / latest fetched factor
    self.current_factors = {}
    self.latest_factors = {}
    for security_item in self.entities:
        kdata = get_kdata(entity_id=security_item.id,
                          provider=self.provider,
                          level=self.level.value,
                          order=Stock1dKdata.timestamp.desc(),
                          return_type='domain',
                          session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
def select_short_targets_from_levels(self, timestamp):
    """Pick held entities to sell: those that closed below MA5 for 3 straight days.

    Returns a list of entity ids whose latest bar (at `timestamp`) completes
    a 3-day run of closes under the 5-day moving average, or None when there
    are no positions or nothing qualifies.
    """
    # Shorting is not allowed, so sell candidates can only come from current positions.
    positions = self.get_current_positions()
    if positions:
        entity_ids = [position.entity_id for position in positions]
        # Sell on an effective break below the 5-day moving average.
        # 20 calendar days of history is enough for a 5-bar window plus holidays.
        input_df = get_kdata(region=self.region,
                             entity_ids=entity_ids,
                             start_timestamp=timestamp - datetime.timedelta(20),
                             end_timestamp=timestamp,
                             columns=['entity_id', 'close'],
                             index=['entity_id', 'timestamp'])
        # per-entity rolling MA5 (min_periods=5 leaves NaN until the window fills)
        ma_df = input_df['close'].groupby(level=0).rolling(window=5, min_periods=5).mean()
        ma_df = ma_df.reset_index(level=0, drop=True)
        input_df['ma5'] = ma_df
        # score: True where close is under MA5
        s = input_df['close'] < input_df['ma5']
        input_df = s.to_frame(name='score')
        # 3 consecutive closes below MA5 -> rolling AND over a 3-bar window
        df = input_df['score'].groupby(level=0).rolling(window=3, min_periods=3).apply(
            lambda x: np.logical_and.reduce(x))
        df = df.reset_index(level=0, drop=True)
        input_df['score'] = df
        # keep only rows where the whole 3-bar window was below MA5
        result_df = input_df[input_df['score'] == 1.0]
        if pd_is_not_null(result_df):
            # restrict to the signal completing exactly at `timestamp`
            short_df = result_df.loc[(slice(None), slice(timestamp, timestamp)), :]
            if pd_is_not_null(short_df):
                return short_df.index.get_level_values(0).tolist()
def test_jq_1d_kdata():
    """Sanity-check qfq daily kdata caps for 000338 on 2019-04-19."""
    df = get_kdata(entity_id='stock_sz_000338',
                   provider='joinquant',
                   level=IntervalLevel.LEVEL_1DAY)
    row = df.loc['2019-04-19']
    # make sure our fq is ok
    caps = {'open': 12.93, 'high': 13.52, 'low': 12.89, 'close': 13.33}
    for column, cap in caps.items():
        assert round(row[column], 2) <= cap
def test_jq_1d_hfq_kdata():
    """Check exact hfq daily OHLC values for 000338 on 2019-04-08."""
    df = get_kdata(entity_id='stock_sz_000338',
                   provider='joinquant',
                   level=IntervalLevel.LEVEL_1DAY,
                   adjust_type='hfq')
    row = df.loc['2019-04-08']
    print(row)
    expected = {'open': 249.29, 'high': 273.68, 'low': 249.29, 'close': 272.18}
    for column, value in expected.items():
        assert round(row[column], 2) == value
def test_jq_1mon_kdata():
    """Sanity-check monthly kdata caps for 000338 on 2010-01-29."""
    df = get_kdata(entity_id='stock_sz_000338',
                   provider='joinquant',
                   level=IntervalLevel.LEVEL_1MON)
    row = df.loc['2010-01-29']
    # make sure our fq is ok
    caps = {'open': 5.44, 'high': 6.43, 'low': 5.2, 'close': 5.45}
    for column, cap in caps.items():
        assert round(row[column], 2) <= cap
def on_trading_close(self, timestamp):
    """Mark positions to market at the close and persist the account snapshot.

    Re-values every open position with the latest close price at or before
    `timestamp`, refreshes available amounts, drops emptied positions, and
    writes the updated account.
    """
    self.logger.info('on_trading_close:{}'.format(timestamp))
    # reset and recompute position value / total value from scratch
    self.latest_account['value'] = 0
    self.latest_account['all_value'] = 0
    for position in self.latest_account['positions']:
        # use qfq for stock
        entity_type, _, _ = decode_entity_id(position['entity_id'])
        data_schema = get_kdata_schema(entity_type, level=self.level)
        # latest bar at or before `timestamp`
        kdata = get_kdata(provider=self.provider,
                          level=self.level,
                          entity_id=position['entity_id'],
                          order=data_schema.timestamp.desc(),
                          end_timestamp=timestamp,
                          limit=1)
        # use qfq for stock
        if entity_type == 'stock':
            closing_price = kdata['qfq_close'][0]
        else:
            closing_price = kdata['close'][0]
        # after the close, everything held becomes available again
        position['available_long'] = position['long_amount']
        position['available_short'] = position['short_amount']
        if closing_price:
            if (position['long_amount'] is not None) and position['long_amount'] > 0:
                position['value'] = position['long_amount'] * closing_price
                self.latest_account['value'] += position['value']
            elif (position['short_amount'] is not None) and position['short_amount'] > 0:
                # short value = margin (2 * entry value) minus current liability
                position['value'] = 2 * (position['short_amount'] * position['average_short_price'])
                position['value'] -= position['short_amount'] * closing_price
                self.latest_account['value'] += position['value']
        else:
            self.logger.warning(
                'could not refresh close value for position:{},timestamp:{}'
                .format(position['entity_id'], timestamp))
    # remove the empty position
    self.latest_account['positions'] = [
        position for position in self.latest_account['positions']
        if position['long_amount'] > 0 or position['short_amount'] > 0
    ]
    self.latest_account['all_value'] = self.latest_account['value'] + self.latest_account['cash']
    self.latest_account['closing'] = True
    self.latest_account['timestamp'] = to_pd_timestamp(timestamp)
    self.logger.info('on_trading_close:{},latest_account:{}'.format(
        timestamp, self.latest_account))
    self.persist_account(timestamp)
def test_ma_transformer():
    """Apply MaTransformer (5/10 windows) to CHN daily kdata and print it."""
    kdata_df = get_kdata(region=Region.CHN,
                         entity_id='stock_sz_000338',
                         start_timestamp='2019-01-01',
                         provider=Provider.JoinQuant,
                         index=['entity_id', 'timestamp'])
    transformer = MaTransformer(windows=[5, 10])
    transformed = transformer.transform(kdata_df)
    print(transformed)
def test_ma_transformer():
    """Apply MaTransformer (5/10 windows) to joinquant kdata and print it."""
    kdata_df = get_kdata(entity_id='stock_sz_000338',
                         start_timestamp='2019-01-01',
                         provider='joinquant')
    transformer = MaTransformer(windows=[5, 10])
    transformed = transformer.transform(kdata_df)
    print(transformed)
def record(self, entity, start, end, size, timestamps):
    """Fetch raw and qfq bars from joinquant and return them as records.

    Also detects whether previously stored qfq prices have become stale
    (split/dividend happened) by comparing the oldest fetched qfq close
    against the stored value; if so, stashes the rescale factor in
    self.factor / self.last_timestamp for on_finish_entity to apply.
    """
    # raw (unadjusted) bars
    try:
        df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=[
                          'date', 'open', 'close', 'low', 'high', 'volume',
                          'money'
                      ],
                      include_now=False)
    except Exception as e:
        # just ignore the error,for some new stocks not in the index
        self.logger.exception(e)
        return None
    df['name'] = entity.name
    df.rename(columns={'money': 'turnover'}, inplace=True)
    df['timestamp'] = pd.to_datetime(df['date'])
    df['provider'] = 'joinquant'
    df['level'] = self.level.value
    # qfq (forward-adjusted) bars referenced to today
    end_timestamp = to_time_str(now_pd_timestamp())
    qfq_df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=['date', 'open', 'close', 'low', 'high'],
                      fq_ref_date=end_timestamp,
                      include_now=False)
    # not need to update past
    df['qfq_close'] = qfq_df['close']
    df['qfq_open'] = qfq_df['open']
    df['qfq_high'] = qfq_df['high']
    df['qfq_low'] = qfq_df['low']
    # compare the oldest fetched bar against what is already stored
    check_df = qfq_df.head(1)
    check_date = check_df['date'][0]
    current_df = get_kdata(entity_id=entity.id,
                           provider=self.provider,
                           start_timestamp=check_date,
                           end_timestamp=check_date,
                           limit=1,
                           level=self.level)
    if df_is_not_null(current_df):
        old = current_df.iloc[0, :]['qfq_close']
        new = check_df['close'][0]
        # a differing close at the same timestamp means qfq must be recomputed
        if old != new:
            self.factor = new / old
            self.last_timestamp = pd.Timestamp(check_date)
    return df.to_dict(orient='records')
def test_MacdTransformer():
    """Apply MacdTransformer to 000338 daily kdata and print the result."""
    kdata_df = get_kdata(entity_id='stock_sz_000338',
                         start_timestamp='2019-01-01',
                         provider='joinquant',
                         index=['entity_id', 'timestamp'])
    transformer = MacdTransformer()
    transformed = transformer.transform(kdata_df)
    print(transformed)
def test_to_high_level_kdata():
    """Compose weekly kdata from CHN joinquant daily bars and print both."""
    daily_df = get_kdata(region=Region.CHN,
                         provider=Provider.JoinQuant,
                         level=IntervalLevel.LEVEL_1DAY,
                         entity_id='stock_sz_000338')
    print(daily_df)
    weekly_df = to_high_level_kdata(kdata_df=daily_df.loc[:'2019-09-01', :],
                                    to_level=IntervalLevel.LEVEL_1WEEK)
    print(weekly_df)
def on_finish_entity(self, entity):
    """Backfill hfq prices and adjust factor from joinquant, then derive qfq via SQL.

    Rows missing hfq_close (from 2005 onward) are filled from joinquant's
    post-adjusted prices. qfq columns are then derived as hfq / latest_factor:
    only the NULL qfq rows when the factor is unchanged, or all rows when the
    factor moved (a new split/dividend invalidates past qfq values).
    """
    kdatas = get_kdata(
        provider=self.provider,
        entity_id=entity.id,
        level=self.level.value,
        order=self.data_schema.timestamp.asc(),
        return_type='domain',
        session=self.session,
        filters=[
            self.data_schema.hfq_close.is_(None),
            self.data_schema.timestamp >= to_pd_timestamp('2005-01-01')
        ])
    if kdatas:
        start = kdatas[0].timestamp
        # NOTE(review): `end` is computed but never used below
        end = kdatas[-1].timestamp
        # get hfq from joinquant
        df = get_price(to_jq_entity_id(entity),
                       start_date=to_time_str(start),
                       end_date=now_time_str(),
                       frequency='daily',
                       fields=['factor', 'open', 'close', 'low', 'high'],
                       skip_paused=True,
                       fq='post')
        if df_is_not_null(df):
            # fill hfq data
            for kdata in kdatas:
                time_str = to_time_str(kdata.timestamp)
                if time_str in df.index:
                    kdata.hfq_open = df.loc[time_str, 'open']
                    kdata.hfq_close = df.loc[time_str, 'close']
                    kdata.hfq_high = df.loc[time_str, 'high']
                    kdata.hfq_low = df.loc[time_str, 'low']
                    kdata.factor = df.loc[time_str, 'factor']
            self.session.add_all(kdatas)
            self.session.commit()
            latest_factor = df.factor[-1]
            # factor not change yet, no need to reset the qfq past
            if latest_factor == self.current_factors.get(entity.id):
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\' and (qfq_close isnull or qfq_high isnull or qfq_low isnull or qfq_open isnull)'.format(
                    self.data_schema.__table__, latest_factor, latest_factor, latest_factor, latest_factor, entity.id,
                    self.level.value)
            else:
                sql = 'UPDATE {} SET qfq_close=hfq_close/{},qfq_high=hfq_high/{}, qfq_open= hfq_open/{}, qfq_low= hfq_low/{} where ' \
                      'entity_id=\'{}\' and level=\'{}\''.format(self.data_schema.__table__, latest_factor,
                                                                 latest_factor, latest_factor, latest_factor,
                                                                 entity.id, self.level.value)
            self.logger.info(sql)
            self.session.execute(sql)
            self.session.commit()
def record(self, entity, start, end, size, timestamps):
    """Fetch qfq bars from joinquant and persist them.

    Keeps only forward-adjusted (qfq) prices referenced to today. If the
    oldest fetched bar's close differs from the stored one, a new adjust
    event happened, so the rescale factor is stashed in self.factor /
    self.last_timestamp for later recomputation of the saved history.
    """
    # only qfq data is kept
    if not self.end_timestamp:
        df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                      fq_ref_date=to_time_str(now_pd_timestamp()),
                      include_now=True)
    else:
        end_timestamp = to_time_str(self.end_timestamp)
        df = get_bars(to_jq_entity_id(entity),
                      count=size,
                      unit=self.jq_trading_level,
                      fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                      end_dt=end_timestamp,
                      fq_ref_date=to_time_str(now_pd_timestamp()),
                      include_now=False)
    if pd_is_not_null(df):
        df['name'] = entity.name
        df.rename(columns={'money': 'turnover', 'date': 'timestamp'}, inplace=True)
        df['entity_id'] = entity.id
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['provider'] = 'joinquant'
        df['level'] = self.level.value
        df['code'] = entity.code
        # decide whether previously saved qfq data must be recomputed
        check_df = df.head(1)
        check_date = check_df['timestamp'][0]
        current_df = get_kdata(entity_id=entity.id,
                               provider=self.provider,
                               start_timestamp=check_date,
                               end_timestamp=check_date,
                               limit=1,
                               level=self.level)
        if pd_is_not_null(current_df):
            old = current_df.iloc[0, :]['close']
            new = check_df['close'][0]
            # a differing close at the same timestamp means qfq must be recomputed
            if round(old, 2) != round(new, 2):
                self.factor = new / old
                self.last_timestamp = pd.Timestamp(check_date)

        def generate_kdata_id(se):
            # daily+ bars are keyed by date, intraday bars by full timestamp
            if self.level >= IntervalLevel.LEVEL_1DAY:
                return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY))
            else:
                return "{}_{}".format(se['entity_id'], to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601))

        df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id, axis=1)
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)
    # persistence is done here; nothing is returned for the generic pipeline
    return None
def test_jq_1d_kdata():
    """Sanity-check CHN qfq daily kdata caps for 000338 on 2019-04-08."""
    df = get_kdata(region=Region.CHN,
                   entity_id='stock_sz_000338',
                   provider=Provider.JoinQuant,
                   level=IntervalLevel.LEVEL_1DAY)
    print(df)
    row = df.loc['2019-04-08']
    # make sure our fq is ok
    caps = {'open': 12.86, 'high': 14.16, 'low': 12.86, 'close': 14.08}
    for column, cap in caps.items():
        assert round(row[column], 2) <= cap
def record(self, entity, start, end, size, timestamps, http_session):
    """Fetch adjusted bars from joinquant and trigger qfq recomputation if stale.

    hfq uses a fixed early reference date so stored values never change;
    qfq references "now", so a split/dividend shifts the whole history and
    is detected by comparing the oldest fetched close with the stored one.
    """
    if self.adjust_type == AdjustType.hfq:
        fq_ref_date = '2000-01-01'
    else:
        fq_ref_date = to_time_str(now_pd_timestamp(Region.CHN))

    if not self.end_timestamp:
        df = jq_get_bars(to_jq_entity_id(entity),
                         count=size,
                         unit=self.jq_trading_level,
                         # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                         fq_ref_date=fq_ref_date)
    else:
        end_timestamp = to_time_str(self.end_timestamp)
        df = jq_get_bars(to_jq_entity_id(entity),
                         count=size,
                         unit=self.jq_trading_level,
                         # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
                         end_date=end_timestamp,
                         fq_ref_date=fq_ref_date)
    # self.logger.info("record {} for {}, size:{}".format(self.data_schema.__name__, entity.id, len(df)))

    if pd_is_not_null(df):
        # start_timestamp = to_time_str(df.iloc[1]['timestamp'])
        # end_timestamp = to_time_str(df.iloc[-1]['timestamp'])
        # decide whether previously saved qfq data must be recomputed
        if self.adjust_type == AdjustType.qfq:
            check_df = df.head(1)
            check_date = check_df['timestamp'][0]
            current_df = get_kdata(region=self.region,
                                   entity_id=entity.id,
                                   provider=self.provider,
                                   start_timestamp=check_date,
                                   end_timestamp=check_date,
                                   limit=1,
                                   level=self.level,
                                   adjust_type=self.adjust_type)
            if pd_is_not_null(current_df):
                old = current_df.iloc[0, :]['close']
                new = check_df['close'][0]
                # a differing close at the same timestamp means qfq must be recomputed
                if round(old, 2) != round(new, 2):
                    qfq_factor = new / old
                    last_timestamp = pd.Timestamp(check_date)
                    self.recompute_qfq(entity, qfq_factor=qfq_factor, last_timestamp=last_timestamp)
        return df
    return None
def on_finish_entity(self, entity):
    """Rescale stored qfq prices older than last_timestamp by the new factor.

    self.factor is set during record() when a new adjust event is detected;
    0 means nothing changed.
    """
    if self.factor != 0:
        kdatas = get_kdata(provider=self.provider,
                           entity_id=entity.id,
                           level=self.level.value,
                           order=self.data_schema.timestamp.asc(),
                           return_type='domain',
                           session=self.session,
                           filters=[self.data_schema.timestamp < self.last_timestamp])
        if kdatas:
            # rescale the qfq columns (original comment said "hfq", but the
            # code clearly updates the qfq_* fields)
            for kdata in kdatas:
                kdata.qfq_open = kdata.qfq_open * self.factor
                kdata.qfq_close = kdata.qfq_close * self.factor
                kdata.qfq_high = kdata.qfq_high * self.factor
                kdata.qfq_low = kdata.qfq_low * self.factor
            self.session.add_all(kdatas)
            self.session.commit()
def on_finish_entity(self, entity):
    """Recompute previously saved qfq prices when the adjust factor changed.

    A non-zero self.factor (set during record()) means an adjust event
    happened; all bars older than self.last_timestamp are rescaled.
    """
    if self.factor == 0:
        return
    stale_bars = get_kdata(provider=self.provider,
                           entity_id=entity.id,
                           level=self.level.value,
                           order=self.data_schema.timestamp.asc(),
                           return_type='domain',
                           session=self.session,
                           filters=[self.data_schema.timestamp < self.last_timestamp])
    if not stale_bars:
        return
    self.logger.info('recomputing {} qfq kdata,factor is:{}'.format(entity.code, self.factor))
    for bar in stale_bars:
        bar.open = round(bar.open * self.factor, 2)
        bar.close = round(bar.close * self.factor, 2)
        bar.high = round(bar.high * self.factor, 2)
        bar.low = round(bar.low * self.factor, 2)
    self.session.add_all(stale_bars)
    self.session.commit()
def __init__(self,
             entity_ids=None,
             codes=None,
             batch_size=10,
             force_update=False,
             sleeping_time=5,
             default_size=2000,
             one_shot=False,
             fix_duplicate_way='ignore',
             start_timestamp=None,
             end_timestamp=None,
             contain_unfinished_data=False,
             level=IntervalLevel.LEVEL_1DAY,
             kdata_use_begin_time=False,
             close_hour=15,
             close_minute=0,
             one_day_trading_minutes=4 * 60) -> None:
    """Initialize the joinquant stock kdata recorder.

    Caches each entity's latest stored adjust factor so that updates only
    recompute qfq prices when the factor actually changed, then
    authenticates against joinquant.
    """
    # weekly and higher levels are composed from daily bars
    assert level <= IntervalLevel.LEVEL_1DAY
    self.data_schema = get_kdata_schema(entity_type='stock', level=level)
    self.jq_trading_level = to_jq_trading_level(level)
    super().__init__('stock', ['sh', 'sz'], entity_ids, codes, batch_size,
                     force_update, sleeping_time, default_size, one_shot,
                     fix_duplicate_way, start_timestamp, end_timestamp,
                     contain_unfinished_data, level, kdata_use_begin_time,
                     close_hour, close_minute, one_day_trading_minutes)
    # latest saved factor per entity; qfq is only recomputed when it changes
    self.current_factors = {}
    for security_item in self.entities:
        kdata = get_kdata(entity_id=security_item.id,
                          provider=self.provider,
                          level=self.level.value,
                          order=self.data_schema.timestamp.desc(),
                          limit=1,
                          return_type='domain',
                          session=self.session)
        if kdata:
            self.current_factors[security_item.id] = kdata[0].factor
            self.logger.info('{} latest factor:{}'.format(
                security_item.id, kdata[0].factor))
    auth(JQ_ACCOUNT, JQ_PASSWD)
def filter_selector_long_targets(self, timestamp, selector: TargetSelector, long_targets: List[str]) -> List[str]:
    """Post-filter the daily-level selector's picks by turnover.

    Keeps the 10 picks with the lowest turnover on `timestamp`; non-daily
    selectors pass through unchanged.
    """
    if selector.level != IntervalLevel.LEVEL_1DAY:
        return long_targets
    if not long_targets:
        return None
    df = get_kdata(region=self.region,
                   entity_ids=long_targets,
                   start_timestamp=timestamp,
                   end_timestamp=timestamp,
                   columns=['entity_id', 'turnover'])
    if pd_is_not_null(df):
        # FIX: sort_values is not in-place — the original discarded its
        # result, so the turnover ordering never took effect.
        df = df.sort_values(by=['turnover'])
        return df['entity_id'].iloc[:10].tolist()
    return None
def select_long_targets_from_levels(self, timestamp):
    """Refine the multi-level long picks by turnover at `timestamp`.

    level_map_long_targets holds each level's picks; by default an entity
    must be selected at every level. For daily-and-above traders, the picks
    are ordered by turnover and entities ranked 6-10 are returned (all of
    them when there are 5 or fewer).
    """
    long_targets = super().select_long_targets_from_levels(timestamp)
    if self.level < IntervalLevel.LEVEL_1DAY:
        return long_targets
    if not long_targets:
        return None
    df = get_kdata(region=self.region,
                   entity_ids=list(long_targets),
                   start_timestamp=timestamp,
                   end_timestamp=timestamp,
                   columns=['entity_id', 'turnover'])
    if pd_is_not_null(df):
        # FIX: sort_values is not in-place — the original discarded its
        # result, so the turnover ordering never took effect.
        df = df.sort_values(by=['turnover'])
        if len(df['entity_id']) > 5:
            return df['entity_id'].iloc[5:10].tolist()
        return df['entity_id'].tolist()
    return None
def on_trading_signal(self, trading_signal: TradingSignal):
    """Turn a trading signal into an order priced at the signal bar's close.

    Looks up the kdata bar at the signal timestamp; stocks use the qfq close,
    other entity types use the raw close. Signals with no resolvable order
    type, missing kdata, or a falsy price are logged and ignored.
    """
    self.logger.debug('trader:{} received trading signal:{}'.format(
        self.trader_name, trading_signal))
    entity_id = trading_signal.entity_id
    current_timestamp = trading_signal.the_timestamp
    order_type = AccountService.trading_signal_to_order_type(
        trading_signal.trading_signal_type)
    trading_level = trading_signal.trading_level.value
    if order_type:
        try:
            # the bar at exactly the signal timestamp
            kdata = get_kdata(provider=self.provider,
                              entity_id=entity_id,
                              level=trading_level,
                              start_timestamp=current_timestamp,
                              end_timestamp=current_timestamp,
                              limit=1)
            if kdata is not None and not kdata.empty:
                # use qfq for stock
                entity_type, _, _ = decode_entity_id(kdata['entity_id'][0])
                if entity_type == 'stock':
                    the_price = kdata['qfq_close'][0]
                else:
                    the_price = kdata['close'][0]
                if the_price:
                    self.order(entity_id=entity_id,
                               current_price=the_price,
                               current_timestamp=current_timestamp,
                               order_pct=trading_signal.position_pct,
                               order_money=trading_signal.order_money,
                               order_type=order_type)
                else:
                    self.logger.warning(
                        'ignore trading signal,wrong kdata,entity_id:{},timestamp:{},kdata:{}'
                        .format(entity_id, current_timestamp, kdata.to_dict(orient='records')))
            else:
                self.logger.warning(
                    'ignore trading signal,could not get kdata,entity_id:{},timestamp:{}'
                    .format(entity_id, current_timestamp))
        except Exception as e:
            self.logger.exception(e)
def on_finish_entity(self, entity):
    """Backfill missing cumulative net values for an index's daily kdata.

    Loads rows whose cumulative_net_value is still NULL, fetches the
    cumulative net value from eastmoney for that date span, fills the
    records, and commits.
    """
    records = get_kdata(entity_id=entity.id,
                        level=IntervalLevel.LEVEL_1DAY.value,
                        order=Index1dKdata.timestamp.asc(),
                        return_type='domain',
                        session=self.session,
                        filters=[Index1dKdata.cumulative_net_value.is_(None)])
    if not records:
        return
    start, end = records[0].timestamp, records[-1].timestamp
    # cumulative net value comes from eastmoney
    df = self.fetch_cumulative_net_value(entity, start, end)
    if df is not None and not df.empty:
        for record in records:
            if record.timestamp in df.index:
                record.cumulative_net_value = df.loc[record.timestamp, 'LJJZ']
                record.change_pct = df.loc[record.timestamp, 'JZZZL']
        self.session.commit()
        self.logger.info(f'{entity.code} - {entity.name}累计净值更新完成...')
def on_trading_signal(self, trading_signal: TradingSignal):
    """Turn a trading signal into an order priced at the signal bar's close.

    Fetches the kdata bar at the signal's happen_timestamp with the
    account's adjust type. Raises WrongKdataError when the kdata lookup
    itself fails; missing kdata or a falsy price just logs and ignores
    the signal.
    """
    entity_id = trading_signal.entity_id
    happen_timestamp = trading_signal.happen_timestamp
    order_type = AccountService.trading_signal_to_order_type(
        trading_signal.trading_signal_type)
    trading_level = trading_signal.trading_level.value
    if order_type:
        try:
            # the bar at exactly the signal timestamp
            kdata = get_kdata(provider=self.provider,
                              entity_id=entity_id,
                              level=trading_level,
                              start_timestamp=happen_timestamp,
                              end_timestamp=happen_timestamp,
                              limit=1,
                              adjust_type=self.adjust_type)
        except Exception as e:
            self.logger.error(e)
            raise WrongKdataError("could not get kdata")
        if pd_is_not_null(kdata):
            # entity_type currently unused beyond decoding — TODO confirm intent
            entity_type, _, _ = decode_entity_id(kdata['entity_id'][0])
            the_price = kdata['close'][0]
            if the_price:
                self.order(entity_id=entity_id,
                           current_price=the_price,
                           current_timestamp=happen_timestamp,
                           order_pct=trading_signal.position_pct,
                           order_money=trading_signal.order_money,
                           order_type=order_type)
            else:
                self.logger.warning(
                    'ignore trading signal,wrong kdata,entity_id:{},timestamp:{},kdata:{}'
                    .format(entity_id, happen_timestamp, kdata.to_dict(orient='records')))
        else:
            self.logger.warning(
                'ignore trading signal,could not get kdata,entity_id:{},timestamp:{}'
                .format(entity_id, happen_timestamp))
def recompute_qfq(self, entity, qfq_factor, last_timestamp):
    """Rescale stored qfq bars older than last_timestamp by qfq_factor.

    A factor of 0 means no adjust event was detected, so nothing is done.
    """
    if qfq_factor == 0:
        return
    stale_bars = get_kdata(region=self.region,
                           provider=self.provider,
                           entity_id=entity.id,
                           level=self.level.value,
                           order=self.data_schema.timestamp.asc(),
                           return_type='domain',
                           filters=[self.data_schema.timestamp < last_timestamp])
    if not stale_bars:
        return
    self.logger.info('recomputing {} qfq kdata,factor is:{}'.format(entity.code, qfq_factor))
    for bar in stale_bars:
        bar.open = round(bar.open * qfq_factor, 2)
        bar.close = round(bar.close * qfq_factor, 2)
        bar.high = round(bar.high * qfq_factor, 2)
        bar.low = round(bar.low * qfq_factor, 2)
    session = get_db_session(region=self.region,
                             provider=self.provider,
                             data_schema=self.data_schema)
    session.bulk_save_objects(stale_bars)
    session.commit()