def record(self, entity, start, end, size, timestamps):
    """Fetch up to ``size`` kline bars for ``entity`` from joinquant and persist them.

    :param entity: entity object providing ``id``, ``code`` and ``name``
    :param start: unused here; selection is driven by ``size``/``self.end_timestamp``
    :param end: unused here
    :param size: number of bars to request from the provider
    :param timestamps: unused here
    :return: None — the data is written to the db as a side effect
    """
    # The wrapper's end_date defaults to None, so normalizing the optional
    # end timestamp once lets us make a single call instead of duplicating
    # the whole get_bars invocation in two branches.
    end_date = to_time_str(self.end_timestamp) if self.end_timestamp else None
    df = get_bars(
        to_jq_entity_id(entity),
        count=size,
        unit=self.jq_trading_level,
        # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
        end_date=end_date,
    )

    if pd_is_not_null(df):
        df["name"] = entity.name
        df.rename(columns={"money": "turnover", "date": "timestamp"}, inplace=True)

        df["entity_id"] = entity.id
        df["timestamp"] = pd.to_datetime(df["timestamp"])
        df["provider"] = "joinquant"
        df["level"] = self.level.value
        df["code"] = entity.code

        # Daily and coarser levels are unique per day; intraday levels need the
        # full ISO timestamp. The level check is row-invariant, so hoist it out
        # of the per-row id builder instead of re-testing it on every row.
        fmt = TIME_FORMAT_DAY if self.level >= IntervalLevel.LEVEL_1DAY else TIME_FORMAT_ISO8601

        def generate_kdata_id(se):
            # id = "<entity_id>_<formatted timestamp>"
            return "{}_{}".format(se["entity_id"], to_time_str(se["timestamp"], fmt=fmt))

        df["id"] = df[["entity_id", "timestamp"]].apply(generate_kdata_id, axis=1)
        # Keep the most recent row when the provider returns the same bar twice.
        df = df.drop_duplicates(subset="id", keep="last")

        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
                 force_update=self.force_update)

    return None
def record(self, entity, start, end, size, timestamps):
    """Pull kline bars for ``entity`` from joinquant and write them to the db.

    Returns None; persistence happens as a side effect via ``df_to_db``.
    """
    # Assemble the provider request; end_date is only included when an
    # explicit end timestamp was configured, mirroring the optional argument.
    request = dict(
        count=size,
        unit=self.jq_trading_level,
        # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
    )
    if self.end_timestamp:
        request['end_date'] = to_time_str(self.end_timestamp)
    bars = get_bars(to_jq_entity_id(entity), **request)

    if not pd_is_not_null(bars):
        return None

    # Normalize provider columns to the local schema.
    bars.rename(columns={'money': 'turnover', 'date': 'timestamp'}, inplace=True)
    bars['name'] = entity.name
    bars['entity_id'] = entity.id
    bars['timestamp'] = pd.to_datetime(bars['timestamp'])
    bars['provider'] = 'joinquant'
    bars['level'] = self.level.value
    bars['code'] = entity.code

    def _row_id(row):
        # Day-or-coarser levels key on the date; intraday keys on full ISO time.
        fmt = TIME_FORMAT_DAY if self.level >= IntervalLevel.LEVEL_1DAY else TIME_FORMAT_ISO8601
        return f"{row['entity_id']}_{to_time_str(row['timestamp'], fmt=fmt)}"

    bars['id'] = bars[['entity_id', 'timestamp']].apply(_row_id, axis=1)
    bars = bars.drop_duplicates(subset='id', keep='last')

    df_to_db(df=bars, data_schema=self.data_schema, provider=self.provider,
             force_update=self.force_update)
    return None
def jq_get_bars(code="600000.XSHG", count=10, unit='1d', end_date=None, fq_ref_date=None,
                return_type='df', parse_dates=None):
    """Best-effort wrapper around ``jq.get_bars``.

    :param code: joinquant security code
    :param count: number of bars to fetch
    :param unit: bar interval, e.g. ``'1d'``
    :param end_date: optional end date for the window
    :param fq_ref_date: optional adjustment reference date
    :param return_type: passed through to joinquant (``'df'`` by default)
    :param parse_dates: columns to parse as dates; defaults to ``['date']``
    :return: whatever ``jq.get_bars`` returns, or None on any error
    """
    # Avoid a mutable default argument (shared list across calls); build the
    # default inside the function instead.
    if parse_dates is None:
        parse_dates = ['date']
    try:
        return jq.get_bars(code=code, count=count, unit=unit, end_date=end_date,
                           fq_ref_date=fq_ref_date, return_type=return_type,
                           parse_dates=parse_dates)
    except Exception as e:
        # Deliberate best-effort: log and signal failure with None so callers
        # can skip this security rather than abort the whole run.
        logger.error(f'jq_get_bars, code: {code}, error: {e}')
        return None
def record(self, entity, start, end, size, timestamps):
    """Fetch adjusted kline bars for ``entity`` from joinquant and persist them.

    Picks the adjustment reference date from ``self.adjust_type`` (hfq uses a
    fixed historical date, otherwise today), downloads up to ``size`` bars,
    and — for qfq data — triggers a recomputation of previously stored rows
    when the adjustment factor has changed. Returns None; data is written to
    the db as a side effect.
    """
    # hfq (post-adjustment) anchors to a fixed early date; anything else
    # (qfq / none) anchors to the current timestamp.
    if self.adjust_type == AdjustType.hfq:
        fq_ref_date = '2000-01-01'
    else:
        fq_ref_date = to_time_str(now_pd_timestamp())

    if not self.end_timestamp:
        df = get_bars(
            to_jq_entity_id(entity),
            count=size,
            unit=self.jq_trading_level,
            # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
            fq_ref_date=fq_ref_date)
    else:
        end_timestamp = to_time_str(self.end_timestamp)
        df = get_bars(
            to_jq_entity_id(entity),
            count=size,
            unit=self.jq_trading_level,
            # fields=['date', 'open', 'close', 'low', 'high', 'volume', 'money'],
            end_date=end_timestamp,
            fq_ref_date=fq_ref_date)

    if pd_is_not_null(df):
        # Normalize provider columns to the local schema.
        df['name'] = entity.name
        df.rename(columns={
            'money': 'turnover',
            'date': 'timestamp'
        }, inplace=True)

        df['entity_id'] = entity.id
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['provider'] = 'joinquant'
        df['level'] = self.level.value
        df['code'] = entity.code

        # Decide whether previously saved qfq (pre-adjusted) data must be
        # recomputed: compare the newly fetched first bar against the stored
        # bar for the same timestamp.
        if self.adjust_type == AdjustType.qfq:
            check_df = df.head(1)
            check_date = check_df['timestamp'][0]
            current_df = get_kdata(entity_id=entity.id, provider=self.provider,
                                   start_timestamp=check_date,
                                   end_timestamp=check_date, limit=1,
                                   level=self.level,
                                   adjust_type=self.adjust_type)
            if pd_is_not_null(current_df):
                old = current_df.iloc[0, :]['close']
                new = check_df['close'][0]
                # Differing close at the same timestamp means the qfq factor
                # changed and the stored history must be re-adjusted.
                # (2-decimal rounding tolerates float noise.)
                if round(old, 2) != round(new, 2):
                    qfq_factor = new / old
                    last_timestamp = pd.Timestamp(check_date)
                    self.recompute_qfq(entity, qfq_factor=qfq_factor,
                                       last_timestamp=last_timestamp)

        def generate_kdata_id(se):
            # Daily-or-coarser ids key on the date; intraday ids need the
            # full ISO timestamp for uniqueness.
            if self.level >= IntervalLevel.LEVEL_1DAY:
                return "{}_{}".format(
                    se['entity_id'],
                    to_time_str(se['timestamp'], fmt=TIME_FORMAT_DAY))
            else:
                return "{}_{}".format(
                    se['entity_id'],
                    to_time_str(se['timestamp'], fmt=TIME_FORMAT_ISO8601))

        df['id'] = df[['entity_id', 'timestamp']].apply(generate_kdata_id,
                                                        axis=1)
        # Keep the most recent row if the provider returned duplicate bars.
        df = df.drop_duplicates(subset='id', keep='last')
        df_to_db(df=df, data_schema=self.data_schema, provider=self.provider,
                 force_update=self.force_update)
    return None